Posted to commits@beam.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2018/04/29 18:15:40 UTC

Build failed in Jenkins: beam_PerformanceTests_MongoDBIO_IT #110

See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/110/display/redirect>

------------------------------------------
[...truncated 879.98 KB...]
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)
com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.225.151.117:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
	at com.mongodb.Mongo.execute(Mongo.java:781)
	at com.mongodb.Mongo$2.execute(Mongo.java:764)
	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.225.151.117:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
	at com.mongodb.Mongo.execute(Mongo.java:781)
	at com.mongodb.Mongo$2.execute(Mongo.java:764)
	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.225.151.117:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
	at com.mongodb.Mongo.execute(Mongo.java:781)
	at com.mongodb.Mongo$2.execute(Mongo.java:764)
	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
Workflow failed. Causes: S01:Generate sequence/Read(BoundedCountingSource)+Produce documents/Map+Write documents to MongoDB/ParDo(Write) failed., A work item was attempted 4 times without success. Each time the worker eventually lost contact with the service. The work item was attempted on: 
  mongodbioit0testwriteandr-04291108-bnui-harness-zbz7,
  mongodbioit0testwriteandr-04291108-bnui-harness-zbz7,
  mongodbioit0testwriteandr-04291108-bnui-harness-zbz7,
  mongodbioit0testwriteandr-04291108-bnui-harness-zbz7
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
	at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
	at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:346)
	at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:328)
	at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(MongoDBIOIT.java:118)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.junit.rules.RunRules.evaluate(RunRules.java:20)
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
	at org.apache.maven.surefire.junitcore.pc.Scheduler$1.run(Scheduler.java:410)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)

[ERROR] testWriteAndRead(org.apache.beam.sdk.io.mongodb.MongoDBIOIT)  Time elapsed: 0 s  <<< ERROR!
com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.225.151.117:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:158)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:133)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:128)
	at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:51)
	at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:36)
	at com.mongodb.Mongo.execute(Mongo.java:781)
	at com.mongodb.Mongo$2.execute(Mongo.java:764)
	at com.mongodb.MongoDatabaseImpl.drop(MongoDatabaseImpl.java:136)
	at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.tearDown(MongoDBIOIT.java:103)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:33)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.junit.rules.RunRules.evaluate(RunRules.java:20)
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
	at org.apache.maven.surefire.junitcore.pc.Scheduler$1.run(Scheduler.java:410)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)

[INFO] 
[INFO] Results:
[INFO] 
[ERROR] Errors: 
[ERROR] org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(org.apache.beam.sdk.io.mongodb.MongoDBIOIT)
[ERROR]   Run 1: MongoDBIOIT.testWriteAndRead:118  Runtime com.mongodb.MongoTimeoutException: ...
[ERROR]   Run 2: MongoDBIOIT.tearDown:103  MongoTimeout Timed out after 30000 ms while waiting...
[INFO] 
[INFO] 
[ERROR] Tests run: 1, Failures: 0, Errors: 1, Skipped: 0
[INFO] 
[INFO] 
[INFO] --- maven-dependency-plugin:3.0.2:analyze-only (default) @ beam-sdks-java-io-mongodb ---
[INFO] No dependency problems found
[INFO] 
[INFO] --- maven-failsafe-plugin:2.21.0:verify (default) @ beam-sdks-java-io-mongodb ---
[INFO] Failsafe report directory: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/ccc9bd91/beam/sdks/java/io/mongodb/target/failsafe-reports>
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 07:34 min
[INFO] Finished at: 2018-04-29T18:15:38Z
[INFO] Final Memory: 85M/2362M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.21.0:verify (default) on project beam-sdks-java-io-mongodb: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/ccc9bd91/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
[ERROR] Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
[ERROR] -> [Help 1]
org.apache.maven.lifecycle.LifecycleExecutionException: Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.21.0:verify (default) on project beam-sdks-java-io-mongodb: There are test failures.

Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/ccc9bd91/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:213)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
    at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
    at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
    at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
    at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
    at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
    at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
    at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke (Method.java:498)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
    at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
    at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
Caused by: org.apache.maven.plugin.MojoFailureException: There are test failures.

Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/ccc9bd91/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
    at org.apache.maven.plugin.surefire.SurefireHelper.throwException (SurefireHelper.java:240)
    at org.apache.maven.plugin.surefire.SurefireHelper.reportExecution (SurefireHelper.java:112)
    at org.apache.maven.plugin.failsafe.VerifyMojo.execute (VerifyMojo.java:192)
    at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo (DefaultBuildPluginManager.java:134)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:208)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
    at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
    at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
    at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
    at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
    at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
    at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
    at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke (Method.java:498)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
    at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
    at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
[ERROR] 
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException

STDERR: 
2018-04-29 18:15:39,146 ccc9bd91 MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-04-29 18:15:39,147 ccc9bd91 MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-04-29 18:15:39,147 ccc9bd91 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525014087005> delete -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml> --ignore-not-found
2018-04-29 18:15:39,887 ccc9bd91 MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 780, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-04-29 18:15:39,887 ccc9bd91 MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-04-29 18:15:39,929 ccc9bd91 MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-04-29 18:15:39,930 ccc9bd91 MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/ccc9bd91/pkb.log>
2018-04-29 18:15:39,930 ccc9bd91 MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/ccc9bd91/completion_statuses.json>
Build step 'Execute shell' marked build as failure

Jenkins build is back to normal : beam_PerformanceTests_MongoDBIO_IT #133

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/133/display/redirect?page=changes>


Build failed in Jenkins: beam_PerformanceTests_MongoDBIO_IT #132

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/132/display/redirect?page=changes>

Changes:

[coheigea] Combine nested if statements

------------------------------------------
[...truncated 296.69 KB...]
    INFO: No server chosen by WritableServerSelector from cluster description ClusterDescription{type=UNKNOWN, connectionMode=SINGLE, all=[ServerDescription{address=173.255.112.144:27017, type=UNKNOWN, state=CONNECTING}]}. Waiting for 30000 ms before timing out
    May 05, 2018 6:13:45 AM com.mongodb.diagnostics.logging.SLF4JLogger info
    INFO: Exception in monitor thread while connecting to server 173.255.112.144:27017
    com.mongodb.MongoSocketOpenException: Exception opening socket
    	at com.mongodb.connection.SocketStream.open(SocketStream.java:63)
    	at com.mongodb.connection.InternalStreamConnection.open(InternalStreamConnection.java:114)
    	at com.mongodb.connection.DefaultServerMonitor$ServerMonitorRunnable.run(DefaultServerMonitor.java:128)
    	at java.lang.Thread.run(Thread.java:748)
    Caused by: java.net.SocketTimeoutException: connect timed out
    	at java.net.PlainSocketImpl.socketConnect(Native Method)
    	at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350)
    	at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206)
    	at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188)
    	at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392)
    	at java.net.Socket.connect(Socket.java:589)
    	at com.mongodb.connection.SocketStreamHelper.initialize(SocketStreamHelper.java:50)
    	at com.mongodb.connection.SocketStream.open(SocketStream.java:58)
    	... 3 more


Gradle Test Executor 1 finished executing tests.

> Task :beam-sdks-java-io-mongodb:integrationTest

org.apache.beam.sdk.io.mongodb.MongoDBIOIT > testWriteAndRead FAILED
    java.lang.RuntimeException: com.mongodb.MongoSocketReadException: Prematurely reached end of stream
    	at com.mongodb.connection.SocketStream.read(SocketStream.java:88)
    	at com.mongodb.connection.InternalStreamConnection.receiveResponseBuffers(InternalStreamConnection.java:491)
    	at com.mongodb.connection.InternalStreamConnection.receiveMessage(InternalStreamConnection.java:221)
    	at com.mongodb.connection.UsageTrackingInternalConnection.receiveMessage(UsageTrackingInternalConnection.java:102)
    	at com.mongodb.connection.DefaultConnectionPool$PooledConnection.receiveMessage(DefaultConnectionPool.java:435)
    	at com.mongodb.connection.WriteCommandProtocol.receiveMessage(WriteCommandProtocol.java:234)
    	at com.mongodb.connection.WriteCommandProtocol.execute(WriteCommandProtocol.java:104)
    	at com.mongodb.connection.InsertCommandProtocol.execute(InsertCommandProtocol.java:67)
    	at com.mongodb.connection.InsertCommandProtocol.execute(InsertCommandProtocol.java:37)
    	at com.mongodb.connection.DefaultServer$DefaultServerProtocolExecutor.execute(DefaultServer.java:159)
    	at com.mongodb.connection.DefaultServerConnection.executeProtocol(DefaultServerConnection.java:286)
    	at com.mongodb.connection.DefaultServerConnection.insertCommand(DefaultServerConnection.java:115)
    	at com.mongodb.operation.MixedBulkWriteOperation$Run$2.executeWriteCommandProtocol(MixedBulkWriteOperation.java:455)
    	at com.mongodb.operation.MixedBulkWriteOperation$Run$RunExecutor.execute(MixedBulkWriteOperation.java:646)
    	at com.mongodb.operation.MixedBulkWriteOperation$Run.execute(MixedBulkWriteOperation.java:401)
    	at com.mongodb.operation.MixedBulkWriteOperation$1.call(MixedBulkWriteOperation.java:179)
    	at com.mongodb.operation.MixedBulkWriteOperation$1.call(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.OperationHelper.withConnectionSource(OperationHelper.java:230)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:221)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=173.255.112.144:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=173.255.112.144:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=173.255.112.144:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    Workflow failed. Causes: S01:Generate sequence/Read(BoundedCountingSource)+Produce documents/Map+Write documents to MongoDB/ParDo(Write) failed., A work item was attempted 4 times without success. Each time the worker eventually lost contact with the service. The work item was attempted on: 
      mongodbioit0testwriteandr-05042306-e7l5-harness-p69q,
      mongodbioit0testwriteandr-05042306-e7l5-harness-p69q,
      mongodbioit0testwriteandr-05042306-e7l5-harness-p69q,
      mongodbioit0testwriteandr-05042306-e7l5-harness-p69q
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
        at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:346)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:328)
        at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(MongoDBIOIT.java:118)

    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=173.255.112.144:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
        at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
        at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
        at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
        at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
        at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:158)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:133)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:128)
        at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:51)
        at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:36)
        at com.mongodb.Mongo.execute(Mongo.java:781)
        at com.mongodb.Mongo$2.execute(Mongo.java:764)
        at com.mongodb.MongoDatabaseImpl.drop(MongoDatabaseImpl.java:136)
        at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.tearDown(MongoDBIOIT.java:103)

1 test completed, 1 failed
Finished generating test XML results (0.023 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/d659d710/beam/sdks/java/io/mongodb/build/test-results/integrationTest>
Generating HTML test report...
Finished generating test html results (0.03 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/d659d710/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest>

> Task :beam-sdks-java-io-mongodb:integrationTest FAILED
:beam-sdks-java-io-mongodb:integrationTest (Thread[Task worker for ':' Thread 15,5,main]) completed. Took 7 mins 28.801 secs.

Deprecated Gradle features were used in this build, making it incompatible with Gradle 5.0.
See https://docs.gradle.org/4.7/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 47s
55 actionable tasks: 21 executed, 11 from cache, 23 up-to-date

Publishing build scan...
https://gradle.com/s/a5aoefdj2zs7y


STDERR: 
FAILURE: Build failed with an exception.

* What went wrong:
Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
> There were failing tests. See the report at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/d659d710/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>

* Try:
Run with --debug option to get more log output. Run with --scan to get full insights.

* Exception is:
org.gradle.api.tasks.TaskExecutionException: Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:103)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.execute(ExecuteActionsTaskExecuter.java:73)
	at org.gradle.api.internal.tasks.execution.OutputDirectoryCreatingTaskExecuter.execute(OutputDirectoryCreatingTaskExecuter.java:51)
	at org.gradle.api.internal.tasks.execution.SkipCachedTaskExecuter.execute(SkipCachedTaskExecuter.java:105)
	at org.gradle.api.internal.tasks.execution.SkipUpToDateTaskExecuter.execute(SkipUpToDateTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.ResolveTaskOutputCachingStateExecuter.execute(ResolveTaskOutputCachingStateExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ResolveBuildCacheKeyExecuter.execute(ResolveBuildCacheKeyExecuter.java:66)
	at org.gradle.api.internal.tasks.execution.ValidatingTaskExecuter.execute(ValidatingTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipEmptySourceFilesTaskExecuter.execute(SkipEmptySourceFilesTaskExecuter.java:101)
	at org.gradle.api.internal.tasks.execution.FinalizeInputFilePropertiesTaskExecuter.execute(FinalizeInputFilePropertiesTaskExecuter.java:44)
	at org.gradle.api.internal.tasks.execution.CleanupStaleOutputsExecuter.execute(CleanupStaleOutputsExecuter.java:91)
	at org.gradle.api.internal.tasks.execution.ResolveTaskArtifactStateTaskExecuter.execute(ResolveTaskArtifactStateTaskExecuter.java:62)
	at org.gradle.api.internal.tasks.execution.SkipTaskWithNoActionsExecuter.execute(SkipTaskWithNoActionsExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipOnlyIfTaskExecuter.execute(SkipOnlyIfTaskExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ExecuteAtMostOnceTaskExecuter.execute(ExecuteAtMostOnceTaskExecuter.java:43)
	at org.gradle.api.internal.tasks.execution.CatchExceptionTaskExecuter.execute(CatchExceptionTaskExecuter.java:34)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker$1.run(DefaultTaskGraphExecuter.java:256)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:249)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:238)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:104)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:98)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.execute(DefaultTaskExecutionPlan.java:663)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.executeWithTask(DefaultTaskExecutionPlan.java:596)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker.run(DefaultTaskPlanExecutor.java:98)
	at org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:63)
	at org.gradle.internal.concurrent.ManagedExecutorImpl$1.run(ManagedExecutorImpl.java:46)
	at org.gradle.internal.concurrent.ThreadFactoryImpl$ManagedThreadRunnable.run(ThreadFactoryImpl.java:55)
Caused by: org.gradle.api.GradleException: There were failing tests. See the report at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/d659d710/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>
	at org.gradle.api.tasks.testing.AbstractTestTask.handleTestFailures(AbstractTestTask.java:612)
	at org.gradle.api.tasks.testing.AbstractTestTask.executeTests(AbstractTestTask.java:484)
	at org.gradle.api.tasks.testing.Test.executeTests(Test.java:583)
	at org.gradle.internal.reflect.JavaMethod.invoke(JavaMethod.java:73)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.doExecute(StandardTaskAction.java:46)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:39)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:26)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:794)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:761)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter$1.run(ExecuteActionsTaskExecuter.java:124)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeAction(ExecuteActionsTaskExecuter.java:113)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:95)
	... 31 more


* Get more help at https://help.gradle.org

2018-05-05 06:13:57,919 d659d710 MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 647, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 527, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-05 06:13:57,920 d659d710 MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-05-05 06:13:57,921 d659d710 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525497103645> delete -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml> --ignore-not-found
2018-05-05 06:13:58,439 d659d710 MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 781, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 647, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 527, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-05 06:13:58,439 d659d710 MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-05-05 06:13:58,461 d659d710 MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-05-05 06:13:58,461 d659d710 MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/d659d710/pkb.log>
2018-05-05 06:13:58,462 d659d710 MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/d659d710/completion_statuses.json>
Build step 'Execute shell' marked build as failure

Build failed in Jenkins: beam_PerformanceTests_MongoDBIO_IT #131

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/131/display/redirect?page=changes>

Changes:

[mariagh] Update CoGroupByKey docstring

[sidhom] Use wildcard generics rather than Object in portable translator

[sidhom] [BEAM-4231] Runner utility for coder instantiation

------------------------------------------
[...truncated 298.28 KB...]
    INFO: No server chosen by WritableServerSelector from cluster description ClusterDescription{type=UNKNOWN, connectionMode=SINGLE, all=[ServerDescription{address=35.202.112.125:27017, type=UNKNOWN, state=CONNECTING}]}. Waiting for 30000 ms before timing out
    May 05, 2018 12:23:52 AM com.mongodb.diagnostics.logging.SLF4JLogger info
    INFO: Exception in monitor thread while connecting to server 35.202.112.125:27017
    com.mongodb.MongoSocketOpenException: Exception opening socket
    	at com.mongodb.connection.SocketStream.open(SocketStream.java:63)
    	at com.mongodb.connection.InternalStreamConnection.open(InternalStreamConnection.java:114)
    	at com.mongodb.connection.DefaultServerMonitor$ServerMonitorRunnable.run(DefaultServerMonitor.java:128)
    	at java.lang.Thread.run(Thread.java:748)
    Caused by: java.net.SocketTimeoutException: connect timed out
    	at java.net.PlainSocketImpl.socketConnect(Native Method)
    	at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350)
    	at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206)
    	at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188)
    	at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392)
    	at java.net.Socket.connect(Socket.java:589)
    	at com.mongodb.connection.SocketStreamHelper.initialize(SocketStreamHelper.java:50)
    	at com.mongodb.connection.SocketStream.open(SocketStream.java:58)
    	... 3 more


Gradle Test Executor 1 finished executing tests.

> Task :beam-sdks-java-io-mongodb:integrationTest

org.apache.beam.sdk.io.mongodb.MongoDBIOIT > testWriteAndRead FAILED
    java.lang.RuntimeException: com.mongodb.MongoSocketReadException: Prematurely reached end of stream
    	at com.mongodb.connection.SocketStream.read(SocketStream.java:88)
    	at com.mongodb.connection.InternalStreamConnection.receiveResponseBuffers(InternalStreamConnection.java:491)
    	at com.mongodb.connection.InternalStreamConnection.receiveMessage(InternalStreamConnection.java:221)
    	at com.mongodb.connection.UsageTrackingInternalConnection.receiveMessage(UsageTrackingInternalConnection.java:102)
    	at com.mongodb.connection.DefaultConnectionPool$PooledConnection.receiveMessage(DefaultConnectionPool.java:435)
    	at com.mongodb.connection.WriteCommandProtocol.receiveMessage(WriteCommandProtocol.java:234)
    	at com.mongodb.connection.WriteCommandProtocol.execute(WriteCommandProtocol.java:104)
    	at com.mongodb.connection.InsertCommandProtocol.execute(InsertCommandProtocol.java:67)
    	at com.mongodb.connection.InsertCommandProtocol.execute(InsertCommandProtocol.java:37)
    	at com.mongodb.connection.DefaultServer$DefaultServerProtocolExecutor.execute(DefaultServer.java:159)
    	at com.mongodb.connection.DefaultServerConnection.executeProtocol(DefaultServerConnection.java:286)
    	at com.mongodb.connection.DefaultServerConnection.insertCommand(DefaultServerConnection.java:115)
    	at com.mongodb.operation.MixedBulkWriteOperation$Run$2.executeWriteCommandProtocol(MixedBulkWriteOperation.java:455)
    	at com.mongodb.operation.MixedBulkWriteOperation$Run$RunExecutor.execute(MixedBulkWriteOperation.java:646)
    	at com.mongodb.operation.MixedBulkWriteOperation$Run.execute(MixedBulkWriteOperation.java:401)
    	at com.mongodb.operation.MixedBulkWriteOperation$1.call(MixedBulkWriteOperation.java:179)
    	at com.mongodb.operation.MixedBulkWriteOperation$1.call(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.OperationHelper.withConnectionSource(OperationHelper.java:230)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:221)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.202.112.125:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.202.112.125:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.202.112.125:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    Workflow failed. Causes: S01:Generate sequence/Read(BoundedCountingSource)+Produce documents/Map+Write documents to MongoDB/ParDo(Write) failed., A work item was attempted 4 times without success. Each time the worker eventually lost contact with the service. The work item was attempted on: 
      mongodbioit0testwriteandr-05041715-85ey-harness-5mwm,
      mongodbioit0testwriteandr-05041715-85ey-harness-5mwm,
      mongodbioit0testwriteandr-05041715-85ey-harness-5mwm,
      mongodbioit0testwriteandr-05041715-85ey-harness-5mwm
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
        at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:346)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:328)
        at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(MongoDBIOIT.java:118)

    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.202.112.125:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
        at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
        at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
        at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
        at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
        at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:158)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:133)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:128)
        at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:51)
        at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:36)
        at com.mongodb.Mongo.execute(Mongo.java:781)
        at com.mongodb.Mongo$2.execute(Mongo.java:764)
        at com.mongodb.MongoDatabaseImpl.drop(MongoDatabaseImpl.java:136)
        at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.tearDown(MongoDBIOIT.java:103)
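
The failure pattern above is consistent across the run: the Dataflow workers never get a socket open to the MongoDB LoadBalancer address, so the driver gives up after its 30000 ms server-selection window, both while flushing write batches and again when tearDown() tries to drop the database. As a rough illustration only (this is not the MongoDBIOIT source; the database and collection names are placeholders, the host is the LoadBalancer IP from this run, and connectTimeoutMS/serverSelectionTimeoutMS are standard MongoDB connection-string options), a pipeline of the same shape with an explicitly widened selection window might look like:

    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.io.GenerateSequence;
    import org.apache.beam.sdk.io.mongodb.MongoDbIO;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;
    import org.apache.beam.sdk.transforms.MapElements;
    import org.apache.beam.sdk.values.TypeDescriptor;
    import org.bson.Document;

    public class MongoWriteSketch {
      public static void main(String[] args) {
        Pipeline p = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());

        // Widen connect and server-selection timeouts via standard URI options.
        // 35.202.112.125 is the LoadBalancer IP from this run and is only a placeholder here.
        String uri = "mongodb://35.202.112.125:27017/"
            + "?connectTimeoutMS=60000&serverSelectionTimeoutMS=60000";

        p.apply("Generate sequence", GenerateSequence.from(0).to(1000))
            .apply("Produce documents",
                MapElements.into(TypeDescriptor.of(Document.class))
                    .via((Long i) -> new Document("_id", i).append("value", "doc-" + i)))
            .apply("Write documents to MongoDB",
                MongoDbIO.write()
                    .withUri(uri)
                    .withDatabase("beam")                 // placeholder database name
                    .withCollection("test_collection")); // placeholder collection name

        p.run().waitUntilFinish();
      }
    }

A longer window only helps if the LoadBalancer is merely slow to provision; if the address is simply unreachable from the worker network, as it appears to be here, the writes still fail, just later.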

1 test completed, 1 failed
Finished generating test XML results (0.02 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/63b5b56e/beam/sdks/java/io/mongodb/build/test-results/integrationTest>
Generating HTML test report...
Finished generating test html results (0.028 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/63b5b56e/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest>

> Task :beam-sdks-java-io-mongodb:integrationTest FAILED
:beam-sdks-java-io-mongodb:integrationTest (Thread[Task worker for ':' Thread 12,5,main]) completed. Took 8 mins 30.059 secs.

Deprecated Gradle features were used in this build, making it incompatible with Gradle 5.0.
See https://docs.gradle.org/4.7/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 46s
55 actionable tasks: 21 executed, 11 from cache, 23 up-to-date

Publishing build scan...
https://gradle.com/s/w6ezarc4povtq


STDERR: 
FAILURE: Build failed with an exception.

* What went wrong:
Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
> There were failing tests. See the report at: file://<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/63b5b56e/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>

* Try:
Run with --debug option to get more log output. Run with --scan to get full insights.

* Exception is:
org.gradle.api.tasks.TaskExecutionException: Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:103)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.execute(ExecuteActionsTaskExecuter.java:73)
	at org.gradle.api.internal.tasks.execution.OutputDirectoryCreatingTaskExecuter.execute(OutputDirectoryCreatingTaskExecuter.java:51)
	at org.gradle.api.internal.tasks.execution.SkipCachedTaskExecuter.execute(SkipCachedTaskExecuter.java:105)
	at org.gradle.api.internal.tasks.execution.SkipUpToDateTaskExecuter.execute(SkipUpToDateTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.ResolveTaskOutputCachingStateExecuter.execute(ResolveTaskOutputCachingStateExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ResolveBuildCacheKeyExecuter.execute(ResolveBuildCacheKeyExecuter.java:66)
	at org.gradle.api.internal.tasks.execution.ValidatingTaskExecuter.execute(ValidatingTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipEmptySourceFilesTaskExecuter.execute(SkipEmptySourceFilesTaskExecuter.java:101)
	at org.gradle.api.internal.tasks.execution.FinalizeInputFilePropertiesTaskExecuter.execute(FinalizeInputFilePropertiesTaskExecuter.java:44)
	at org.gradle.api.internal.tasks.execution.CleanupStaleOutputsExecuter.execute(CleanupStaleOutputsExecuter.java:91)
	at org.gradle.api.internal.tasks.execution.ResolveTaskArtifactStateTaskExecuter.execute(ResolveTaskArtifactStateTaskExecuter.java:62)
	at org.gradle.api.internal.tasks.execution.SkipTaskWithNoActionsExecuter.execute(SkipTaskWithNoActionsExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipOnlyIfTaskExecuter.execute(SkipOnlyIfTaskExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ExecuteAtMostOnceTaskExecuter.execute(ExecuteAtMostOnceTaskExecuter.java:43)
	at org.gradle.api.internal.tasks.execution.CatchExceptionTaskExecuter.execute(CatchExceptionTaskExecuter.java:34)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker$1.run(DefaultTaskGraphExecuter.java:256)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:249)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:238)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:104)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:98)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.execute(DefaultTaskExecutionPlan.java:663)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.executeWithTask(DefaultTaskExecutionPlan.java:596)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker.run(DefaultTaskPlanExecutor.java:98)
	at org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:63)
	at org.gradle.internal.concurrent.ManagedExecutorImpl$1.run(ManagedExecutorImpl.java:46)
	at org.gradle.internal.concurrent.ThreadFactoryImpl$ManagedThreadRunnable.run(ThreadFactoryImpl.java:55)
Caused by: org.gradle.api.GradleException: There were failing tests. See the report at: file://<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/63b5b56e/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>
	at org.gradle.api.tasks.testing.AbstractTestTask.handleTestFailures(AbstractTestTask.java:612)
	at org.gradle.api.tasks.testing.AbstractTestTask.executeTests(AbstractTestTask.java:484)
	at org.gradle.api.tasks.testing.Test.executeTests(Test.java:583)
	at org.gradle.internal.reflect.JavaMethod.invoke(JavaMethod.java:73)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.doExecute(StandardTaskAction.java:46)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:39)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:26)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:794)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:761)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter$1.run(ExecuteActionsTaskExecuter.java:124)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeAction(ExecuteActionsTaskExecuter.java:113)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:95)
	... 31 more


* Get more help at https://help.gradle.org

2018-05-05 00:24:04,431 63b5b56e MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 647, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 527, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py>", line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py>", line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-05 00:24:04,431 63b5b56e MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-05-05 00:24:04,432 63b5b56e MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525464580423> delete -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml> --ignore-not-found
2018-05-05 00:24:04,860 63b5b56e MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 781, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 647, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 527, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py>", line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py>", line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-05 00:24:04,861 63b5b56e MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-05-05 00:24:04,917 63b5b56e MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-05-05 00:24:04,918 63b5b56e MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/63b5b56e/pkb.log>
2018-05-05 00:24:04,918 63b5b56e MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/63b5b56e/completion_statuses.json>
Build step 'Execute shell' marked build as failure

Build failed in Jenkins: beam_PerformanceTests_MongoDBIO_IT #130

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/130/display/redirect?page=changes>

Changes:

[swegner] Remove Gradle worker cap.

[swegner] Re-enable additional Jenkins worker parallelism for Dataflow

[swegner] Adjust Jenkins Gradle memory usage based on max-workers

[swegner] Hard-code jenkins machines total memory until we find a working API

------------------------------------------
[...truncated 360.87 KB...]
    INFO: No server chosen by WritableServerSelector from cluster description ClusterDescription{type=UNKNOWN, connectionMode=SINGLE, all=[ServerDescription{address=35.226.235.90:27017, type=UNKNOWN, state=CONNECTING}]}. Waiting for 30000 ms before timing out
    May 04, 2018 6:18:15 PM com.mongodb.diagnostics.logging.SLF4JLogger info
    INFO: Opened connection [connectionId{localValue:1, serverValue:1}] to 35.226.235.90:27017
    May 04, 2018 6:18:15 PM com.mongodb.diagnostics.logging.SLF4JLogger info
    INFO: Monitor thread successfully connected to server with description ServerDescription{address=35.226.235.90:27017, type=STANDALONE, state=CONNECTED, ok=true, version=ServerVersion{versionList=[3, 6, 4]}, minWireVersion=0, maxWireVersion=6, maxDocumentSize=16777216, roundTripTimeNanos=1813396}
    May 04, 2018 6:18:15 PM com.mongodb.diagnostics.logging.SLF4JLogger info
    INFO: Opened connection [connectionId{localValue:2, serverValue:2}] to 35.226.235.90:27017

Gradle Test Executor 1 finished executing tests.

> Task :beam-sdks-java-io-mongodb:integrationTest FAILED

org.apache.beam.sdk.io.mongodb.MongoDBIOIT > testWriteAndRead FAILED
    java.lang.RuntimeException: com.mongodb.MongoSocketReadException: Prematurely reached end of stream
    	at com.mongodb.connection.SocketStream.read(SocketStream.java:88)
    	at com.mongodb.connection.InternalStreamConnection.receiveResponseBuffers(InternalStreamConnection.java:491)
    	at com.mongodb.connection.InternalStreamConnection.receiveMessage(InternalStreamConnection.java:221)
    	at com.mongodb.connection.UsageTrackingInternalConnection.receiveMessage(UsageTrackingInternalConnection.java:102)
    	at com.mongodb.connection.DefaultConnectionPool$PooledConnection.receiveMessage(DefaultConnectionPool.java:435)
    	at com.mongodb.connection.WriteCommandProtocol.receiveMessage(WriteCommandProtocol.java:234)
    	at com.mongodb.connection.WriteCommandProtocol.execute(WriteCommandProtocol.java:104)
    	at com.mongodb.connection.InsertCommandProtocol.execute(InsertCommandProtocol.java:67)
    	at com.mongodb.connection.InsertCommandProtocol.execute(InsertCommandProtocol.java:37)
    	at com.mongodb.connection.DefaultServer$DefaultServerProtocolExecutor.execute(DefaultServer.java:159)
    	at com.mongodb.connection.DefaultServerConnection.executeProtocol(DefaultServerConnection.java:286)
    	at com.mongodb.connection.DefaultServerConnection.insertCommand(DefaultServerConnection.java:115)
    	at com.mongodb.operation.MixedBulkWriteOperation$Run$2.executeWriteCommandProtocol(MixedBulkWriteOperation.java:455)
    	at com.mongodb.operation.MixedBulkWriteOperation$Run$RunExecutor.execute(MixedBulkWriteOperation.java:646)
    	at com.mongodb.operation.MixedBulkWriteOperation$Run.execute(MixedBulkWriteOperation.java:401)
    	at com.mongodb.operation.MixedBulkWriteOperation$1.call(MixedBulkWriteOperation.java:179)
    	at com.mongodb.operation.MixedBulkWriteOperation$1.call(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.OperationHelper.withConnectionSource(OperationHelper.java:230)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:221)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.226.235.90:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    Workflow failed. Causes: S01:Generate sequence/Read(BoundedCountingSource)+Produce documents/Map+Write documents to MongoDB/ParDo(Write) failed., A work item was attempted 4 times without success. Each time the worker eventually lost contact with the service. The work item was attempted on: 
      mongodbioit0testwriteandr-05041109-vx6l-harness-gbfg,
      mongodbioit0testwriteandr-05041109-vx6l-harness-gbfg,
      mongodbioit0testwriteandr-05041109-vx6l-harness-gbfg,
      mongodbioit0testwriteandr-05041109-vx6l-harness-gbfg
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
        at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:346)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:328)
        at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(MongoDBIOIT.java:118)
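
The flush()/processElement() frames at MongoDbIO.java:667 and :652 in the traces above point at a buffered write: documents are accumulated per bundle and pushed with insertMany() once a batch fills up, so both the "Prematurely reached end of stream" read error and the later server-selection timeouts surface inside processElement(). A minimal sketch of that batching pattern, with illustrative names and the plain 3.x MongoClient API rather than the actual WriteFn code:

    import java.util.ArrayList;
    import java.util.List;
    import com.mongodb.MongoClient;
    import com.mongodb.MongoClientURI;
    import org.bson.Document;

    // Sketch of a buffering writer: not Beam code, just the pattern implied by the stack frames.
    class BatchingWriterSketch implements AutoCloseable {
      private final MongoClient client;
      private final String database;
      private final String collection;
      private final int batchSize;
      private final List<Document> batch = new ArrayList<>();

      BatchingWriterSketch(String uri, String database, String collection, int batchSize) {
        this.client = new MongoClient(new MongoClientURI(uri));
        this.database = database;
        this.collection = collection;
        this.batchSize = batchSize;
      }

      void processElement(Document doc) {
        batch.add(doc);
        if (batch.size() >= batchSize) {
          // insertMany() happens here, so a MongoTimeoutException or a broken socket
          // propagates out of processElement(), exactly where the log shows it.
          flush();
        }
      }

      void flush() {
        if (batch.isEmpty()) {
          return;
        }
        client.getDatabase(database).getCollection(collection).insertMany(batch);
        batch.clear();
      }

      @Override
      public void close() {
        flush();
        client.close();
      }
    }

Because the insert happens per batch, every retried bundle attempts insertMany() against the same unreachable address again, which is why the same timeout shows up once per attempt and why Dataflow reports the work item as attempted 4 times without success.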

1 test completed, 1 failed
Finished generating test XML results (0.019 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/95d031b4/beam/sdks/java/io/mongodb/build/test-results/integrationTest>
Generating HTML test report...
Finished generating test html results (0.027 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/95d031b4/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest>
:beam-sdks-java-io-mongodb:integrationTest (Thread[Task worker for ':' Thread 13,5,main]) completed. Took 9 mins 32.227 secs.

Deprecated Gradle features were used in this build, making it incompatible with Gradle 5.0.
See https://docs.gradle.org/4.7/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 10m 25s
55 actionable tasks: 19 executed, 13 from cache, 23 up-to-date

Publishing build scan...
https://gradle.com/s/vc77iqpaxq2fw


STDERR: 
FAILURE: Build failed with an exception.

* What went wrong:
Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
> There were failing tests. See the report at: file://<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/95d031b4/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>

* Try:
Run with --debug option to get more log output. Run with --scan to get full insights.

* Exception is:
org.gradle.api.tasks.TaskExecutionException: Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:103)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.execute(ExecuteActionsTaskExecuter.java:73)
	at org.gradle.api.internal.tasks.execution.OutputDirectoryCreatingTaskExecuter.execute(OutputDirectoryCreatingTaskExecuter.java:51)
	at org.gradle.api.internal.tasks.execution.SkipCachedTaskExecuter.execute(SkipCachedTaskExecuter.java:105)
	at org.gradle.api.internal.tasks.execution.SkipUpToDateTaskExecuter.execute(SkipUpToDateTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.ResolveTaskOutputCachingStateExecuter.execute(ResolveTaskOutputCachingStateExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ResolveBuildCacheKeyExecuter.execute(ResolveBuildCacheKeyExecuter.java:66)
	at org.gradle.api.internal.tasks.execution.ValidatingTaskExecuter.execute(ValidatingTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipEmptySourceFilesTaskExecuter.execute(SkipEmptySourceFilesTaskExecuter.java:101)
	at org.gradle.api.internal.tasks.execution.FinalizeInputFilePropertiesTaskExecuter.execute(FinalizeInputFilePropertiesTaskExecuter.java:44)
	at org.gradle.api.internal.tasks.execution.CleanupStaleOutputsExecuter.execute(CleanupStaleOutputsExecuter.java:91)
	at org.gradle.api.internal.tasks.execution.ResolveTaskArtifactStateTaskExecuter.execute(ResolveTaskArtifactStateTaskExecuter.java:62)
	at org.gradle.api.internal.tasks.execution.SkipTaskWithNoActionsExecuter.execute(SkipTaskWithNoActionsExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipOnlyIfTaskExecuter.execute(SkipOnlyIfTaskExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ExecuteAtMostOnceTaskExecuter.execute(ExecuteAtMostOnceTaskExecuter.java:43)
	at org.gradle.api.internal.tasks.execution.CatchExceptionTaskExecuter.execute(CatchExceptionTaskExecuter.java:34)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker$1.run(DefaultTaskGraphExecuter.java:256)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:249)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:238)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:104)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:98)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.execute(DefaultTaskExecutionPlan.java:663)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.executeWithTask(DefaultTaskExecutionPlan.java:596)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker.run(DefaultTaskPlanExecutor.java:98)
	at org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:63)
	at org.gradle.internal.concurrent.ManagedExecutorImpl$1.run(ManagedExecutorImpl.java:46)
	at org.gradle.internal.concurrent.ThreadFactoryImpl$ManagedThreadRunnable.run(ThreadFactoryImpl.java:55)
Caused by: org.gradle.api.GradleException: There were failing tests. See the report at: file://<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/95d031b4/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>
	at org.gradle.api.tasks.testing.AbstractTestTask.handleTestFailures(AbstractTestTask.java:612)
	at org.gradle.api.tasks.testing.AbstractTestTask.executeTests(AbstractTestTask.java:484)
	at org.gradle.api.tasks.testing.Test.executeTests(Test.java:583)
	at org.gradle.internal.reflect.JavaMethod.invoke(JavaMethod.java:73)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.doExecute(StandardTaskAction.java:46)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:39)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:26)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:794)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:761)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter$1.run(ExecuteActionsTaskExecuter.java:124)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeAction(ExecuteActionsTaskExecuter.java:113)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:95)
	... 31 more


* Get more help at https://help.gradle.org

2018-05-04 18:18:17,117 95d031b4 MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py>", line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py>", line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-04 18:18:17,118 95d031b4 MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-05-04 18:18:17,119 95d031b4 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525446260217> delete -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml> --ignore-not-found
2018-05-04 18:18:17,592 95d031b4 MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 780, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py>", line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py>", line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-04 18:18:17,593 95d031b4 MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-05-04 18:18:17,644 95d031b4 MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-05-04 18:18:17,644 95d031b4 MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/95d031b4/pkb.log>
2018-05-04 18:18:17,645 95d031b4 MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/95d031b4/completion_statuses.json>
Build step 'Execute shell' marked build as failure

Build failed in Jenkins: beam_PerformanceTests_MongoDBIO_IT #129

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/129/display/redirect?page=changes>

Changes:

[iemejia] Make MetricsHttpSinkTest more concise and fix typo

------------------------------------------
[...truncated 320.22 KB...]
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.193.0.151:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    Workflow failed. Causes: S01:Generate sequence/Read(BoundedCountingSource)+Produce documents/Map+Write documents to MongoDB/ParDo(Write) failed., A work item was attempted 4 times without success. Each time the worker eventually lost contact with the service. The work item was attempted on: 
      mongodbioit0testwriteandr-05040505-79lr-harness-9cnk,
      mongodbioit0testwriteandr-05040505-79lr-harness-9cnk,
      mongodbioit0testwriteandr-05040505-79lr-harness-9cnk,
      mongodbioit0testwriteandr-05040505-79lr-harness-9cnk
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
        at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:346)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:328)
        at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(MongoDBIOIT.java:118)

    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.193.0.151:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
        at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
        at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
        at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
        at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
        at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:158)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:133)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:128)
        at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:51)
        at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:36)
        at com.mongodb.Mongo.execute(Mongo.java:781)
        at com.mongodb.Mongo$2.execute(Mongo.java:764)
        at com.mongodb.MongoDatabaseImpl.drop(MongoDatabaseImpl.java:136)
        at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.tearDown(MongoDBIOIT.java:103)
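
The tearDown() frame above shows the cleanup path failing in the same way as the writes: dropping the test database needs a writable server, so MongoDatabase.drop() waits out the same WritableServerSelector window against the unreachable LoadBalancer IP before timing out. A hedged sketch of that teardown step (illustrative host and database names, not the MongoDBIOIT code):

    import com.mongodb.MongoClient;
    import com.mongodb.MongoClientURI;

    public class TearDownSketch {
      public static void main(String[] args) {
        // 35.193.0.151 is the LoadBalancer IP from this run; "beam" is a placeholder database name.
        MongoClient client = new MongoClient(new MongoClientURI("mongodb://35.193.0.151:27017"));
        try {
          // drop() issues a dropDatabase command, which requires a writable server,
          // so it blocks on server selection just like the insertMany() calls above.
          client.getDatabase("beam").drop();
        } finally {
          client.close();
        }
      }
    }

Raising serverSelectionTimeoutMS on this client would only delay the failure; the underlying problem in these runs is that the LoadBalancer address never becomes reachable within the test's lifetime.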

1 test completed, 1 failed
Finished generating test XML results (0.024 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/9bb74f6e/beam/sdks/java/io/mongodb/build/test-results/integrationTest>
Generating HTML test report...
Finished generating test html results (0.029 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/9bb74f6e/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest>

> Task :beam-sdks-java-io-mongodb:integrationTest FAILED
:beam-sdks-java-io-mongodb:integrationTest (Thread[main,5,main]) completed. Took 6 mins 41.618 secs.

Deprecated Gradle features were used in this build, making it incompatible with Gradle 5.0.
See https://docs.gradle.org/4.7/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 59s
55 actionable tasks: 21 executed, 11 from cache, 23 up-to-date

Publishing build scan...
https://gradle.com/s/gplbiq6l7wbgw


STDERR: 
FAILURE: Build failed with an exception.

* What went wrong:
Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
> There were failing tests. See the report at: file://<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/9bb74f6e/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>

* Try:
Run with --debug option to get more log output. Run with --scan to get full insights.

* Exception is:
org.gradle.api.tasks.TaskExecutionException: Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:103)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.execute(ExecuteActionsTaskExecuter.java:73)
	at org.gradle.api.internal.tasks.execution.OutputDirectoryCreatingTaskExecuter.execute(OutputDirectoryCreatingTaskExecuter.java:51)
	at org.gradle.api.internal.tasks.execution.SkipCachedTaskExecuter.execute(SkipCachedTaskExecuter.java:105)
	at org.gradle.api.internal.tasks.execution.SkipUpToDateTaskExecuter.execute(SkipUpToDateTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.ResolveTaskOutputCachingStateExecuter.execute(ResolveTaskOutputCachingStateExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ResolveBuildCacheKeyExecuter.execute(ResolveBuildCacheKeyExecuter.java:66)
	at org.gradle.api.internal.tasks.execution.ValidatingTaskExecuter.execute(ValidatingTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipEmptySourceFilesTaskExecuter.execute(SkipEmptySourceFilesTaskExecuter.java:101)
	at org.gradle.api.internal.tasks.execution.FinalizeInputFilePropertiesTaskExecuter.execute(FinalizeInputFilePropertiesTaskExecuter.java:44)
	at org.gradle.api.internal.tasks.execution.CleanupStaleOutputsExecuter.execute(CleanupStaleOutputsExecuter.java:91)
	at org.gradle.api.internal.tasks.execution.ResolveTaskArtifactStateTaskExecuter.execute(ResolveTaskArtifactStateTaskExecuter.java:62)
	at org.gradle.api.internal.tasks.execution.SkipTaskWithNoActionsExecuter.execute(SkipTaskWithNoActionsExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipOnlyIfTaskExecuter.execute(SkipOnlyIfTaskExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ExecuteAtMostOnceTaskExecuter.execute(ExecuteAtMostOnceTaskExecuter.java:43)
	at org.gradle.api.internal.tasks.execution.CatchExceptionTaskExecuter.execute(CatchExceptionTaskExecuter.java:34)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker$1.run(DefaultTaskGraphExecuter.java:256)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:249)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:238)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:104)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:98)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.execute(DefaultTaskExecutionPlan.java:663)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.executeWithTask(DefaultTaskExecutionPlan.java:596)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker.run(DefaultTaskPlanExecutor.java:98)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor.process(DefaultTaskPlanExecutor.java:59)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter.execute(DefaultTaskGraphExecuter.java:130)
	at org.gradle.execution.SelectedTaskExecutionAction.execute(SelectedTaskExecutionAction.java:37)
	at org.gradle.execution.DefaultBuildExecuter.execute(DefaultBuildExecuter.java:37)
	at org.gradle.execution.DefaultBuildExecuter.access$000(DefaultBuildExecuter.java:23)
	at org.gradle.execution.DefaultBuildExecuter$1.proceed(DefaultBuildExecuter.java:43)
	at org.gradle.execution.DryRunBuildExecutionAction.execute(DryRunBuildExecutionAction.java:46)
	at org.gradle.execution.DefaultBuildExecuter.execute(DefaultBuildExecuter.java:37)
	at org.gradle.execution.DefaultBuildExecuter.execute(DefaultBuildExecuter.java:30)
	at org.gradle.initialization.DefaultGradleLauncher$ExecuteTasks.run(DefaultGradleLauncher.java:336)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.initialization.DefaultGradleLauncher.runTasks(DefaultGradleLauncher.java:210)
	at org.gradle.initialization.DefaultGradleLauncher.doBuildStages(DefaultGradleLauncher.java:140)
	at org.gradle.initialization.DefaultGradleLauncher.executeTasks(DefaultGradleLauncher.java:115)
	at org.gradle.internal.invocation.GradleBuildController$1.call(GradleBuildController.java:78)
	at org.gradle.internal.invocation.GradleBuildController$1.call(GradleBuildController.java:75)
	at org.gradle.internal.work.DefaultWorkerLeaseService.withLocks(DefaultWorkerLeaseService.java:152)
	at org.gradle.internal.work.StopShieldingWorkerLeaseService.withLocks(StopShieldingWorkerLeaseService.java:38)
	at org.gradle.internal.invocation.GradleBuildController.doBuild(GradleBuildController.java:100)
	at org.gradle.internal.invocation.GradleBuildController.run(GradleBuildController.java:75)
	at org.gradle.tooling.internal.provider.ExecuteBuildActionRunner.run(ExecuteBuildActionRunner.java:28)
	at org.gradle.launcher.exec.ChainingBuildActionRunner.run(ChainingBuildActionRunner.java:35)
	at org.gradle.tooling.internal.provider.ValidatingBuildActionRunner.run(ValidatingBuildActionRunner.java:32)
	at org.gradle.launcher.exec.RunAsBuildOperationBuildActionRunner$3.run(RunAsBuildOperationBuildActionRunner.java:45)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.launcher.exec.RunAsBuildOperationBuildActionRunner.run(RunAsBuildOperationBuildActionRunner.java:42)
	at org.gradle.tooling.internal.provider.SubscribableBuildActionRunner.run(SubscribableBuildActionRunner.java:51)
	at org.gradle.launcher.exec.InProcessBuildActionExecuter.execute(InProcessBuildActionExecuter.java:47)
	at org.gradle.launcher.exec.InProcessBuildActionExecuter.execute(InProcessBuildActionExecuter.java:31)
	at org.gradle.launcher.exec.BuildTreeScopeBuildActionExecuter.execute(BuildTreeScopeBuildActionExecuter.java:39)
	at org.gradle.launcher.exec.BuildTreeScopeBuildActionExecuter.execute(BuildTreeScopeBuildActionExecuter.java:25)
	at org.gradle.tooling.internal.provider.ContinuousBuildActionExecuter.execute(ContinuousBuildActionExecuter.java:80)
	at org.gradle.tooling.internal.provider.ContinuousBuildActionExecuter.execute(ContinuousBuildActionExecuter.java:53)
	at org.gradle.tooling.internal.provider.ServicesSetupBuildActionExecuter.execute(ServicesSetupBuildActionExecuter.java:61)
	at org.gradle.tooling.internal.provider.ServicesSetupBuildActionExecuter.execute(ServicesSetupBuildActionExecuter.java:34)
	at org.gradle.tooling.internal.provider.GradleThreadBuildActionExecuter.execute(GradleThreadBuildActionExecuter.java:36)
	at org.gradle.tooling.internal.provider.GradleThreadBuildActionExecuter.execute(GradleThreadBuildActionExecuter.java:25)
	at org.gradle.tooling.internal.provider.ParallelismConfigurationBuildActionExecuter.execute(ParallelismConfigurationBuildActionExecuter.java:43)
	at org.gradle.tooling.internal.provider.ParallelismConfigurationBuildActionExecuter.execute(ParallelismConfigurationBuildActionExecuter.java:29)
	at org.gradle.tooling.internal.provider.StartParamsValidatingActionExecuter.execute(StartParamsValidatingActionExecuter.java:64)
	at org.gradle.tooling.internal.provider.StartParamsValidatingActionExecuter.execute(StartParamsValidatingActionExecuter.java:29)
	at org.gradle.tooling.internal.provider.SessionFailureReportingActionExecuter.execute(SessionFailureReportingActionExecuter.java:59)
	at org.gradle.tooling.internal.provider.SessionFailureReportingActionExecuter.execute(SessionFailureReportingActionExecuter.java:44)
	at org.gradle.tooling.internal.provider.SetupLoggingActionExecuter.execute(SetupLoggingActionExecuter.java:46)
	at org.gradle.tooling.internal.provider.SetupLoggingActionExecuter.execute(SetupLoggingActionExecuter.java:30)
	at org.gradle.launcher.cli.RunBuildAction.run(RunBuildAction.java:52)
	at org.gradle.internal.Actions$RunnableActionAdapter.execute(Actions.java:173)
	at org.gradle.launcher.cli.CommandLineActionFactory$ParseAndBuildAction.execute(CommandLineActionFactory.java:379)
	at org.gradle.launcher.cli.CommandLineActionFactory$ParseAndBuildAction.execute(CommandLineActionFactory.java:352)
	at org.gradle.launcher.cli.ExceptionReportingAction.execute(ExceptionReportingAction.java:37)
	at org.gradle.launcher.cli.ExceptionReportingAction.execute(ExceptionReportingAction.java:23)
	at org.gradle.launcher.cli.CommandLineActionFactory$WithLogging.execute(CommandLineActionFactory.java:345)
	at org.gradle.launcher.cli.CommandLineActionFactory$WithLogging.execute(CommandLineActionFactory.java:275)
	at org.gradle.launcher.Main.doAction(Main.java:36)
	at org.gradle.launcher.bootstrap.EntryPoint.run(EntryPoint.java:45)
	at org.gradle.launcher.bootstrap.ProcessBootstrap.runNoExit(ProcessBootstrap.java:60)
	at org.gradle.launcher.bootstrap.ProcessBootstrap.run(ProcessBootstrap.java:37)
	at org.gradle.launcher.GradleMain.main(GradleMain.java:23)
	at org.gradle.wrapper.BootstrapMainStarter.start(BootstrapMainStarter.java:31)
	at org.gradle.wrapper.WrapperExecutor.execute(WrapperExecutor.java:108)
	at org.gradle.wrapper.GradleWrapperMain.main(GradleWrapperMain.java:61)
Caused by: org.gradle.api.GradleException: There were failing tests. See the report at: file://<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/9bb74f6e/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>
	at org.gradle.api.tasks.testing.AbstractTestTask.handleTestFailures(AbstractTestTask.java:612)
	at org.gradle.api.tasks.testing.AbstractTestTask.executeTests(AbstractTestTask.java:484)
	at org.gradle.api.tasks.testing.Test.executeTests(Test.java:583)
	at org.gradle.internal.reflect.JavaMethod.invoke(JavaMethod.java:73)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.doExecute(StandardTaskAction.java:46)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:39)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:26)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:794)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:761)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter$1.run(ExecuteActionsTaskExecuter.java:124)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeAction(ExecuteActionsTaskExecuter.java:113)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:95)
	... 97 more


* Get more help at https://help.gradle.org

2018-05-04 12:12:15,292 9bb74f6e MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-04 12:12:15,293 9bb74f6e MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-05-04 12:12:15,294 9bb74f6e MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525428103395> delete -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml> --ignore-not-found
2018-05-04 12:12:15,994 9bb74f6e MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 780, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-04 12:12:15,994 9bb74f6e MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-05-04 12:12:16,013 9bb74f6e MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-05-04 12:12:16,014 9bb74f6e MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/9bb74f6e/pkb.log>
2018-05-04 12:12:16,014 9bb74f6e MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/9bb74f6e/completion_statuses.json>
Build step 'Execute shell' marked build as failure

Build failed in Jenkins: beam_PerformanceTests_MongoDBIO_IT #128

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/128/display/redirect?page=changes>

Changes:

[tgroh] Add an Unsupported StateRequestHandler

[apilloud] [SQL] Use the same javacc and fmpp as calcite

[apilloud] [SQL] BeamQueryPlanner uses BeamSqlParserImpl

------------------------------------------
[...truncated 536.80 KB...]
    	at com.google.cloud.dataflow.worker.WorkerCustomSources$BoundedReaderIterator.start(WorkerCustomSources.java:592)
    	... 14 more
    java.io.IOException: Failed to start reading from source: org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbSource@9ea3bd7
    	at com.google.cloud.dataflow.worker.WorkerCustomSources$BoundedReaderIterator.start(WorkerCustomSources.java:595)
    	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation$SynchronizedReaderIterator.start(ReadOperation.java:360)
    	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.runReadLoop(ReadOperation.java:193)
    	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.start(ReadOperation.java:158)
    	at com.google.cloud.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:75)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
    	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    	at java.lang.Thread.run(Thread.java:745)
    Caused by: com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches ReadPreferenceServerSelector{readPreference=primary}. Client view of cluster state is {type=UNKNOWN, servers=[{address=104.154.41.215:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getReadConnectionSource(ClusterBinding.java:63)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:210)
    	at com.mongodb.operation.FindOperation.execute(FindOperation.java:482)
    	at com.mongodb.operation.FindOperation.execute(FindOperation.java:79)
    	at com.mongodb.Mongo.execute(Mongo.java:772)
    	at com.mongodb.Mongo$2.execute(Mongo.java:759)
    	at com.mongodb.OperationIterable.iterator(OperationIterable.java:47)
    	at com.mongodb.FindIterableImpl.iterator(FindIterableImpl.java:143)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbReader.start(MongoDbIO.java:456)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources$BoundedReaderIterator.start(WorkerCustomSources.java:592)
    	... 14 more
    java.io.IOException: Failed to start reading from source: org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbSource@19013fff
    	at com.google.cloud.dataflow.worker.WorkerCustomSources$BoundedReaderIterator.start(WorkerCustomSources.java:595)
    	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation$SynchronizedReaderIterator.start(ReadOperation.java:360)
    	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.runReadLoop(ReadOperation.java:193)
    	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.start(ReadOperation.java:158)
    	at com.google.cloud.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:75)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
    	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    	at java.lang.Thread.run(Thread.java:745)
    Caused by: com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches ReadPreferenceServerSelector{readPreference=primary}. Client view of cluster state is {type=UNKNOWN, servers=[{address=104.154.41.215:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getReadConnectionSource(ClusterBinding.java:63)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:210)
    	at com.mongodb.operation.FindOperation.execute(FindOperation.java:482)
    	at com.mongodb.operation.FindOperation.execute(FindOperation.java:79)
    	at com.mongodb.Mongo.execute(Mongo.java:772)
    	at com.mongodb.Mongo$2.execute(Mongo.java:759)
    	at com.mongodb.OperationIterable.iterator(OperationIterable.java:47)
    	at com.mongodb.FindIterableImpl.iterator(FindIterableImpl.java:143)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbReader.start(MongoDbIO.java:456)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources$BoundedReaderIterator.start(WorkerCustomSources.java:592)
    	... 14 more
    java.io.IOException: Failed to start reading from source: org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbSource@6639b207
    	at com.google.cloud.dataflow.worker.WorkerCustomSources$BoundedReaderIterator.start(WorkerCustomSources.java:595)
    	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation$SynchronizedReaderIterator.start(ReadOperation.java:360)
    	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.runReadLoop(ReadOperation.java:193)
    	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.start(ReadOperation.java:158)
    	at com.google.cloud.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:75)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
    	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    	at java.lang.Thread.run(Thread.java:745)
    Caused by: com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches ReadPreferenceServerSelector{readPreference=primary}. Client view of cluster state is {type=UNKNOWN, servers=[{address=104.154.41.215:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getReadConnectionSource(ClusterBinding.java:63)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:210)
    	at com.mongodb.operation.FindOperation.execute(FindOperation.java:482)
    	at com.mongodb.operation.FindOperation.execute(FindOperation.java:79)
    	at com.mongodb.Mongo.execute(Mongo.java:772)
    	at com.mongodb.Mongo$2.execute(Mongo.java:759)
    	at com.mongodb.OperationIterable.iterator(OperationIterable.java:47)
    	at com.mongodb.FindIterableImpl.iterator(FindIterableImpl.java:143)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbReader.start(MongoDbIO.java:456)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources$BoundedReaderIterator.start(WorkerCustomSources.java:592)
    	... 14 more
    Workflow failed. Causes: S03:Read all documents/Read(BoundedMongoDbSource)+Map documents to Strings/Map+Calculate hashcode/WithKeys/AddKeys/Map+Calculate hashcode/Combine.perKey(Hashing)/GroupByKey+Calculate hashcode/Combine.perKey(Hashing)/Combine.GroupedValues/Partial+Calculate hashcode/Combine.perKey(Hashing)/GroupByKey/Reify+Calculate hashcode/Combine.perKey(Hashing)/GroupByKey/Write failed., A work item was attempted 4 times without success. Each time the worker eventually lost contact with the service. The work item was attempted on: 
      mongodbioit0testwriteandr-05032312-iorq-harness-nvlf,
      mongodbioit0testwriteandr-05032312-iorq-harness-nvlf,
      mongodbioit0testwriteandr-05032312-iorq-harness-nvlf,
      mongodbioit0testwriteandr-05032312-iorq-harness-nvlf
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
        at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:346)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:328)
        at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(MongoDBIOIT.java:131)
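
For reference, the step names in the failed workflow above ("Read all documents", "Map documents to Strings", "Calculate hashcode") describe a read-and-verify pipeline of roughly the following shape. This is only a minimal sketch under assumed names (URI, database, collection, expected count), with a simple count standing in for the test's hash check; it is not the MongoDBIOIT source.

    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.io.mongodb.MongoDbIO;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;
    import org.apache.beam.sdk.testing.PAssert;
    import org.apache.beam.sdk.transforms.Count;
    import org.apache.beam.sdk.transforms.MapElements;
    import org.apache.beam.sdk.values.PCollection;
    import org.apache.beam.sdk.values.TypeDescriptors;
    import org.bson.Document;

    public class MongoDbReadSketch {
      public static void main(String[] args) {
        Pipeline p = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());

        // Read back every document written by the write phase of the test.
        PCollection<Document> docs =
            p.apply("Read all documents",
                MongoDbIO.read()
                    .withUri("mongodb://104.154.41.215:27017")  // LoadBalancer IP seen in the traces
                    .withDatabase("beam")                       // assumed name
                    .withCollection("test_collection"));        // assumed name

        // The real test hashes the document contents; a count is used here
        // only to keep the sketch self-contained.
        PCollection<Long> count =
            docs.apply("Map documents to Strings",
                    MapElements.into(TypeDescriptors.strings()).via((Document doc) -> doc.toJson()))
                .apply("Count documents", Count.globally());

        PAssert.thatSingleton(count).isEqualTo(1000L);  // expected size is an assumption
        p.run().waitUntilFinish();
      }
    }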

    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=104.154.41.215:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
        at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
        at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
        at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
        at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
        at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:158)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:133)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:128)
        at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:51)
        at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:36)
        at com.mongodb.Mongo.execute(Mongo.java:781)
        at com.mongodb.Mongo$2.execute(Mongo.java:764)
        at com.mongodb.MongoDatabaseImpl.drop(MongoDatabaseImpl.java:136)
        at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.tearDown(MongoDBIOIT.java:103)

1 test completed, 1 failed
Finished generating test XML results (0.051 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/500af69a/beam/sdks/java/io/mongodb/build/test-results/integrationTest>
Generating HTML test report...
Finished generating test html results (0.039 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/500af69a/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest>

> Task :beam-sdks-java-io-mongodb:integrationTest FAILED
:beam-sdks-java-io-mongodb:integrationTest (Thread[Task worker for ':',5,main]) completed. Took 13 mins 15.378 secs.

Deprecated Gradle features were used in this build, making it incompatible with Gradle 5.0.
See https://docs.gradle.org/4.7/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 15m 22s
55 actionable tasks: 32 executed, 23 up-to-date

Publishing build scan...
https://gradle.com/s/xjscptb44o7gy


STDERR: 
FAILURE: Build failed with an exception.

* What went wrong:
Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
> There were failing tests. See the report at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/500af69a/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>

* Try:
Run with --debug option to get more log output. Run with --scan to get full insights.

* Exception is:
org.gradle.api.tasks.TaskExecutionException: Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:103)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.execute(ExecuteActionsTaskExecuter.java:73)
	at org.gradle.api.internal.tasks.execution.OutputDirectoryCreatingTaskExecuter.execute(OutputDirectoryCreatingTaskExecuter.java:51)
	at org.gradle.api.internal.tasks.execution.SkipCachedTaskExecuter.execute(SkipCachedTaskExecuter.java:105)
	at org.gradle.api.internal.tasks.execution.SkipUpToDateTaskExecuter.execute(SkipUpToDateTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.ResolveTaskOutputCachingStateExecuter.execute(ResolveTaskOutputCachingStateExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ResolveBuildCacheKeyExecuter.execute(ResolveBuildCacheKeyExecuter.java:66)
	at org.gradle.api.internal.tasks.execution.ValidatingTaskExecuter.execute(ValidatingTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipEmptySourceFilesTaskExecuter.execute(SkipEmptySourceFilesTaskExecuter.java:101)
	at org.gradle.api.internal.tasks.execution.FinalizeInputFilePropertiesTaskExecuter.execute(FinalizeInputFilePropertiesTaskExecuter.java:44)
	at org.gradle.api.internal.tasks.execution.CleanupStaleOutputsExecuter.execute(CleanupStaleOutputsExecuter.java:91)
	at org.gradle.api.internal.tasks.execution.ResolveTaskArtifactStateTaskExecuter.execute(ResolveTaskArtifactStateTaskExecuter.java:62)
	at org.gradle.api.internal.tasks.execution.SkipTaskWithNoActionsExecuter.execute(SkipTaskWithNoActionsExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipOnlyIfTaskExecuter.execute(SkipOnlyIfTaskExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ExecuteAtMostOnceTaskExecuter.execute(ExecuteAtMostOnceTaskExecuter.java:43)
	at org.gradle.api.internal.tasks.execution.CatchExceptionTaskExecuter.execute(CatchExceptionTaskExecuter.java:34)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker$1.run(DefaultTaskGraphExecuter.java:256)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:249)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:238)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:104)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:98)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.execute(DefaultTaskExecutionPlan.java:663)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.executeWithTask(DefaultTaskExecutionPlan.java:596)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker.run(DefaultTaskPlanExecutor.java:98)
	at org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:63)
	at org.gradle.internal.concurrent.ManagedExecutorImpl$1.run(ManagedExecutorImpl.java:46)
	at org.gradle.internal.concurrent.ThreadFactoryImpl$ManagedThreadRunnable.run(ThreadFactoryImpl.java:55)
Caused by: org.gradle.api.GradleException: There were failing tests. See the report at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/500af69a/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>
	at org.gradle.api.tasks.testing.AbstractTestTask.handleTestFailures(AbstractTestTask.java:612)
	at org.gradle.api.tasks.testing.AbstractTestTask.executeTests(AbstractTestTask.java:484)
	at org.gradle.api.tasks.testing.Test.executeTests(Test.java:583)
	at org.gradle.internal.reflect.JavaMethod.invoke(JavaMethod.java:73)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.doExecute(StandardTaskAction.java:46)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:39)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:26)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:794)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:761)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter$1.run(ExecuteActionsTaskExecuter.java:124)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeAction(ExecuteActionsTaskExecuter.java:113)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:95)
	... 31 more


* Get more help at https://help.gradle.org

2018-05-04 06:19:32,594 500af69a MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-04 06:19:32,594 500af69a MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-05-04 06:19:32,595 500af69a MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525410093131> delete -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml> --ignore-not-found
2018-05-04 06:19:33,073 500af69a MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 780, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-04 06:19:33,074 500af69a MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-05-04 06:19:33,123 500af69a MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-05-04 06:19:33,123 500af69a MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/500af69a/pkb.log>
2018-05-04 06:19:33,124 500af69a MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/500af69a/completion_statuses.json>
Build step 'Execute shell' marked build as failure

Build failed in Jenkins: beam_PerformanceTests_MongoDBIO_IT #127

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/127/display/redirect?page=changes>

Changes:

[apilloud] [SQL] Add BeamEnumerableConverter

[mairbek] Templatize host name in SpannerIO

[coheigea] A few small fixes to catch a few regressions - put String literals

[sidhom] Move SyntheticNodes to top-level construction package and rename

[sidhom] Pull Flink execution environment utilities into static methods

[sidhom] Refactor Runner/SDK wire coder creation into a shared utility

[sidhom] [BEAM-3972] Translate portable batch pipelines by proto

[sidhom] FIXUP: Use ModelCoders to create proto coders and use WireCoders to

[sidhom] FIXUP: Use RunnerApi in WireCoders to prepare for coder instantiation

[tgroh] Add additional methods to QueryablePipeline

[tgroh] Expand the ExecutableGraph interface

[tgroh] Add ProtoOverrides

[jasonkuster] Add labels to Go SDK Dataflow Runner.

[jasonkuster] Add verbose error message if unmarshaling fails for label flag.

------------------------------------------
[...truncated 493.68 KB...]
    	at com.mongodb.connection.WriteCommandProtocol.receiveMessage(WriteCommandProtocol.java:234)
    	at com.mongodb.connection.WriteCommandProtocol.execute(WriteCommandProtocol.java:104)
    	at com.mongodb.connection.InsertCommandProtocol.execute(InsertCommandProtocol.java:67)
    	at com.mongodb.connection.InsertCommandProtocol.execute(InsertCommandProtocol.java:37)
    	at com.mongodb.connection.DefaultServer$DefaultServerProtocolExecutor.execute(DefaultServer.java:159)
    	at com.mongodb.connection.DefaultServerConnection.executeProtocol(DefaultServerConnection.java:286)
    	at com.mongodb.connection.DefaultServerConnection.insertCommand(DefaultServerConnection.java:115)
    	at com.mongodb.operation.MixedBulkWriteOperation$Run$2.executeWriteCommandProtocol(MixedBulkWriteOperation.java:455)
    	at com.mongodb.operation.MixedBulkWriteOperation$Run$RunExecutor.execute(MixedBulkWriteOperation.java:646)
    	at com.mongodb.operation.MixedBulkWriteOperation$Run.execute(MixedBulkWriteOperation.java:401)
    	at com.mongodb.operation.MixedBulkWriteOperation$1.call(MixedBulkWriteOperation.java:179)
    	at com.mongodb.operation.MixedBulkWriteOperation$1.call(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.OperationHelper.withConnectionSource(OperationHelper.java:230)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:221)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn$DoFnInvoker.invokeProcessElement(Unknown Source)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:177)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:141)
    	at com.google.cloud.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:323)
    	at com.google.cloud.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:43)
    	at com.google.cloud.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:48)
    	at com.google.cloud.dataflow.worker.SimpleParDoFn$1.output(SimpleParDoFn.java:271)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:211)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.access$700(SimpleDoFnRunner.java:66)
    	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:436)
    	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:424)
    	at org.apache.beam.sdk.transforms.MapElements$1.processElement(MapElements.java:129)
    	at org.apache.beam.sdk.transforms.MapElements$1$DoFnInvoker.invokeProcessElement(Unknown Source)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:177)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:141)
    	at com.google.cloud.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:323)
    	at com.google.cloud.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:43)
    	at com.google.cloud.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:48)
    	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.runReadLoop(ReadOperation.java:200)
    	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.start(ReadOperation.java:158)
    	at com.google.cloud.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:75)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
    	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    	at java.lang.Thread.run(Thread.java:745)
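
The WriteFn frames above (MongoDbIO$Write$WriteFn.flush calling MongoCollectionImpl.insertMany) correspond to a buffer-and-flush write path along these lines. This is a rough illustration only, not the MongoDbIO source; the class name and batch size are invented for the example.

    import com.mongodb.client.MongoCollection;
    import org.bson.Document;

    import java.util.ArrayList;
    import java.util.List;

    // Rough illustration of the buffer-and-flush pattern suggested by the
    // WriteFn stack frames above; names and batch size are invented.
    class BatchingMongoWriter {
      private final MongoCollection<Document> collection;
      private final List<Document> batch = new ArrayList<>();
      private final int batchSize;

      BatchingMongoWriter(MongoCollection<Document> collection, int batchSize) {
        this.collection = collection;
        this.batchSize = batchSize;
      }

      void add(Document document) {
        batch.add(document);
        if (batch.size() >= batchSize) {
          flush();
        }
      }

      void flush() {
        if (batch.isEmpty()) {
          return;
        }
        // insertMany is the call that fails in the traces above when no
        // writable server can be selected within the 30 000 ms default.
        collection.insertMany(new ArrayList<>(batch));
        batch.clear();
      }
    }
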
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.193.81.82:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.193.81.82:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.193.81.82:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    Workflow failed. Causes: S01:Generate sequence/Read(BoundedCountingSource)+Produce documents/Map+Write documents to MongoDB/ParDo(Write) failed., A work item was attempted 4 times without success. Each time the worker eventually lost contact with the service. The work item was attempted on: 
      mongodbioit0testwriteandr-05031706-3733-harness-jvvd,
      mongodbioit0testwriteandr-05031706-3733-harness-jvvd,
      mongodbioit0testwriteandr-05031706-3733-harness-jvvd,
      mongodbioit0testwriteandr-05031706-3733-harness-jvvd
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
        at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:346)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:328)
        at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(MongoDBIOIT.java:118)
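
The write-side step names in this run ("Generate sequence", "Produce documents", "Write documents to MongoDB") point at a write pipeline of roughly this shape; again a sketch under assumed names and sizes, not the test source.

    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.coders.SerializableCoder;
    import org.apache.beam.sdk.io.GenerateSequence;
    import org.apache.beam.sdk.io.mongodb.MongoDbIO;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;
    import org.apache.beam.sdk.transforms.MapElements;
    import org.apache.beam.sdk.values.PCollection;
    import org.apache.beam.sdk.values.TypeDescriptor;
    import org.bson.Document;

    public class MongoDbWriteSketch {
      public static void main(String[] args) {
        Pipeline p = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());

        PCollection<Document> docs =
            p.apply("Generate sequence", GenerateSequence.from(0).to(1000))  // size is an assumption
             .apply("Produce documents",
                 MapElements.into(TypeDescriptor.of(Document.class))
                     .via((Long i) -> new Document("_id", i).append("value", "document-" + i)))
             .setCoder(SerializableCoder.of(Document.class));

        docs.apply("Write documents to MongoDB",
            MongoDbIO.write()
                .withUri("mongodb://35.193.81.82:27017")  // LoadBalancer IP seen in the traces
                .withDatabase("beam")                     // assumed name
                .withCollection("test_collection"));      // assumed name

        p.run().waitUntilFinish();
      }
    }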

    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.193.81.82:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
        at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
        at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
        at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
        at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
        at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:158)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:133)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:128)
        at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:51)
        at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:36)
        at com.mongodb.Mongo.execute(Mongo.java:781)
        at com.mongodb.Mongo$2.execute(Mongo.java:764)
        at com.mongodb.MongoDatabaseImpl.drop(MongoDatabaseImpl.java:136)
        at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.tearDown(MongoDBIOIT.java:103)
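
Every failure in these runs reduces to the same client-side symptom: neither the Dataflow workers nor the tearDown on the test host can open a socket to the MongoDB LoadBalancer address within the driver's 30 000 ms defaults, which points at the service being unreachable rather than slow. If slowness were suspected instead, the driver timeouts can be raised through standard connection-string options on the URI that MongoDbIO accepts; the values and names below are illustrative only.

    import org.apache.beam.sdk.io.mongodb.MongoDbIO;

    public class MongoTimeoutTuningSketch {
      // connectTimeoutMS, socketTimeoutMS and serverSelectionTimeoutMS are
      // standard MongoDB connection-string options; the 120 s values are
      // arbitrary and only show where such tuning would go.
      static MongoDbIO.Read readWithLongerTimeouts(String host) {
        String uri = "mongodb://" + host + ":27017/"
            + "?connectTimeoutMS=120000"
            + "&socketTimeoutMS=120000"
            + "&serverSelectionTimeoutMS=120000";
        return MongoDbIO.read()
            .withUri(uri)
            .withDatabase("beam")               // assumed name
            .withCollection("test_collection"); // assumed name
      }
    }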

1 test completed, 1 failed
Finished generating test XML results (0.032 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/f31ac82d/beam/sdks/java/io/mongodb/build/test-results/integrationTest>
Generating HTML test report...
Finished generating test html results (0.033 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/f31ac82d/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest>

> Task :beam-sdks-java-io-mongodb:integrationTest FAILED
:beam-sdks-java-io-mongodb:integrationTest (Thread[Task worker for ':' Thread 7,5,main]) completed. Took 7 mins 4.819 secs.

Deprecated Gradle features were used in this build, making it incompatible with Gradle 5.0.
See https://docs.gradle.org/4.7/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 13s
55 actionable tasks: 34 executed, 21 up-to-date

Publishing build scan...
https://gradle.com/s/wt3oajlwe4zwo


STDERR: 
FAILURE: Build failed with an exception.

* What went wrong:
Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
> There were failing tests. See the report at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/f31ac82d/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>

* Try:
Run with --debug option to get more log output. Run with --scan to get full insights.

* Exception is:
org.gradle.api.tasks.TaskExecutionException: Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:103)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.execute(ExecuteActionsTaskExecuter.java:73)
	at org.gradle.api.internal.tasks.execution.OutputDirectoryCreatingTaskExecuter.execute(OutputDirectoryCreatingTaskExecuter.java:51)
	at org.gradle.api.internal.tasks.execution.SkipCachedTaskExecuter.execute(SkipCachedTaskExecuter.java:105)
	at org.gradle.api.internal.tasks.execution.SkipUpToDateTaskExecuter.execute(SkipUpToDateTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.ResolveTaskOutputCachingStateExecuter.execute(ResolveTaskOutputCachingStateExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ResolveBuildCacheKeyExecuter.execute(ResolveBuildCacheKeyExecuter.java:66)
	at org.gradle.api.internal.tasks.execution.ValidatingTaskExecuter.execute(ValidatingTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipEmptySourceFilesTaskExecuter.execute(SkipEmptySourceFilesTaskExecuter.java:101)
	at org.gradle.api.internal.tasks.execution.FinalizeInputFilePropertiesTaskExecuter.execute(FinalizeInputFilePropertiesTaskExecuter.java:44)
	at org.gradle.api.internal.tasks.execution.CleanupStaleOutputsExecuter.execute(CleanupStaleOutputsExecuter.java:91)
	at org.gradle.api.internal.tasks.execution.ResolveTaskArtifactStateTaskExecuter.execute(ResolveTaskArtifactStateTaskExecuter.java:62)
	at org.gradle.api.internal.tasks.execution.SkipTaskWithNoActionsExecuter.execute(SkipTaskWithNoActionsExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipOnlyIfTaskExecuter.execute(SkipOnlyIfTaskExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ExecuteAtMostOnceTaskExecuter.execute(ExecuteAtMostOnceTaskExecuter.java:43)
	at org.gradle.api.internal.tasks.execution.CatchExceptionTaskExecuter.execute(CatchExceptionTaskExecuter.java:34)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker$1.run(DefaultTaskGraphExecuter.java:256)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:249)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:238)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:104)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:98)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.execute(DefaultTaskExecutionPlan.java:663)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.executeWithTask(DefaultTaskExecutionPlan.java:596)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker.run(DefaultTaskPlanExecutor.java:98)
	at org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:63)
	at org.gradle.internal.concurrent.ManagedExecutorImpl$1.run(ManagedExecutorImpl.java:46)
	at org.gradle.internal.concurrent.ThreadFactoryImpl$ManagedThreadRunnable.run(ThreadFactoryImpl.java:55)
Caused by: org.gradle.api.GradleException: There were failing tests. See the report at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/f31ac82d/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>
	at org.gradle.api.tasks.testing.AbstractTestTask.handleTestFailures(AbstractTestTask.java:612)
	at org.gradle.api.tasks.testing.AbstractTestTask.executeTests(AbstractTestTask.java:484)
	at org.gradle.api.tasks.testing.Test.executeTests(Test.java:583)
	at org.gradle.internal.reflect.JavaMethod.invoke(JavaMethod.java:73)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.doExecute(StandardTaskAction.java:46)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:39)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:26)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:794)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:761)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter$1.run(ExecuteActionsTaskExecuter.java:124)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeAction(ExecuteActionsTaskExecuter.java:113)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:95)
	... 31 more


* Get more help at https://help.gradle.org

2018-05-04 00:13:25,726 f31ac82d MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-04 00:13:25,727 f31ac82d MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-05-04 00:13:25,728 f31ac82d MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525390334046> delete -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml> --ignore-not-found
2018-05-04 00:13:26,160 f31ac82d MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 780, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-04 00:13:26,161 f31ac82d MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-05-04 00:13:26,176 f31ac82d MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-05-04 00:13:26,177 f31ac82d MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/f31ac82d/pkb.log>
2018-05-04 00:13:26,177 f31ac82d MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/f31ac82d/completion_statuses.json>
Build step 'Execute shell' marked build as failure

Build failed in Jenkins: beam_PerformanceTests_MongoDBIO_IT #126

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/126/display/redirect>

------------------------------------------
[...truncated 449.84 KB...]
    	at com.mongodb.connection.WriteCommandProtocol.receiveMessage(WriteCommandProtocol.java:234)
    	at com.mongodb.connection.WriteCommandProtocol.execute(WriteCommandProtocol.java:104)
    	at com.mongodb.connection.InsertCommandProtocol.execute(InsertCommandProtocol.java:67)
    	at com.mongodb.connection.InsertCommandProtocol.execute(InsertCommandProtocol.java:37)
    	at com.mongodb.connection.DefaultServer$DefaultServerProtocolExecutor.execute(DefaultServer.java:159)
    	at com.mongodb.connection.DefaultServerConnection.executeProtocol(DefaultServerConnection.java:286)
    	at com.mongodb.connection.DefaultServerConnection.insertCommand(DefaultServerConnection.java:115)
    	at com.mongodb.operation.MixedBulkWriteOperation$Run$2.executeWriteCommandProtocol(MixedBulkWriteOperation.java:455)
    	at com.mongodb.operation.MixedBulkWriteOperation$Run$RunExecutor.execute(MixedBulkWriteOperation.java:646)
    	at com.mongodb.operation.MixedBulkWriteOperation$Run.execute(MixedBulkWriteOperation.java:401)
    	at com.mongodb.operation.MixedBulkWriteOperation$1.call(MixedBulkWriteOperation.java:179)
    	at com.mongodb.operation.MixedBulkWriteOperation$1.call(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.OperationHelper.withConnectionSource(OperationHelper.java:230)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:221)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn$DoFnInvoker.invokeProcessElement(Unknown Source)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:177)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:141)
    	at com.google.cloud.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:323)
    	at com.google.cloud.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:43)
    	at com.google.cloud.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:48)
    	at com.google.cloud.dataflow.worker.SimpleParDoFn$1.output(SimpleParDoFn.java:271)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:211)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.access$700(SimpleDoFnRunner.java:66)
    	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:436)
    	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:424)
    	at org.apache.beam.sdk.transforms.MapElements$1.processElement(MapElements.java:129)
    	at org.apache.beam.sdk.transforms.MapElements$1$DoFnInvoker.invokeProcessElement(Unknown Source)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:177)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:141)
    	at com.google.cloud.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:323)
    	at com.google.cloud.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:43)
    	at com.google.cloud.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:48)
    	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.runReadLoop(ReadOperation.java:200)
    	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.start(ReadOperation.java:158)
    	at com.google.cloud.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:75)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
    	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    	at java.lang.Thread.run(Thread.java:745)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.226.31.124:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.226.31.124:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.226.31.124:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    Workflow failed. Causes: S01:Generate sequence/Read(BoundedCountingSource)+Produce documents/Map+Write documents to MongoDB/ParDo(Write) failed., A work item was attempted 4 times without success. Each time the worker eventually lost contact with the service. The work item was attempted on: 
      mongodbioit0testwriteandr-05031106-b8pn-harness-448s,
      mongodbioit0testwriteandr-05031106-b8pn-harness-448s,
      mongodbioit0testwriteandr-05031106-b8pn-harness-448s,
      mongodbioit0testwriteandr-05031106-b8pn-harness-448s
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
        at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:346)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:328)
        at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(MongoDBIOIT.java:118)

    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.226.31.124:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
        at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
        at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
        at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
        at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
        at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:158)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:133)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:128)
        at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:51)
        at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:36)
        at com.mongodb.Mongo.execute(Mongo.java:781)
        at com.mongodb.Mongo$2.execute(Mongo.java:764)
        at com.mongodb.MongoDatabaseImpl.drop(MongoDatabaseImpl.java:136)
        at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.tearDown(MongoDBIOIT.java:103)

1 test completed, 1 failed
Finished generating test XML results (0.039 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/8ab0fe1b/beam/sdks/java/io/mongodb/build/test-results/integrationTest>
Generating HTML test report...
Finished generating test html results (0.036 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/8ab0fe1b/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest>

> Task :beam-sdks-java-io-mongodb:integrationTest FAILED
:beam-sdks-java-io-mongodb:integrationTest (Thread[Task worker for ':' Thread 8,5,main]) completed. Took 7 mins 18.546 secs.

Deprecated Gradle features were used in this build, making it incompatible with Gradle 5.0.
See https://docs.gradle.org/4.7/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 5s
55 actionable tasks: 32 executed, 23 up-to-date

Publishing build scan...
https://gradle.com/s/vqtsqjtksnfqi


STDERR: 
FAILURE: Build failed with an exception.

* What went wrong:
Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
> There were failing tests. See the report at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/8ab0fe1b/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>

* Try:
Run with --debug option to get more log output. Run with --scan to get full insights.

* Exception is:
org.gradle.api.tasks.TaskExecutionException: Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:103)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.execute(ExecuteActionsTaskExecuter.java:73)
	at org.gradle.api.internal.tasks.execution.OutputDirectoryCreatingTaskExecuter.execute(OutputDirectoryCreatingTaskExecuter.java:51)
	at org.gradle.api.internal.tasks.execution.SkipCachedTaskExecuter.execute(SkipCachedTaskExecuter.java:105)
	at org.gradle.api.internal.tasks.execution.SkipUpToDateTaskExecuter.execute(SkipUpToDateTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.ResolveTaskOutputCachingStateExecuter.execute(ResolveTaskOutputCachingStateExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ResolveBuildCacheKeyExecuter.execute(ResolveBuildCacheKeyExecuter.java:66)
	at org.gradle.api.internal.tasks.execution.ValidatingTaskExecuter.execute(ValidatingTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipEmptySourceFilesTaskExecuter.execute(SkipEmptySourceFilesTaskExecuter.java:101)
	at org.gradle.api.internal.tasks.execution.FinalizeInputFilePropertiesTaskExecuter.execute(FinalizeInputFilePropertiesTaskExecuter.java:44)
	at org.gradle.api.internal.tasks.execution.CleanupStaleOutputsExecuter.execute(CleanupStaleOutputsExecuter.java:91)
	at org.gradle.api.internal.tasks.execution.ResolveTaskArtifactStateTaskExecuter.execute(ResolveTaskArtifactStateTaskExecuter.java:62)
	at org.gradle.api.internal.tasks.execution.SkipTaskWithNoActionsExecuter.execute(SkipTaskWithNoActionsExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipOnlyIfTaskExecuter.execute(SkipOnlyIfTaskExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ExecuteAtMostOnceTaskExecuter.execute(ExecuteAtMostOnceTaskExecuter.java:43)
	at org.gradle.api.internal.tasks.execution.CatchExceptionTaskExecuter.execute(CatchExceptionTaskExecuter.java:34)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker$1.run(DefaultTaskGraphExecuter.java:256)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:249)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:238)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:104)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:98)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.execute(DefaultTaskExecutionPlan.java:663)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.executeWithTask(DefaultTaskExecutionPlan.java:596)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker.run(DefaultTaskPlanExecutor.java:98)
	at org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:63)
	at org.gradle.internal.concurrent.ManagedExecutorImpl$1.run(ManagedExecutorImpl.java:46)
	at org.gradle.internal.concurrent.ThreadFactoryImpl$ManagedThreadRunnable.run(ThreadFactoryImpl.java:55)
Caused by: org.gradle.api.GradleException: There were failing tests. See the report at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/8ab0fe1b/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>
	at org.gradle.api.tasks.testing.AbstractTestTask.handleTestFailures(AbstractTestTask.java:612)
	at org.gradle.api.tasks.testing.AbstractTestTask.executeTests(AbstractTestTask.java:484)
	at org.gradle.api.tasks.testing.Test.executeTests(Test.java:583)
	at org.gradle.internal.reflect.JavaMethod.invoke(JavaMethod.java:73)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.doExecute(StandardTaskAction.java:46)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:39)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:26)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:794)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:761)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter$1.run(ExecuteActionsTaskExecuter.java:124)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeAction(ExecuteActionsTaskExecuter.java:113)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:95)
	... 31 more


* Get more help at https://help.gradle.org

2018-05-03 18:13:16,493 8ab0fe1b MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-03 18:13:16,494 8ab0fe1b MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-05-03 18:13:16,494 8ab0fe1b MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525361350766> delete -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml> --ignore-not-found
2018-05-03 18:13:17,171 8ab0fe1b MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 780, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-03 18:13:17,171 8ab0fe1b MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-05-03 18:13:17,197 8ab0fe1b MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-05-03 18:13:17,198 8ab0fe1b MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/8ab0fe1b/pkb.log>
2018-05-03 18:13:17,198 8ab0fe1b MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/8ab0fe1b/completion_statuses.json>
Build step 'Execute shell' marked build as failure
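
The failing fused step named in the report above, "Generate sequence/Read(BoundedCountingSource)+Produce documents/Map+Write documents to MongoDB/ParDo(Write)", together with the MongoDbIO$Write$WriteFn.flush frames, points at a write pipeline roughly like the sketch below. This is only a reconstruction from the step names in the log, not the actual MongoDBIOIT code; the URI, database and collection values are placeholders.

    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.io.GenerateSequence;
    import org.apache.beam.sdk.io.mongodb.MongoDbIO;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;
    import org.apache.beam.sdk.transforms.MapElements;
    import org.apache.beam.sdk.transforms.SimpleFunction;
    import org.bson.Document;

    public class MongoDbWriteSketch {
      public static void main(String[] args) {
        Pipeline pipeline = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());
        pipeline
            // "Generate sequence": a bounded source of longs (BoundedCountingSource in the step name).
            .apply("Generate sequence", GenerateSequence.from(0).to(1_000_000))
            // "Produce documents": map each long to a BSON document.
            .apply("Produce documents", MapElements.via(new SimpleFunction<Long, Document>() {
              @Override
              public Document apply(Long i) {
                return new Document("_id", i).append("value", "doc-" + i);
              }
            }))
            // "Write documents to MongoDB": MongoDbIO batches documents in its WriteFn and flushes
            // them with insertMany() (MongoDbIO.java:667 above), which is where the
            // MongoTimeoutException surfaces when the server never becomes reachable.
            .apply("Write documents to MongoDB", MongoDbIO.write()
                .withUri("mongodb://35.226.31.124:27017") // LoadBalancer address from the log, illustrative only
                .withDatabase("beam")                     // placeholder database name
                .withCollection("test"));                 // placeholder collection name
        pipeline.run().waitUntilFinish();
      }
    }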

Build failed in Jenkins: beam_PerformanceTests_MongoDBIO_IT #125

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/125/display/redirect?page=changes>

Changes:

[echauchot] [BEAM-4138] Support runners that do not support committed metrics in

------------------------------------------
[...truncated 484.64 KB...]
    INFO: No server chosen by WritableServerSelector from cluster description ClusterDescription{type=UNKNOWN, connectionMode=SINGLE, all=[ServerDescription{address=35.224.247.178:27017, type=UNKNOWN, state=CONNECTING}]}. Waiting for 30000 ms before timing out
    May 03, 2018 12:14:24 PM com.mongodb.diagnostics.logging.SLF4JLogger info
    INFO: Exception in monitor thread while connecting to server 35.224.247.178:27017
    com.mongodb.MongoSocketOpenException: Exception opening socket
    	at com.mongodb.connection.SocketStream.open(SocketStream.java:63)
    	at com.mongodb.connection.InternalStreamConnection.open(InternalStreamConnection.java:114)
    	at com.mongodb.connection.DefaultServerMonitor$ServerMonitorRunnable.run(DefaultServerMonitor.java:128)
    	at java.lang.Thread.run(Thread.java:748)
    Caused by: java.net.SocketTimeoutException: connect timed out
    	at java.net.PlainSocketImpl.socketConnect(Native Method)
    	at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350)
    	at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206)
    	at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188)
    	at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392)
    	at java.net.Socket.connect(Socket.java:589)
    	at com.mongodb.connection.SocketStreamHelper.initialize(SocketStreamHelper.java:50)
    	at com.mongodb.connection.SocketStream.open(SocketStream.java:58)
    	... 3 more


Gradle Test Executor 1 finished executing tests.

> Task :beam-sdks-java-io-mongodb:integrationTest

org.apache.beam.sdk.io.mongodb.MongoDBIOIT > testWriteAndRead FAILED
    java.lang.RuntimeException: com.mongodb.MongoSocketReadException: Prematurely reached end of stream
    	at com.mongodb.connection.SocketStream.read(SocketStream.java:88)
    	at com.mongodb.connection.InternalStreamConnection.receiveResponseBuffers(InternalStreamConnection.java:491)
    	at com.mongodb.connection.InternalStreamConnection.receiveMessage(InternalStreamConnection.java:221)
    	at com.mongodb.connection.UsageTrackingInternalConnection.receiveMessage(UsageTrackingInternalConnection.java:102)
    	at com.mongodb.connection.DefaultConnectionPool$PooledConnection.receiveMessage(DefaultConnectionPool.java:435)
    	at com.mongodb.connection.WriteCommandProtocol.receiveMessage(WriteCommandProtocol.java:234)
    	at com.mongodb.connection.WriteCommandProtocol.execute(WriteCommandProtocol.java:104)
    	at com.mongodb.connection.InsertCommandProtocol.execute(InsertCommandProtocol.java:67)
    	at com.mongodb.connection.InsertCommandProtocol.execute(InsertCommandProtocol.java:37)
    	at com.mongodb.connection.DefaultServer$DefaultServerProtocolExecutor.execute(DefaultServer.java:159)
    	at com.mongodb.connection.DefaultServerConnection.executeProtocol(DefaultServerConnection.java:286)
    	at com.mongodb.connection.DefaultServerConnection.insertCommand(DefaultServerConnection.java:115)
    	at com.mongodb.operation.MixedBulkWriteOperation$Run$2.executeWriteCommandProtocol(MixedBulkWriteOperation.java:455)
    	at com.mongodb.operation.MixedBulkWriteOperation$Run$RunExecutor.execute(MixedBulkWriteOperation.java:646)
    	at com.mongodb.operation.MixedBulkWriteOperation$Run.execute(MixedBulkWriteOperation.java:401)
    	at com.mongodb.operation.MixedBulkWriteOperation$1.call(MixedBulkWriteOperation.java:179)
    	at com.mongodb.operation.MixedBulkWriteOperation$1.call(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.OperationHelper.withConnectionSource(OperationHelper.java:230)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:221)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.224.247.178:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.224.247.178:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.224.247.178:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    Workflow failed. Causes: S01:Generate sequence/Read(BoundedCountingSource)+Produce documents/Map+Write documents to MongoDB/ParDo(Write) failed., A work item was attempted 4 times without success. Each time the worker eventually lost contact with the service. The work item was attempted on: 
      mongodbioit0testwriteandr-05030506-m4ad-harness-8hms,
      mongodbioit0testwriteandr-05030506-m4ad-harness-8hms,
      mongodbioit0testwriteandr-05030506-m4ad-harness-8hms,
      mongodbioit0testwriteandr-05030506-m4ad-harness-8hms
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
        at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:346)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:328)
        at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(MongoDBIOIT.java:118)

    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.224.247.178:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
        at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
        at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
        at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
        at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
        at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:158)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:133)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:128)
        at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:51)
        at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:36)
        at com.mongodb.Mongo.execute(Mongo.java:781)
        at com.mongodb.Mongo$2.execute(Mongo.java:764)
        at com.mongodb.MongoDatabaseImpl.drop(MongoDatabaseImpl.java:136)
        at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.tearDown(MongoDBIOIT.java:103)

1 test completed, 1 failed
Finished generating test XML results (0.042 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/c471ef9e/beam/sdks/java/io/mongodb/build/test-results/integrationTest>
Generating HTML test report...
Finished generating test html results (0.036 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/c471ef9e/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest>

> Task :beam-sdks-java-io-mongodb:integrationTest FAILED
:beam-sdks-java-io-mongodb:integrationTest (Thread[Task worker for ':',5,main]) completed. Took 8 mins 19.571 secs.

Deprecated Gradle features were used in this build, making it incompatible with Gradle 5.0.
See https://docs.gradle.org/4.7/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 10m 34s
55 actionable tasks: 34 executed, 21 up-to-date

Publishing build scan...
https://gradle.com/s/74wunbpslgwfa


STDERR: 
FAILURE: Build failed with an exception.

* What went wrong:
Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
> There were failing tests. See the report at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/c471ef9e/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>

* Try:
Run with --debug option to get more log output. Run with --scan to get full insights.

* Exception is:
org.gradle.api.tasks.TaskExecutionException: Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:103)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.execute(ExecuteActionsTaskExecuter.java:73)
	at org.gradle.api.internal.tasks.execution.OutputDirectoryCreatingTaskExecuter.execute(OutputDirectoryCreatingTaskExecuter.java:51)
	at org.gradle.api.internal.tasks.execution.SkipCachedTaskExecuter.execute(SkipCachedTaskExecuter.java:105)
	at org.gradle.api.internal.tasks.execution.SkipUpToDateTaskExecuter.execute(SkipUpToDateTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.ResolveTaskOutputCachingStateExecuter.execute(ResolveTaskOutputCachingStateExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ResolveBuildCacheKeyExecuter.execute(ResolveBuildCacheKeyExecuter.java:66)
	at org.gradle.api.internal.tasks.execution.ValidatingTaskExecuter.execute(ValidatingTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipEmptySourceFilesTaskExecuter.execute(SkipEmptySourceFilesTaskExecuter.java:101)
	at org.gradle.api.internal.tasks.execution.FinalizeInputFilePropertiesTaskExecuter.execute(FinalizeInputFilePropertiesTaskExecuter.java:44)
	at org.gradle.api.internal.tasks.execution.CleanupStaleOutputsExecuter.execute(CleanupStaleOutputsExecuter.java:91)
	at org.gradle.api.internal.tasks.execution.ResolveTaskArtifactStateTaskExecuter.execute(ResolveTaskArtifactStateTaskExecuter.java:62)
	at org.gradle.api.internal.tasks.execution.SkipTaskWithNoActionsExecuter.execute(SkipTaskWithNoActionsExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipOnlyIfTaskExecuter.execute(SkipOnlyIfTaskExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ExecuteAtMostOnceTaskExecuter.execute(ExecuteAtMostOnceTaskExecuter.java:43)
	at org.gradle.api.internal.tasks.execution.CatchExceptionTaskExecuter.execute(CatchExceptionTaskExecuter.java:34)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker$1.run(DefaultTaskGraphExecuter.java:256)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:249)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:238)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:104)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:98)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.execute(DefaultTaskExecutionPlan.java:663)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.executeWithTask(DefaultTaskExecutionPlan.java:596)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker.run(DefaultTaskPlanExecutor.java:98)
	at org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:63)
	at org.gradle.internal.concurrent.ManagedExecutorImpl$1.run(ManagedExecutorImpl.java:46)
	at org.gradle.internal.concurrent.ThreadFactoryImpl$ManagedThreadRunnable.run(ThreadFactoryImpl.java:55)
Caused by: org.gradle.api.GradleException: There were failing tests. See the report at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/c471ef9e/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>
	at org.gradle.api.tasks.testing.AbstractTestTask.handleTestFailures(AbstractTestTask.java:612)
	at org.gradle.api.tasks.testing.AbstractTestTask.executeTests(AbstractTestTask.java:484)
	at org.gradle.api.tasks.testing.Test.executeTests(Test.java:583)
	at org.gradle.internal.reflect.JavaMethod.invoke(JavaMethod.java:73)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.doExecute(StandardTaskAction.java:46)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:39)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:26)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:794)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:761)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter$1.run(ExecuteActionsTaskExecuter.java:124)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeAction(ExecuteActionsTaskExecuter.java:113)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:95)
	... 31 more


* Get more help at https://help.gradle.org

2018-05-03 12:14:36,828 c471ef9e MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-03 12:14:36,829 c471ef9e MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-05-03 12:14:36,829 c471ef9e MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525341686437> delete -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml> --ignore-not-found
2018-05-03 12:14:37,473 c471ef9e MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 780, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-03 12:14:37,474 c471ef9e MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-05-03 12:14:37,519 c471ef9e MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-05-03 12:14:37,520 c471ef9e MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/c471ef9e/pkb.log>
2018-05-03 12:14:37,520 c471ef9e MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/c471ef9e/completion_statuses.json>
Build step 'Execute shell' marked build as failure
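
The report above fails a second time in MongoDBIOIT.tearDown (MongoDBIOIT.java:103), where the test drops its database after the pipeline run; because the MongoDB address is still unreachable, the DropDatabaseOperation hits the same 30000 ms server-selection timeout. Below is a minimal sketch of that cleanup path using the 3.x driver API visible in the stack frames, not the test's own code; the host is taken from the log and the database name is a placeholder.

    import com.mongodb.MongoClient;

    public class TearDownSketch {
      public static void main(String[] args) {
        // Unreachable LoadBalancer address from the report above, used only as an example.
        MongoClient client = new MongoClient("35.224.247.178", 27017);
        try {
          // drop() issues the dropDatabase command (DropDatabaseOperation in the stack);
          // with no reachable server it fails after the driver's server-selection timeout.
          client.getDatabase("beam").drop();
        } finally {
          client.close();
        }
      }
    }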

Build failed in Jenkins: beam_PerformanceTests_MongoDBIO_IT #124

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/124/display/redirect?page=changes>

Changes:

[tgroh] Add KV and Window Coder Utilities to ModelCoders

[tgroh] Use ModelCoders in ProcessBundleDescriptors

[sidhom] Remove StateRequestHandler from JobBundleFactory

------------------------------------------
[...truncated 461.89 KB...]
    	at com.mongodb.operation.OperationHelper.withConnectionSource(OperationHelper.java:230)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:221)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn$DoFnInvoker.invokeProcessElement(Unknown Source)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:177)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:141)
    	at com.google.cloud.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:323)
    	at com.google.cloud.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:43)
    	at com.google.cloud.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:48)
    	at com.google.cloud.dataflow.worker.SimpleParDoFn$1.output(SimpleParDoFn.java:271)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:211)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.access$700(SimpleDoFnRunner.java:66)
    	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:436)
    	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:424)
    	at org.apache.beam.sdk.transforms.MapElements$1.processElement(MapElements.java:129)
    	at org.apache.beam.sdk.transforms.MapElements$1$DoFnInvoker.invokeProcessElement(Unknown Source)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:177)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:141)
    	at com.google.cloud.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:323)
    	at com.google.cloud.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:43)
    	at com.google.cloud.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:48)
    	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.runReadLoop(ReadOperation.java:200)
    	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.start(ReadOperation.java:158)
    	at com.google.cloud.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:75)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
    	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    	at java.lang.Thread.run(Thread.java:745)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.202.200.85:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.202.200.85:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.202.200.85:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    [The identical com.mongodb.MongoTimeoutException and stack trace for 35.202.200.85:27017 is logged twice more here for subsequent write bundles.]
    Workflow failed. Causes: S01:Generate sequence/Read(BoundedCountingSource)+Produce documents/Map+Write documents to MongoDB/ParDo(Write) failed., A work item was attempted 4 times without success. Each time the worker eventually lost contact with the service. The work item was attempted on: 
      mongodbioit0testwriteandr-05022306-5b5d-harness-5f6t,
      mongodbioit0testwriteandr-05022306-5b5d-harness-5f6t,
      mongodbioit0testwriteandr-05022306-5b5d-harness-5f6t,
      mongodbioit0testwriteandr-05022306-5b5d-harness-5f6t
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
        at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:346)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:328)
        at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(MongoDBIOIT.java:118)
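
The failing stage name, S01:Generate sequence/Read(BoundedCountingSource)+Produce documents/Map+Write documents to MongoDB/ParDo(Write), together with the MongoDbIO$Write$WriteFn frames above, describes a simple generate-map-write pipeline. The following is a minimal sketch of that shape, not the actual MongoDBIOIT code: the document contents, database and collection names are made up, and the connection-string options only show where connectTimeoutMS and serverSelectionTimeoutMS would be raised above the 30000 ms that keeps expiring; raising them would not by itself fix a load balancer that never leaves the CONNECTING state.

    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.io.GenerateSequence;
    import org.apache.beam.sdk.io.mongodb.MongoDbIO;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;
    import org.apache.beam.sdk.transforms.MapElements;
    import org.apache.beam.sdk.values.TypeDescriptor;
    import org.bson.Document;

    public class MongoWriteSketch {
      public static void main(String[] args) {
        Pipeline p = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());

        p.apply("Generate sequence", GenerateSequence.from(0).to(1_000_000))
         .apply("Produce documents",
             MapElements.into(TypeDescriptor.of(Document.class))
                 .via((Long i) -> new Document("_id", i).append("value", "doc-" + i)))
         .apply("Write documents to MongoDB",
             MongoDbIO.write()
                 // The IP below is the unreachable load balancer from the log;
                 // the query options only illustrate where longer timeouts would go.
                 .withUri("mongodb://35.202.200.85:27017"
                     + "/?connectTimeoutMS=60000&serverSelectionTimeoutMS=60000")
                 .withDatabase("beam")           // made-up database name
                 .withCollection("test_write")); // made-up collection name

        p.run().waitUntilFinish();
      }
    }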

1 test completed, 1 failed
Finished generating test XML results (0.046 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/ba322f69/beam/sdks/java/io/mongodb/build/test-results/integrationTest>
Generating HTML test report...
Finished generating test html results (0.039 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/ba322f69/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest>
:beam-sdks-java-io-mongodb:integrationTest (Thread[Task worker for ':' Thread 9,5,main]) completed. Took 8 mins 11.143 secs.

Deprecated Gradle features were used in this build, making it incompatible with Gradle 5.0.
See https://docs.gradle.org/4.7/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 52s
55 actionable tasks: 42 executed, 13 up-to-date

Publishing build scan...
https://gradle.com/s/m5dimtl72dop2


STDERR: 
FAILURE: Build failed with an exception.

* What went wrong:
Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
> There were failing tests. See the report at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/ba322f69/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>

* Try:
Run with --debug option to get more log output. Run with --scan to get full insights.

* Exception is:
org.gradle.api.tasks.TaskExecutionException: Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:103)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.execute(ExecuteActionsTaskExecuter.java:73)
	at org.gradle.api.internal.tasks.execution.OutputDirectoryCreatingTaskExecuter.execute(OutputDirectoryCreatingTaskExecuter.java:51)
	at org.gradle.api.internal.tasks.execution.SkipCachedTaskExecuter.execute(SkipCachedTaskExecuter.java:105)
	at org.gradle.api.internal.tasks.execution.SkipUpToDateTaskExecuter.execute(SkipUpToDateTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.ResolveTaskOutputCachingStateExecuter.execute(ResolveTaskOutputCachingStateExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ResolveBuildCacheKeyExecuter.execute(ResolveBuildCacheKeyExecuter.java:66)
	at org.gradle.api.internal.tasks.execution.ValidatingTaskExecuter.execute(ValidatingTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipEmptySourceFilesTaskExecuter.execute(SkipEmptySourceFilesTaskExecuter.java:101)
	at org.gradle.api.internal.tasks.execution.FinalizeInputFilePropertiesTaskExecuter.execute(FinalizeInputFilePropertiesTaskExecuter.java:44)
	at org.gradle.api.internal.tasks.execution.CleanupStaleOutputsExecuter.execute(CleanupStaleOutputsExecuter.java:91)
	at org.gradle.api.internal.tasks.execution.ResolveTaskArtifactStateTaskExecuter.execute(ResolveTaskArtifactStateTaskExecuter.java:62)
	at org.gradle.api.internal.tasks.execution.SkipTaskWithNoActionsExecuter.execute(SkipTaskWithNoActionsExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipOnlyIfTaskExecuter.execute(SkipOnlyIfTaskExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ExecuteAtMostOnceTaskExecuter.execute(ExecuteAtMostOnceTaskExecuter.java:43)
	at org.gradle.api.internal.tasks.execution.CatchExceptionTaskExecuter.execute(CatchExceptionTaskExecuter.java:34)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker$1.run(DefaultTaskGraphExecuter.java:256)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:249)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:238)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:104)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:98)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.execute(DefaultTaskExecutionPlan.java:663)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.executeWithTask(DefaultTaskExecutionPlan.java:596)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker.run(DefaultTaskPlanExecutor.java:98)
	at org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:63)
	at org.gradle.internal.concurrent.ManagedExecutorImpl$1.run(ManagedExecutorImpl.java:46)
	at org.gradle.internal.concurrent.ThreadFactoryImpl$ManagedThreadRunnable.run(ThreadFactoryImpl.java:55)
Caused by: org.gradle.api.GradleException: There were failing tests. See the report at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/ba322f69/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>
	at org.gradle.api.tasks.testing.AbstractTestTask.handleTestFailures(AbstractTestTask.java:612)
	at org.gradle.api.tasks.testing.AbstractTestTask.executeTests(AbstractTestTask.java:484)
	at org.gradle.api.tasks.testing.Test.executeTests(Test.java:583)
	at org.gradle.internal.reflect.JavaMethod.invoke(JavaMethod.java:73)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.doExecute(StandardTaskAction.java:46)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:39)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:26)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:794)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:761)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter$1.run(ExecuteActionsTaskExecuter.java:124)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeAction(ExecuteActionsTaskExecuter.java:113)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:95)
	... 31 more


* Get more help at https://help.gradle.org

2018-05-03 06:14:58,214 ba322f69 MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-03 06:14:58,215 ba322f69 MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-05-03 06:14:58,216 ba322f69 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525323738309> delete -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml> --ignore-not-found
2018-05-03 06:14:58,924 ba322f69 MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 780, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-03 06:14:58,925 ba322f69 MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-05-03 06:14:58,977 ba322f69 MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-05-03 06:14:58,978 ba322f69 MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/ba322f69/pkb.log>
2018-05-03 06:14:58,978 ba322f69 MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/ba322f69/completion_statuses.json>
Build step 'Execute shell' marked build as failure

Build failed in Jenkins: beam_PerformanceTests_MongoDBIO_IT #123

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/123/display/redirect?page=changes>

Changes:

[axelmagn] Create ArtifactSource Interface

[tgroh] Remove unused Test Utilities, Test

[axelmagn] Create RemoteBundle interface

[axelmagn] Create JobInfo for job declaration by operators.

[mairbek] Wait for input collection

[mairbek] Expose DatabaseAdminClient

[mairbek] Consistently use lower case table name and column names

[axelmagn] Create BundleFactory chain.

[mairbek] Addressed comments

------------------------------------------
[...truncated 438.74 KB...]
    	at com.mongodb.connection.DefaultConnectionPool$PooledConnection.receiveMessage(DefaultConnectionPool.java:435)
    	at com.mongodb.connection.WriteCommandProtocol.receiveMessage(WriteCommandProtocol.java:234)
    	at com.mongodb.connection.WriteCommandProtocol.execute(WriteCommandProtocol.java:104)
    	at com.mongodb.connection.InsertCommandProtocol.execute(InsertCommandProtocol.java:67)
    	at com.mongodb.connection.InsertCommandProtocol.execute(InsertCommandProtocol.java:37)
    	at com.mongodb.connection.DefaultServer$DefaultServerProtocolExecutor.execute(DefaultServer.java:159)
    	at com.mongodb.connection.DefaultServerConnection.executeProtocol(DefaultServerConnection.java:286)
    	at com.mongodb.connection.DefaultServerConnection.insertCommand(DefaultServerConnection.java:115)
    	at com.mongodb.operation.MixedBulkWriteOperation$Run$2.executeWriteCommandProtocol(MixedBulkWriteOperation.java:455)
    	at com.mongodb.operation.MixedBulkWriteOperation$Run$RunExecutor.execute(MixedBulkWriteOperation.java:646)
    	at com.mongodb.operation.MixedBulkWriteOperation$Run.execute(MixedBulkWriteOperation.java:401)
    	at com.mongodb.operation.MixedBulkWriteOperation$1.call(MixedBulkWriteOperation.java:179)
    	at com.mongodb.operation.MixedBulkWriteOperation$1.call(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.OperationHelper.withConnectionSource(OperationHelper.java:230)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:221)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.202.118.20:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    [The identical com.mongodb.MongoTimeoutException and stack trace for 35.202.118.20:27017 is logged four more times here for subsequent write bundles.]
    Workflow failed. Causes: S01:Generate sequence/Read(BoundedCountingSource)+Produce documents/Map+Write documents to MongoDB/ParDo(Write) failed., A work item was attempted 4 times without success. Each time the worker eventually lost contact with the service. The work item was attempted on: 
      mongodbioit0testwriteandr-05021718-ureo-harness-f94z,
      mongodbioit0testwriteandr-05021718-ureo-harness-f94z,
      mongodbioit0testwriteandr-05021718-ureo-harness-f94z,
      mongodbioit0testwriteandr-05021718-ureo-harness-f94z
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
        at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:346)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:328)
        at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(MongoDBIOIT.java:118)

    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.202.118.20:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
        at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
        at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
        at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
        at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
        at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:158)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:133)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:128)
        at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:51)
        at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:36)
        at com.mongodb.Mongo.execute(Mongo.java:781)
        at com.mongodb.Mongo$2.execute(Mongo.java:764)
        at com.mongodb.MongoDatabaseImpl.drop(MongoDatabaseImpl.java:136)
        at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.tearDown(MongoDBIOIT.java:103)
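
The second trace shows that the failure is not limited to the write path: MongoDBIOIT.tearDown also reaches the same unreachable server when it drops the test database after the run. A rough sketch of what such an @After cleanup looks like with the 3.x MongoDB Java driver, using made-up URI and database names rather than the real test options, and swallowing the timeout so a dead cluster does not mask the original failure:

    import com.mongodb.MongoClient;
    import com.mongodb.MongoClientURI;
    import com.mongodb.MongoTimeoutException;
    import org.junit.After;

    public class MongoCleanupSketch {
      // Hypothetical values; the real test derives these from its pipeline options.
      private final String uri = "mongodb://35.202.118.20:27017";
      private final String database = "beam";

      @After
      public void tearDown() {
        MongoClient client = new MongoClient(new MongoClientURI(uri));
        try {
          // This drop() is the call that raised the MongoTimeoutException above.
          client.getDatabase(database).drop();
        } catch (MongoTimeoutException e) {
          // Server already unreachable; there is nothing left to clean up.
        } finally {
          client.close();
        }
      }
    }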

1 test completed, 1 failed
Finished generating test XML results (0.034 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/b1ef04f7/beam/sdks/java/io/mongodb/build/test-results/integrationTest>
Generating HTML test report...
Finished generating test html results (0.032 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/b1ef04f7/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest>

> Task :beam-sdks-java-io-mongodb:integrationTest FAILED
:beam-sdks-java-io-mongodb:integrationTest (Thread[Task worker for ':' Thread 2,5,main]) completed. Took 8 mins 55.222 secs.

Deprecated Gradle features were used in this build, making it incompatible with Gradle 5.0.
See https://docs.gradle.org/4.7/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 10m 39s
55 actionable tasks: 32 executed, 23 up-to-date

Publishing build scan...
https://gradle.com/s/2j2j7anomdpru


STDERR: 
FAILURE: Build failed with an exception.

* What went wrong:
Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
> There were failing tests. See the report at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/b1ef04f7/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>

* Try:
Run with --debug option to get more log output. Run with --scan to get full insights.

* Exception is:
org.gradle.api.tasks.TaskExecutionException: Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:103)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.execute(ExecuteActionsTaskExecuter.java:73)
	at org.gradle.api.internal.tasks.execution.OutputDirectoryCreatingTaskExecuter.execute(OutputDirectoryCreatingTaskExecuter.java:51)
	at org.gradle.api.internal.tasks.execution.SkipCachedTaskExecuter.execute(SkipCachedTaskExecuter.java:105)
	at org.gradle.api.internal.tasks.execution.SkipUpToDateTaskExecuter.execute(SkipUpToDateTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.ResolveTaskOutputCachingStateExecuter.execute(ResolveTaskOutputCachingStateExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ResolveBuildCacheKeyExecuter.execute(ResolveBuildCacheKeyExecuter.java:66)
	at org.gradle.api.internal.tasks.execution.ValidatingTaskExecuter.execute(ValidatingTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipEmptySourceFilesTaskExecuter.execute(SkipEmptySourceFilesTaskExecuter.java:101)
	at org.gradle.api.internal.tasks.execution.FinalizeInputFilePropertiesTaskExecuter.execute(FinalizeInputFilePropertiesTaskExecuter.java:44)
	at org.gradle.api.internal.tasks.execution.CleanupStaleOutputsExecuter.execute(CleanupStaleOutputsExecuter.java:91)
	at org.gradle.api.internal.tasks.execution.ResolveTaskArtifactStateTaskExecuter.execute(ResolveTaskArtifactStateTaskExecuter.java:62)
	at org.gradle.api.internal.tasks.execution.SkipTaskWithNoActionsExecuter.execute(SkipTaskWithNoActionsExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipOnlyIfTaskExecuter.execute(SkipOnlyIfTaskExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ExecuteAtMostOnceTaskExecuter.execute(ExecuteAtMostOnceTaskExecuter.java:43)
	at org.gradle.api.internal.tasks.execution.CatchExceptionTaskExecuter.execute(CatchExceptionTaskExecuter.java:34)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker$1.run(DefaultTaskGraphExecuter.java:256)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:249)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:238)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:104)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:98)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.execute(DefaultTaskExecutionPlan.java:663)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.executeWithTask(DefaultTaskExecutionPlan.java:596)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker.run(DefaultTaskPlanExecutor.java:98)
	at org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:63)
	at org.gradle.internal.concurrent.ManagedExecutorImpl$1.run(ManagedExecutorImpl.java:46)
	at org.gradle.internal.concurrent.ThreadFactoryImpl$ManagedThreadRunnable.run(ThreadFactoryImpl.java:55)
Caused by: org.gradle.api.GradleException: There were failing tests. See the report at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/b1ef04f7/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>
	at org.gradle.api.tasks.testing.AbstractTestTask.handleTestFailures(AbstractTestTask.java:612)
	at org.gradle.api.tasks.testing.AbstractTestTask.executeTests(AbstractTestTask.java:484)
	at org.gradle.api.tasks.testing.Test.executeTests(Test.java:583)
	at org.gradle.internal.reflect.JavaMethod.invoke(JavaMethod.java:73)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.doExecute(StandardTaskAction.java:46)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:39)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:26)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:794)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:761)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter$1.run(ExecuteActionsTaskExecuter.java:124)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeAction(ExecuteActionsTaskExecuter.java:113)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:95)
	... 31 more


* Get more help at https://help.gradle.org

2018-05-03 00:27:14,501 b1ef04f7 MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-03 00:27:14,502 b1ef04f7 MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-05-03 00:27:14,503 b1ef04f7 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525305265974> delete -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml> --ignore-not-found
2018-05-03 00:27:15,206 b1ef04f7 MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 780, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-03 00:27:15,206 b1ef04f7 MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-05-03 00:27:15,224 b1ef04f7 MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-05-03 00:27:15,225 b1ef04f7 MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/b1ef04f7/pkb.log>
2018-05-03 00:27:15,226 b1ef04f7 MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/b1ef04f7/completion_statuses.json>
Build step 'Execute shell' marked build as failure

Build failed in Jenkins: beam_PerformanceTests_MongoDBIO_IT #122

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/122/display/redirect?page=changes>

Changes:

[github] Count compressed records with a long to avoid overflow

[alan] [BEAM-4218] Fix failing javadoc build

[github] Fix DynamicDestinations documentation

------------------------------------------
[...truncated 90.54 KB...]
                              ^
    (see http://errorprone.info/bugpattern/TypeParameterUnusedInFormals)
<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/a1b20384/beam/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnInvoker.java>:86: warning: [TypeParameterUnusedInFormals] Declaring a type parameter that is only used in the return type is a misuse of generics: operations on the type parameter are unchecked, it hides unsafe casts at invocations of the method, and it interacts badly with method overload resolution.
  <RestrictionT, TrackerT extends RestrictionTracker<RestrictionT, ?>> TrackerT invokeNewTracker(
                                                                                ^
    (see http://errorprone.info/bugpattern/TypeParameterUnusedInFormals)
<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/a1b20384/beam/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnInvoker.java>:285: warning: [MissingOverride] restrictionTracker implements method in ArgumentProvider; expected @Override
    public RestrictionTracker<?, ?> restrictionTracker() {
                                    ^
    (see http://errorprone.info/bugpattern/MissingOverride)
  Did you mean '@Override public RestrictionTracker<?, ?> restrictionTracker() {'?
<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/a1b20384/beam/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Regex.java>:428: warning: [MissingOverride] expand implements method in PTransform; expected @Override
    public PCollection<String> expand(PCollection<String> in) {
                               ^
    (see http://errorprone.info/bugpattern/MissingOverride)
  Did you mean '@Override public PCollection<String> expand(PCollection<String> in) {'?
<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/a1b20384/beam/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Regex.java>:470: warning: [MissingOverride] expand implements method in PTransform; expected @Override
    public PCollection<String> expand(PCollection<String> in) {
                               ^
    (see http://errorprone.info/bugpattern/MissingOverride)
  Did you mean '@Override public PCollection<String> expand(PCollection<String> in) {'?
<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/a1b20384/beam/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Regex.java>:512: warning: [MissingOverride] expand implements method in PTransform; expected @Override
    public PCollection<List<String>> expand(PCollection<String> in) {
                                     ^
    (see http://errorprone.info/bugpattern/MissingOverride)
  Did you mean '@Override public PCollection<List<String>> expand(PCollection<String> in) {'?
<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/a1b20384/beam/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Regex.java>:564: warning: [MissingOverride] expand implements method in PTransform; expected @Override
    public PCollection<KV<String, String>> expand(PCollection<String> in) {
                                           ^
    (see http://errorprone.info/bugpattern/MissingOverride)
  Did you mean '@Override public PCollection<KV<String, String>> expand(PCollection<String> in) {'?
<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/a1b20384/beam/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Regex.java>:610: warning: [MissingOverride] expand implements method in PTransform; expected @Override
    public PCollection<KV<String, String>> expand(PCollection<String> in) {
                                           ^
    (see http://errorprone.info/bugpattern/MissingOverride)
  Did you mean '@Override public PCollection<KV<String, String>> expand(PCollection<String> in) {'?
<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/a1b20384/beam/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Regex.java>:652: warning: [MissingOverride] expand implements method in PTransform; expected @Override
    public PCollection<String> expand(PCollection<String> in) {
                               ^
    (see http://errorprone.info/bugpattern/MissingOverride)
  Did you mean '@Override public PCollection<String> expand(PCollection<String> in) {'?
<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/a1b20384/beam/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Regex.java>:694: warning: [MissingOverride] expand implements method in PTransform; expected @Override
    public PCollection<String> expand(PCollection<String> in) {
                               ^
    (see http://errorprone.info/bugpattern/MissingOverride)
  Did you mean '@Override public PCollection<String> expand(PCollection<String> in) {'?
<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/a1b20384/beam/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Regex.java>:735: warning: [MissingOverride] expand implements method in PTransform; expected @Override
    public PCollection<List<String>> expand(PCollection<String> in) {
                                     ^
    (see http://errorprone.info/bugpattern/MissingOverride)
  Did you mean '@Override public PCollection<List<String>> expand(PCollection<String> in) {'?
<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/a1b20384/beam/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Regex.java>:788: warning: [MissingOverride] expand implements method in PTransform; expected @Override
    public PCollection<KV<String, String>> expand(PCollection<String> in) {
                                           ^
    (see http://errorprone.info/bugpattern/MissingOverride)
  Did you mean '@Override public PCollection<KV<String, String>> expand(PCollection<String> in) {'?
<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/a1b20384/beam/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Regex.java>:835: warning: [MissingOverride] expand implements method in PTransform; expected @Override
    public PCollection<KV<String, String>> expand(PCollection<String> in) {
                                           ^
    (see http://errorprone.info/bugpattern/MissingOverride)
  Did you mean '@Override public PCollection<KV<String, String>> expand(PCollection<String> in) {'?
<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/a1b20384/beam/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Regex.java>:877: warning: [MissingOverride] expand implements method in PTransform; expected @Override
    public PCollection<String> expand(PCollection<String> in) {
                               ^
    (see http://errorprone.info/bugpattern/MissingOverride)
  Did you mean '@Override public PCollection<String> expand(PCollection<String> in) {'?
<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/a1b20384/beam/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Regex.java>:916: warning: [MissingOverride] expand implements method in PTransform; expected @Override
    public PCollection<String> expand(PCollection<String> in) {
                               ^
    (see http://errorprone.info/bugpattern/MissingOverride)
  Did you mean '@Override public PCollection<String> expand(PCollection<String> in) {'?
<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/a1b20384/beam/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Regex.java>:957: warning: [MissingOverride] expand implements method in PTransform; expected @Override
    public PCollection<String> expand(PCollection<String> in) {
                               ^
    (see http://errorprone.info/bugpattern/MissingOverride)
  Did you mean '@Override public PCollection<String> expand(PCollection<String> in) {'?
<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/a1b20384/beam/sdks/java/core/src/main/java/org/apache/beam/sdk/util/RowJsonDeserializer.java>:76: warning: [MutableConstantField] Constant field declarations should use the immutable type (such as ImmutableList) instead of the general collection interface type (such as List)
  private static final Map<TypeName, ValueExtractor<?>> JSON_VALUE_GETTERS =
                          ^
    (see http://errorprone.info/bugpattern/MutableConstantField)
  Did you mean 'private static final ImmutableMap<TypeName, ValueExtractor<?>> JSON_VALUE_GETTERS ='?
<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/a1b20384/beam/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WeightedValue.java>:39: warning: [MissingOverride] getWeight implements method in Weighted; expected @Override
  public long getWeight() {
              ^
    (see http://errorprone.info/bugpattern/MissingOverride)
  Did you mean '@Override public long getWeight() {'?
/home/jenkins/.m2/repository/net/bytebuddy/byte-buddy/1.7.10/byte-buddy-1.7.10.jar(/net/bytebuddy/description/type/TypeDescription$AbstractBase.class): warning: Cannot find annotation method 'value()' in type 'SuppressFBWarnings'
/home/jenkins/.m2/repository/net/bytebuddy/byte-buddy/1.7.10/byte-buddy-1.7.10.jar(/net/bytebuddy/description/type/TypeDescription$AbstractBase.class): warning: Cannot find annotation method 'justification()' in type 'SuppressFBWarnings'
/home/jenkins/.m2/repository/net/bytebuddy/byte-buddy/1.7.10/byte-buddy-1.7.10.jar(/net/bytebuddy/dynamic/scaffold/MethodGraph$Compiler.class): warning: Cannot find annotation method 'value()' in type 'SuppressFBWarnings'
/home/jenkins/.m2/repository/net/bytebuddy/byte-buddy/1.7.10/byte-buddy-1.7.10.jar(/net/bytebuddy/dynamic/scaffold/MethodGraph$Compiler.class): warning: Cannot find annotation method 'justification()' in type 'SuppressFBWarnings'
/home/jenkins/.m2/repository/net/bytebuddy/byte-buddy/1.7.10/byte-buddy-1.7.10.jar(/net/bytebuddy/implementation/auxiliary/AuxiliaryType.class): warning: Cannot find annotation method 'value()' in type 'SuppressFBWarnings'
/home/jenkins/.m2/repository/net/bytebuddy/byte-buddy/1.7.10/byte-buddy-1.7.10.jar(/net/bytebuddy/implementation/auxiliary/AuxiliaryType.class): warning: Cannot find annotation method 'justification()' in type 'SuppressFBWarnings'
/home/jenkins/.m2/repository/net/bytebuddy/byte-buddy/1.7.10/byte-buddy-1.7.10.jar(/net/bytebuddy/ClassFileVersion.class): warning: Cannot find annotation method 'value()' in type 'SuppressFBWarnings'
/home/jenkins/.m2/repository/net/bytebuddy/byte-buddy/1.7.10/byte-buddy-1.7.10.jar(/net/bytebuddy/ClassFileVersion.class): warning: Cannot find annotation method 'justification()' in type 'SuppressFBWarnings'
Note: Some input files use or override a deprecated API.
Note: Recompile with -Xlint:deprecation for details.
Note: Some input files use unchecked or unsafe operations.
Note: Recompile with -Xlint:unchecked for details.
100 warnings
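
Most of the compiler output above is errorprone's MissingOverride check firing on PTransform.expand implementations in Regex.java; the suggested fix is exactly the printed '@Override public ... expand(...)' line. A small, hypothetical transform showing the pattern the hint asks for (not code from Regex.java):

    import org.apache.beam.sdk.transforms.MapElements;
    import org.apache.beam.sdk.transforms.PTransform;
    import org.apache.beam.sdk.values.PCollection;
    import org.apache.beam.sdk.values.TypeDescriptors;

    // Hypothetical transform; only the @Override annotation is the point here.
    class UppercaseAll extends PTransform<PCollection<String>, PCollection<String>> {
      @Override  // adding this annotation silences the MissingOverride warning
      public PCollection<String> expand(PCollection<String> in) {
        return in.apply(
            MapElements.into(TypeDescriptors.strings())
                .via((String s) -> s.toUpperCase()));
      }
    }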

Deprecated Gradle features were used in this build, making it incompatible with Gradle 5.0.
See https://docs.gradle.org/4.7/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1m 0s
130 actionable tasks: 124 executed, 4 from cache, 2 up-to-date

Publishing build scan...
https://gradle.com/s/6tcnacmb2fs3y


STDERR: 
FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/a1b20384/beam/sdks/python/build.gradle>' line: 36

* What went wrong:
Execution failed for task ':beam-sdks-python:setupVirtualenv'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Exception is:
org.gradle.api.tasks.TaskExecutionException: Execution failed for task ':beam-sdks-python:setupVirtualenv'.
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:103)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.execute(ExecuteActionsTaskExecuter.java:73)
	at org.gradle.api.internal.tasks.execution.OutputDirectoryCreatingTaskExecuter.execute(OutputDirectoryCreatingTaskExecuter.java:51)
	at org.gradle.api.internal.tasks.execution.SkipCachedTaskExecuter.execute(SkipCachedTaskExecuter.java:105)
	at org.gradle.api.internal.tasks.execution.SkipUpToDateTaskExecuter.execute(SkipUpToDateTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.ResolveTaskOutputCachingStateExecuter.execute(ResolveTaskOutputCachingStateExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ResolveBuildCacheKeyExecuter.execute(ResolveBuildCacheKeyExecuter.java:66)
	at org.gradle.api.internal.tasks.execution.ValidatingTaskExecuter.execute(ValidatingTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipEmptySourceFilesTaskExecuter.execute(SkipEmptySourceFilesTaskExecuter.java:101)
	at org.gradle.api.internal.tasks.execution.FinalizeInputFilePropertiesTaskExecuter.execute(FinalizeInputFilePropertiesTaskExecuter.java:44)
	at org.gradle.api.internal.tasks.execution.CleanupStaleOutputsExecuter.execute(CleanupStaleOutputsExecuter.java:91)
	at org.gradle.api.internal.tasks.execution.ResolveTaskArtifactStateTaskExecuter.execute(ResolveTaskArtifactStateTaskExecuter.java:62)
	at org.gradle.api.internal.tasks.execution.SkipTaskWithNoActionsExecuter.execute(SkipTaskWithNoActionsExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipOnlyIfTaskExecuter.execute(SkipOnlyIfTaskExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ExecuteAtMostOnceTaskExecuter.execute(ExecuteAtMostOnceTaskExecuter.java:43)
	at org.gradle.api.internal.tasks.execution.CatchExceptionTaskExecuter.execute(CatchExceptionTaskExecuter.java:34)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker$1.run(DefaultTaskGraphExecuter.java:256)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:249)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:238)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:104)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:98)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.execute(DefaultTaskExecutionPlan.java:663)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.executeWithTask(DefaultTaskExecutionPlan.java:596)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker.run(DefaultTaskPlanExecutor.java:98)
	at org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:63)
	at org.gradle.internal.concurrent.ManagedExecutorImpl$1.run(ManagedExecutorImpl.java:46)
	at org.gradle.internal.concurrent.ThreadFactoryImpl$ManagedThreadRunnable.run(ThreadFactoryImpl.java:55)
Caused by: org.gradle.process.internal.ExecException: Process 'command 'sh'' finished with non-zero exit value 1
	at org.gradle.process.internal.DefaultExecHandle$ExecResultImpl.assertNormalExitValue(DefaultExecHandle.java:389)
	at org.gradle.process.internal.DefaultExecAction.execute(DefaultExecAction.java:36)
	at org.gradle.api.internal.file.DefaultFileOperations.exec(DefaultFileOperations.java:192)
	at org.gradle.api.internal.project.DefaultProject.exec(DefaultProject.java:1087)
	at org.gradle.groovy.scripts.DefaultScript.exec(DefaultScript.java:253)
	at org.gradle.internal.metaobject.BeanDynamicObject$MetaClassAdapter.invokeMethod(BeanDynamicObject.java:479)
	at org.gradle.internal.metaobject.BeanDynamicObject.tryInvokeMethod(BeanDynamicObject.java:191)
	at org.gradle.groovy.scripts.BasicScript$ScriptDynamicObject.tryInvokeMethod(BasicScript.java:130)
	at org.gradle.internal.metaobject.ConfigureDelegate.invokeMethod(ConfigureDelegate.java:78)
	at build_68n7fb5yqocfywj1p717ve33f$_run_closure2$_closure16.doCall(<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/a1b20384/beam/sdks/python/build.gradle>:36)
	at org.gradle.api.internal.AbstractTask$ClosureTaskAction.execute(AbstractTask.java:732)
	at org.gradle.api.internal.AbstractTask$ClosureTaskAction.execute(AbstractTask.java:705)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter$1.run(ExecuteActionsTaskExecuter.java:124)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeAction(ExecuteActionsTaskExecuter.java:113)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:95)
	... 31 more


* Get more help at https://help.gradle.org

2018-05-02 18:01:48,787 a1b20384 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525278658567> create -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml>
2018-05-02 18:01:49,018 a1b20384 MainThread beam_integration_benchmark(1/1) INFO     Running benchmark beam_integration_benchmark
2018-05-02 18:01:49,022 a1b20384 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525278658567> get svc mongo-load-balancer-service -ojsonpath={.status.loadBalancer.ingress[0].ip}
2018-05-02 18:01:59,207 a1b20384 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525278658567> get svc mongo-load-balancer-service -ojsonpath={.status.loadBalancer.ingress[0].ip}
2018-05-02 18:02:09,370 a1b20384 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525278658567> get svc mongo-load-balancer-service -ojsonpath={.status.loadBalancer.ingress[0].ip}
2018-05-02 18:02:19,513 a1b20384 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525278658567> get svc mongo-load-balancer-service -ojsonpath={.status.loadBalancer.ingress[0].ip}
2018-05-02 18:02:29,650 a1b20384 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525278658567> get svc mongo-load-balancer-service -ojsonpath={.status.loadBalancer.ingress[0].ip}
2018-05-02 18:02:39,781 a1b20384 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525278658567> get svc mongo-load-balancer-service -ojsonpath={.status.loadBalancer.ingress[0].ip}
2018-05-02 18:02:49,916 a1b20384 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525278658567> get svc mongo-load-balancer-service -ojsonpath={.status.loadBalancer.ingress[0].ip}
2018-05-02 18:03:00,049 a1b20384 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525278658567> get svc mongo-load-balancer-service -ojsonpath={.status.loadBalancer.ingress[0].ip}
2018-05-02 18:03:10,177 a1b20384 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525278658567> get svc mongo-load-balancer-service -ojsonpath={.status.loadBalancer.ingress[0].ip}
2018-05-02 18:03:20,313 a1b20384 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525278658567> get svc mongo-load-balancer-service -ojsonpath={.status.loadBalancer.ingress[0].ip}
2018-05-02 18:03:30,441 a1b20384 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525278658567> get svc mongo-load-balancer-service -ojsonpath={.status.loadBalancer.ingress[0].ip}
2018-05-02 18:03:40,580 a1b20384 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525278658567> get svc mongo-load-balancer-service -ojsonpath={.status.loadBalancer.ingress[0].ip}
2018-05-02 18:03:50,727 a1b20384 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525278658567> get svc mongo-load-balancer-service -ojsonpath={.status.loadBalancer.ingress[0].ip}
2018-05-02 18:04:00,889 a1b20384 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525278658567> get svc mongo-load-balancer-service -ojsonpath={.status.loadBalancer.ingress[0].ip}
2018-05-02 18:04:11,020 a1b20384 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525278658567> get svc mongo-load-balancer-service -ojsonpath={.status.loadBalancer.ingress[0].ip}
2018-05-02 18:04:21,149 a1b20384 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525278658567> get svc mongo-load-balancer-service -ojsonpath={.status.loadBalancer.ingress[0].ip}
2018-05-02 18:04:31,282 a1b20384 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525278658567> get svc mongo-load-balancer-service -ojsonpath={.status.loadBalancer.ingress[0].ip}
2018-05-02 18:04:41,417 a1b20384 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525278658567> get svc mongo-load-balancer-service -ojsonpath={.status.loadBalancer.ingress[0].ip}
2018-05-02 18:04:51,581 a1b20384 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525278658567> get svc mongo-load-balancer-service -ojsonpath={.status.loadBalancer.ingress[0].ip}
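
The repeated kubectl calls above are the benchmark polling mongo-load-balancer-service roughly every 10 seconds, waiting for Kubernetes to assign the LoadBalancer an external IP. A minimal sketch of such a wait loop, assuming plain Python with subprocess and the same jsonpath expression (the helper name and timeout are illustrative, not PerfKitBenchmarker's actual code):

    import subprocess
    import time

    def wait_for_load_balancer_ip(kubeconfig, service="mongo-load-balancer-service",
                                  timeout_secs=300, poll_secs=10):
        """Poll kubectl until the service reports an external ingress IP."""
        jsonpath = "{.status.loadBalancer.ingress[0].ip}"
        deadline = time.time() + timeout_secs
        while time.time() < deadline:
            ip = subprocess.check_output(
                ["kubectl", "--kubeconfig=" + kubeconfig, "get", "svc", service,
                 "-o", "jsonpath=" + jsonpath]).decode().strip()
            if ip:
                return ip
            time.sleep(poll_secs)  # LoadBalancer provisioning can take several minutes
        raise RuntimeError("no external IP for %s after %d s" % (service, timeout_secs))
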
2018-05-02 18:04:51,773 a1b20384 MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 148, in Run
    dynamic_pipeline_options)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/beam_pipeline_options.py",> line 100, in GenerateAllPipelineOptions
    EvaluateDynamicPipelineOptions(dynamic_pipeline_options))
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/beam_pipeline_options.py",> line 67, in EvaluateDynamicPipelineOptions
    argValue = RetrieveLoadBalancerIp(optionDescriptor)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/beam_pipeline_options.py",> line 153, in RetrieveLoadBalancerIp
    raise "Could not retrieve LoadBalancer IP address"
TypeError: exceptions must be old-style classes or derived from BaseException, not str
2018-05-02 18:04:51,775 a1b20384 MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-05-02 18:04:51,775 a1b20384 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525278658567> delete -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml> --ignore-not-found
2018-05-02 18:04:52,351 a1b20384 MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 780, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 148, in Run
    dynamic_pipeline_options)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/beam_pipeline_options.py",> line 100, in GenerateAllPipelineOptions
    EvaluateDynamicPipelineOptions(dynamic_pipeline_options))
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/beam_pipeline_options.py",> line 67, in EvaluateDynamicPipelineOptions
    argValue = RetrieveLoadBalancerIp(optionDescriptor)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/beam_pipeline_options.py",> line 153, in RetrieveLoadBalancerIp
    raise "Could not retrieve LoadBalancer IP address"
TypeError: exceptions must be old-style classes or derived from BaseException, not str
2018-05-02 18:04:52,352 a1b20384 MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-05-02 18:04:52,392 a1b20384 MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-05-02 18:04:52,393 a1b20384 MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/a1b20384/pkb.log>
2018-05-02 18:04:52,393 a1b20384 MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/a1b20384/completion_statuses.json>
Build step 'Execute shell' marked build as failure
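
Worth noting: the TypeError in the tracebacks above is a secondary bug that hides the real failure. When no LoadBalancer IP ever appears, RetrieveLoadBalancerIp executes `raise "Could not retrieve LoadBalancer IP address"`, and Python refuses to raise a plain string. A minimal sketch of the fix, assuming any Exception subclass is acceptable (the class name below is made up for illustration):

    class LoadBalancerIpError(Exception):
        """Illustrative exception type; any BaseException subclass works."""

    def retrieve_load_balancer_ip(run_kubectl):
        """Simplified failure path: raise a real exception instead of a str."""
        ip = run_kubectl()  # empty string while the service has no ingress IP
        if not ip:
            # The original `raise "..."` is what produced the TypeError above.
            raise LoadBalancerIpError("Could not retrieve LoadBalancer IP address")
        return ip
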

Build failed in Jenkins: beam_PerformanceTests_MongoDBIO_IT #121

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/121/display/redirect>

------------------------------------------
[...truncated 528.50 KB...]
    java.lang.RuntimeException: com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches ReadPreferenceServerSelector{readPreference=primary}. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.188.178.165:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getReadConnectionSource(ClusterBinding.java:63)
    	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:89)
    	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:84)
    	at com.mongodb.operation.CommandReadOperation.execute(CommandReadOperation.java:55)
    	at com.mongodb.Mongo.execute(Mongo.java:772)
    	at com.mongodb.Mongo$2.execute(Mongo.java:759)
    	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:130)
    	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:124)
    	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:114)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbSource.split(MongoDbIO.java:332)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.splitAndValidate(WorkerCustomSources.java:275)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitTyped(WorkerCustomSources.java:197)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitWithApiLimit(WorkerCustomSources.java:181)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplit(WorkerCustomSources.java:160)
    	at com.google.cloud.dataflow.worker.WorkerCustomSourceOperationExecutor.execute(WorkerCustomSourceOperationExecutor.java:77)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
    	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    	at java.lang.Thread.run(Thread.java:745)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches ReadPreferenceServerSelector{readPreference=primary}. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.188.178.165:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getReadConnectionSource(ClusterBinding.java:63)
    	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:89)
    	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:84)
    	at com.mongodb.operation.CommandReadOperation.execute(CommandReadOperation.java:55)
    	at com.mongodb.Mongo.execute(Mongo.java:772)
    	at com.mongodb.Mongo$2.execute(Mongo.java:759)
    	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:130)
    	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:124)
    	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:114)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbSource.split(MongoDbIO.java:332)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.splitAndValidate(WorkerCustomSources.java:275)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitTyped(WorkerCustomSources.java:197)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitWithApiLimit(WorkerCustomSources.java:181)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplit(WorkerCustomSources.java:160)
    	at com.google.cloud.dataflow.worker.WorkerCustomSourceOperationExecutor.execute(WorkerCustomSourceOperationExecutor.java:77)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
    	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    	at java.lang.Thread.run(Thread.java:745)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches ReadPreferenceServerSelector{readPreference=primary}. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.188.178.165:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getReadConnectionSource(ClusterBinding.java:63)
    	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:89)
    	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:84)
    	at com.mongodb.operation.CommandReadOperation.execute(CommandReadOperation.java:55)
    	at com.mongodb.Mongo.execute(Mongo.java:772)
    	at com.mongodb.Mongo$2.execute(Mongo.java:759)
    	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:130)
    	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:124)
    	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:114)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbSource.split(MongoDbIO.java:332)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.splitAndValidate(WorkerCustomSources.java:275)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitTyped(WorkerCustomSources.java:197)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitWithApiLimit(WorkerCustomSources.java:181)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplit(WorkerCustomSources.java:160)
    	at com.google.cloud.dataflow.worker.WorkerCustomSourceOperationExecutor.execute(WorkerCustomSourceOperationExecutor.java:77)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
    	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    	at java.lang.Thread.run(Thread.java:745)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches ReadPreferenceServerSelector{readPreference=primary}. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.188.178.165:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getReadConnectionSource(ClusterBinding.java:63)
    	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:89)
    	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:84)
    	at com.mongodb.operation.CommandReadOperation.execute(CommandReadOperation.java:55)
    	at com.mongodb.Mongo.execute(Mongo.java:772)
    	at com.mongodb.Mongo$2.execute(Mongo.java:759)
    	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:130)
    	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:124)
    	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:114)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbSource.split(MongoDbIO.java:332)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.splitAndValidate(WorkerCustomSources.java:275)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitTyped(WorkerCustomSources.java:197)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitWithApiLimit(WorkerCustomSources.java:181)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplit(WorkerCustomSources.java:160)
    	at com.google.cloud.dataflow.worker.WorkerCustomSourceOperationExecutor.execute(WorkerCustomSourceOperationExecutor.java:77)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
    	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    	at java.lang.Thread.run(Thread.java:745)
    Workflow failed. Causes: S03:Read all documents/Read(BoundedMongoDbSource)+Map documents to Strings/Map+Calculate hashcode/WithKeys/AddKeys/Map+Calculate hashcode/Combine.perKey(Hashing)/GroupByKey+Calculate hashcode/Combine.perKey(Hashing)/Combine.GroupedValues/Partial+Calculate hashcode/Combine.perKey(Hashing)/GroupByKey/Reify+Calculate hashcode/Combine.perKey(Hashing)/GroupByKey/Write failed.
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
        at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:346)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:328)
        at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(MongoDBIOIT.java:131)

1 test completed, 1 failed
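
All of the timeouts above are the same symptom: the Dataflow workers calling MongoDbIO's split() cannot open a socket to 35.188.178.165:27017 within the driver's 30 s server-selection window, so the problem is reachability of the Kubernetes LoadBalancer rather than the IO itself. As a hypothetical pre-flight check (pymongo is not part of this Java test; the host and port are taken from the stack traces above), one could verify the endpoint answers a ping before submitting the job:

    from pymongo import MongoClient
    from pymongo.errors import ServerSelectionTimeoutError

    def mongo_reachable(host, port=27017, timeout_ms=5000):
        """Return True if a MongoDB server answers a ping within timeout_ms."""
        client = MongoClient(host, port, serverSelectionTimeoutMS=timeout_ms)
        try:
            client.admin.command("ping")
            return True
        except ServerSelectionTimeoutError:
            return False

    if __name__ == "__main__":
        # 35.188.178.165:27017 is the address reported in this run's log.
        print(mongo_reachable("35.188.178.165"))
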
Finished generating test XML results (0.047 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/8340e907/beam/sdks/java/io/mongodb/build/test-results/integrationTest>
Generating HTML test report...
Finished generating test html results (0.043 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/8340e907/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest>
:beam-sdks-java-io-mongodb:integrationTest (Thread[Task worker for ':' Thread 12,5,main]) completed. Took 11 mins 27.447 secs.

Deprecated Gradle features were used in this build, making it incompatible with Gradle 5.0.
See https://docs.gradle.org/4.7/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 13m 7s
55 actionable tasks: 32 executed, 23 up-to-date

Publishing build scan...
https://gradle.com/s/ctwbboy4w2g3m


STDERR: 
FAILURE: Build failed with an exception.

* What went wrong:
Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
> There were failing tests. See the report at: file://<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/8340e907/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>

* Try:
Run with --debug option to get more log output. Run with --scan to get full insights.

* Exception is:
org.gradle.api.tasks.TaskExecutionException: Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:103)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.execute(ExecuteActionsTaskExecuter.java:73)
	at org.gradle.api.internal.tasks.execution.OutputDirectoryCreatingTaskExecuter.execute(OutputDirectoryCreatingTaskExecuter.java:51)
	at org.gradle.api.internal.tasks.execution.SkipCachedTaskExecuter.execute(SkipCachedTaskExecuter.java:105)
	at org.gradle.api.internal.tasks.execution.SkipUpToDateTaskExecuter.execute(SkipUpToDateTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.ResolveTaskOutputCachingStateExecuter.execute(ResolveTaskOutputCachingStateExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ResolveBuildCacheKeyExecuter.execute(ResolveBuildCacheKeyExecuter.java:66)
	at org.gradle.api.internal.tasks.execution.ValidatingTaskExecuter.execute(ValidatingTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipEmptySourceFilesTaskExecuter.execute(SkipEmptySourceFilesTaskExecuter.java:101)
	at org.gradle.api.internal.tasks.execution.FinalizeInputFilePropertiesTaskExecuter.execute(FinalizeInputFilePropertiesTaskExecuter.java:44)
	at org.gradle.api.internal.tasks.execution.CleanupStaleOutputsExecuter.execute(CleanupStaleOutputsExecuter.java:91)
	at org.gradle.api.internal.tasks.execution.ResolveTaskArtifactStateTaskExecuter.execute(ResolveTaskArtifactStateTaskExecuter.java:62)
	at org.gradle.api.internal.tasks.execution.SkipTaskWithNoActionsExecuter.execute(SkipTaskWithNoActionsExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipOnlyIfTaskExecuter.execute(SkipOnlyIfTaskExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ExecuteAtMostOnceTaskExecuter.execute(ExecuteAtMostOnceTaskExecuter.java:43)
	at org.gradle.api.internal.tasks.execution.CatchExceptionTaskExecuter.execute(CatchExceptionTaskExecuter.java:34)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker$1.run(DefaultTaskGraphExecuter.java:256)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:249)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:238)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:104)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:98)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.execute(DefaultTaskExecutionPlan.java:663)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.executeWithTask(DefaultTaskExecutionPlan.java:596)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker.run(DefaultTaskPlanExecutor.java:98)
	at org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:63)
	at org.gradle.internal.concurrent.ManagedExecutorImpl$1.run(ManagedExecutorImpl.java:46)
	at org.gradle.internal.concurrent.ThreadFactoryImpl$ManagedThreadRunnable.run(ThreadFactoryImpl.java:55)
Caused by: org.gradle.api.GradleException: There were failing tests. See the report at: file://<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/8340e907/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>
	at org.gradle.api.tasks.testing.AbstractTestTask.handleTestFailures(AbstractTestTask.java:612)
	at org.gradle.api.tasks.testing.AbstractTestTask.executeTests(AbstractTestTask.java:484)
	at org.gradle.api.tasks.testing.Test.executeTests(Test.java:583)
	at org.gradle.internal.reflect.JavaMethod.invoke(JavaMethod.java:73)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.doExecute(StandardTaskAction.java:46)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:39)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:26)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:794)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:761)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter$1.run(ExecuteActionsTaskExecuter.java:124)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeAction(ExecuteActionsTaskExecuter.java:113)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:95)
	... 31 more


* Get more help at https://help.gradle.org

2018-05-02 12:16:00,430 8340e907 MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-02 12:16:00,431 8340e907 MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-05-02 12:16:00,432 8340e907 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525255290434> delete -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml> --ignore-not-found
2018-05-02 12:16:00,853 8340e907 MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 780, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-02 12:16:00,853 8340e907 MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-05-02 12:16:00,887 8340e907 MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-05-02 12:16:00,888 8340e907 MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/8340e907/pkb.log>
2018-05-02 12:16:00,888 8340e907 MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/8340e907/completion_statuses.json>
Build step 'Execute shell' marked build as failure

Build failed in Jenkins: beam_PerformanceTests_MongoDBIO_IT #120

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/120/display/redirect?page=changes>

Changes:

[github] Update perf.go: only trigger on processing bundles

------------------------------------------
[...truncated 457.06 KB...]
    	at com.mongodb.connection.WriteCommandProtocol.receiveMessage(WriteCommandProtocol.java:234)
    	at com.mongodb.connection.WriteCommandProtocol.execute(WriteCommandProtocol.java:104)
    	at com.mongodb.connection.InsertCommandProtocol.execute(InsertCommandProtocol.java:67)
    	at com.mongodb.connection.InsertCommandProtocol.execute(InsertCommandProtocol.java:37)
    	at com.mongodb.connection.DefaultServer$DefaultServerProtocolExecutor.execute(DefaultServer.java:159)
    	at com.mongodb.connection.DefaultServerConnection.executeProtocol(DefaultServerConnection.java:286)
    	at com.mongodb.connection.DefaultServerConnection.insertCommand(DefaultServerConnection.java:115)
    	at com.mongodb.operation.MixedBulkWriteOperation$Run$2.executeWriteCommandProtocol(MixedBulkWriteOperation.java:455)
    	at com.mongodb.operation.MixedBulkWriteOperation$Run$RunExecutor.execute(MixedBulkWriteOperation.java:646)
    	at com.mongodb.operation.MixedBulkWriteOperation$Run.execute(MixedBulkWriteOperation.java:401)
    	at com.mongodb.operation.MixedBulkWriteOperation$1.call(MixedBulkWriteOperation.java:179)
    	at com.mongodb.operation.MixedBulkWriteOperation$1.call(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.OperationHelper.withConnectionSource(OperationHelper.java:230)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:221)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn$DoFnInvoker.invokeProcessElement(Unknown Source)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:177)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:141)
    	at com.google.cloud.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:323)
    	at com.google.cloud.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:43)
    	at com.google.cloud.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:48)
    	at com.google.cloud.dataflow.worker.SimpleParDoFn$1.output(SimpleParDoFn.java:271)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:211)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.access$700(SimpleDoFnRunner.java:66)
    	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:436)
    	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:424)
    	at org.apache.beam.sdk.transforms.MapElements$1.processElement(MapElements.java:129)
    	at org.apache.beam.sdk.transforms.MapElements$1$DoFnInvoker.invokeProcessElement(Unknown Source)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:177)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:141)
    	at com.google.cloud.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:323)
    	at com.google.cloud.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:43)
    	at com.google.cloud.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:48)
    	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.runReadLoop(ReadOperation.java:200)
    	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.start(ReadOperation.java:158)
    	at com.google.cloud.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:75)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
    	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    	at java.lang.Thread.run(Thread.java:745)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=104.198.23.164:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=104.198.23.164:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=104.198.23.164:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
    	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
    	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
    	at com.mongodb.Mongo.execute(Mongo.java:781)
    	at com.mongodb.Mongo$2.execute(Mongo.java:764)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
    	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
    Workflow failed. Causes: S01:Generate sequence/Read(BoundedCountingSource)+Produce documents/Map+Write documents to MongoDB/ParDo(Write) failed., A work item was attempted 4 times without success. Each time the worker eventually lost contact with the service. The work item was attempted on: 
      mongodbioit0testwriteandr-05012306-iypw-harness-g9b5,
      mongodbioit0testwriteandr-05012306-iypw-harness-g9b5,
      mongodbioit0testwriteandr-05012306-iypw-harness-g9b5,
      mongodbioit0testwriteandr-05012306-iypw-harness-g9b5
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
        at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:346)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:328)
        at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(MongoDBIOIT.java:118)

    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=104.198.23.164:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
        at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
        at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
        at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
        at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
        at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:158)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:133)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:128)
        at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:51)
        at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:36)
        at com.mongodb.Mongo.execute(Mongo.java:781)
        at com.mongodb.Mongo$2.execute(Mongo.java:764)
        at com.mongodb.MongoDatabaseImpl.drop(MongoDatabaseImpl.java:136)
        at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.tearDown(MongoDBIOIT.java:103)

1 test completed, 1 failed
Finished generating test XML results (0.035 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/da92d9ee/beam/sdks/java/io/mongodb/build/test-results/integrationTest>
Generating HTML test report...
Finished generating test html results (0.042 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/da92d9ee/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest>

> Task :beam-sdks-java-io-mongodb:integrationTest FAILED
:beam-sdks-java-io-mongodb:integrationTest (Thread[Task worker for ':' Thread 7,5,main]) completed. Took 7 mins 9.647 secs.

Deprecated Gradle features were used in this build, making it incompatible with Gradle 5.0.
See https://docs.gradle.org/4.7/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 8m 54s
55 actionable tasks: 32 executed, 23 up-to-date

Publishing build scan...
https://gradle.com/s/ljstwnntbguno


STDERR: 
FAILURE: Build failed with an exception.

* What went wrong:
Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
> There were failing tests. See the report at: file://<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/da92d9ee/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>

* Try:
Run with --debug option to get more log output. Run with --scan to get full insights.

* Exception is:
org.gradle.api.tasks.TaskExecutionException: Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:103)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.execute(ExecuteActionsTaskExecuter.java:73)
	at org.gradle.api.internal.tasks.execution.OutputDirectoryCreatingTaskExecuter.execute(OutputDirectoryCreatingTaskExecuter.java:51)
	at org.gradle.api.internal.tasks.execution.SkipCachedTaskExecuter.execute(SkipCachedTaskExecuter.java:105)
	at org.gradle.api.internal.tasks.execution.SkipUpToDateTaskExecuter.execute(SkipUpToDateTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.ResolveTaskOutputCachingStateExecuter.execute(ResolveTaskOutputCachingStateExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ResolveBuildCacheKeyExecuter.execute(ResolveBuildCacheKeyExecuter.java:66)
	at org.gradle.api.internal.tasks.execution.ValidatingTaskExecuter.execute(ValidatingTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipEmptySourceFilesTaskExecuter.execute(SkipEmptySourceFilesTaskExecuter.java:101)
	at org.gradle.api.internal.tasks.execution.FinalizeInputFilePropertiesTaskExecuter.execute(FinalizeInputFilePropertiesTaskExecuter.java:44)
	at org.gradle.api.internal.tasks.execution.CleanupStaleOutputsExecuter.execute(CleanupStaleOutputsExecuter.java:91)
	at org.gradle.api.internal.tasks.execution.ResolveTaskArtifactStateTaskExecuter.execute(ResolveTaskArtifactStateTaskExecuter.java:62)
	at org.gradle.api.internal.tasks.execution.SkipTaskWithNoActionsExecuter.execute(SkipTaskWithNoActionsExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipOnlyIfTaskExecuter.execute(SkipOnlyIfTaskExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ExecuteAtMostOnceTaskExecuter.execute(ExecuteAtMostOnceTaskExecuter.java:43)
	at org.gradle.api.internal.tasks.execution.CatchExceptionTaskExecuter.execute(CatchExceptionTaskExecuter.java:34)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker$1.run(DefaultTaskGraphExecuter.java:256)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:249)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:238)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:104)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:98)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.execute(DefaultTaskExecutionPlan.java:663)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.executeWithTask(DefaultTaskExecutionPlan.java:596)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker.run(DefaultTaskPlanExecutor.java:98)
	at org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:63)
	at org.gradle.internal.concurrent.ManagedExecutorImpl$1.run(ManagedExecutorImpl.java:46)
	at org.gradle.internal.concurrent.ThreadFactoryImpl$ManagedThreadRunnable.run(ThreadFactoryImpl.java:55)
Caused by: org.gradle.api.GradleException: There were failing tests. See the report at: file://<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/da92d9ee/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>
	at org.gradle.api.tasks.testing.AbstractTestTask.handleTestFailures(AbstractTestTask.java:612)
	at org.gradle.api.tasks.testing.AbstractTestTask.executeTests(AbstractTestTask.java:484)
	at org.gradle.api.tasks.testing.Test.executeTests(Test.java:583)
	at org.gradle.internal.reflect.JavaMethod.invoke(JavaMethod.java:73)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.doExecute(StandardTaskAction.java:46)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:39)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:26)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:794)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:761)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter$1.run(ExecuteActionsTaskExecuter.java:124)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeAction(ExecuteActionsTaskExecuter.java:113)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:95)
	... 31 more


* Get more help at https://help.gradle.org

2018-05-02 06:13:09,274 da92d9ee MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-02 06:13:09,275 da92d9ee MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-05-02 06:13:09,276 da92d9ee MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525237356468> delete -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml> --ignore-not-found
2018-05-02 06:13:10,169 da92d9ee MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 780, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-02 06:13:10,169 da92d9ee MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-05-02 06:13:10,215 da92d9ee MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-05-02 06:13:10,215 da92d9ee MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/da92d9ee/pkb.log>
2018-05-02 06:13:10,216 da92d9ee MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/da92d9ee/completion_statuses.json>
Build step 'Execute shell' marked build as failure
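
Note on the failure mode: the runs below in this thread all fail the same way. The MongoDB driver on the Dataflow workers never gets past the CONNECTING state against the Kubernetes LoadBalancer address, so server selection times out after the default 30 s (the MongoTimeoutException stack traces in the later builds). A quick way to separate a service-provisioning problem from a pipeline problem is a standalone connectivity probe. The sketch below is only illustrative; it assumes the same 3.x mongo-java-driver API that appears in the stack traces, and the host address is a placeholder for whatever external IP the log reports.

    import com.mongodb.MongoClient;
    import com.mongodb.MongoClientOptions;
    import com.mongodb.MongoTimeoutException;
    import com.mongodb.ServerAddress;
    import org.bson.Document;

    public class MongoConnectivityProbe {
      public static void main(String[] args) {
        // Placeholder: substitute the LoadBalancer IP printed in the build log.
        ServerAddress server = new ServerAddress("35.225.151.117", 27017);
        // Fail fast instead of waiting the default 30 s for server selection.
        MongoClientOptions options = MongoClientOptions.builder()
            .serverSelectionTimeout(5000)
            .connectTimeout(5000)
            .build();
        MongoClient client = new MongoClient(server, options);
        try {
          // 'ping' succeeds only if the driver can reach a server at all.
          Document reply = client.getDatabase("admin").runCommand(new Document("ping", 1));
          System.out.println("MongoDB reachable: " + reply.toJson());
        } catch (MongoTimeoutException e) {
          System.err.println("Server selection timed out; service unreachable: " + e.getMessage());
        } finally {
          client.close();
        }
      }
    }

If this probe times out against the external IP, the problem is the Kubernetes service or firewall, not the Beam pipeline.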

Build failed in Jenkins: beam_PerformanceTests_MongoDBIO_IT #119

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/119/display/redirect?page=changes>

Changes:

[swegner] Findbugs cleanup; no functional changes

[github] [BEAM-3042] Refactor of TransformIOCounters (performance, inheritance).

[kirpichov] [BEAM-4144] Fixes SplittableParDoProcessFnTest

------------------------------------------
[...truncated 353.21 KB...]
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getReadConnectionSource(ClusterBinding.java:63)
    	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:89)
    	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:84)
    	at com.mongodb.operation.CommandReadOperation.execute(CommandReadOperation.java:55)
    	at com.mongodb.Mongo.execute(Mongo.java:772)
    	at com.mongodb.Mongo$2.execute(Mongo.java:759)
    	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:130)
    	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:124)
    	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:114)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbSource.split(MongoDbIO.java:332)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.splitAndValidate(WorkerCustomSources.java:275)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitTyped(WorkerCustomSources.java:197)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitWithApiLimit(WorkerCustomSources.java:181)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplit(WorkerCustomSources.java:160)
    	at com.google.cloud.dataflow.worker.WorkerCustomSourceOperationExecutor.execute(WorkerCustomSourceOperationExecutor.java:77)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
    	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    	at java.lang.Thread.run(Thread.java:745)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches ReadPreferenceServerSelector{readPreference=primary}. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.202.230.167:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getReadConnectionSource(ClusterBinding.java:63)
    	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:89)
    	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:84)
    	at com.mongodb.operation.CommandReadOperation.execute(CommandReadOperation.java:55)
    	at com.mongodb.Mongo.execute(Mongo.java:772)
    	at com.mongodb.Mongo$2.execute(Mongo.java:759)
    	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:130)
    	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:124)
    	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:114)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbSource.split(MongoDbIO.java:332)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.splitAndValidate(WorkerCustomSources.java:275)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitTyped(WorkerCustomSources.java:197)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitWithApiLimit(WorkerCustomSources.java:181)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplit(WorkerCustomSources.java:160)
    	at com.google.cloud.dataflow.worker.WorkerCustomSourceOperationExecutor.execute(WorkerCustomSourceOperationExecutor.java:77)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
    	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    	at java.lang.Thread.run(Thread.java:745)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches ReadPreferenceServerSelector{readPreference=primary}. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.202.230.167:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
    	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
    	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
    	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
    	at com.mongodb.binding.ClusterBinding.getReadConnectionSource(ClusterBinding.java:63)
    	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:89)
    	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:84)
    	at com.mongodb.operation.CommandReadOperation.execute(CommandReadOperation.java:55)
    	at com.mongodb.Mongo.execute(Mongo.java:772)
    	at com.mongodb.Mongo$2.execute(Mongo.java:759)
    	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:130)
    	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:124)
    	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:114)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbSource.split(MongoDbIO.java:332)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.splitAndValidate(WorkerCustomSources.java:275)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitTyped(WorkerCustomSources.java:197)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitWithApiLimit(WorkerCustomSources.java:181)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplit(WorkerCustomSources.java:160)
    	at com.google.cloud.dataflow.worker.WorkerCustomSourceOperationExecutor.execute(WorkerCustomSourceOperationExecutor.java:77)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
    	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    	at java.lang.Thread.run(Thread.java:745)
    com.mongodb.MongoCommandException: Command failed with error 26: 'ns not found' on server 35.202.230.167:27017. The full response is { "ok" : 0.0, "errmsg" : "ns not found", "code" : 26, "codeName" : "NamespaceNotFound" }
    	at com.mongodb.connection.ProtocolHelper.getCommandFailureException(ProtocolHelper.java:115)
    	at com.mongodb.connection.CommandProtocol.execute(CommandProtocol.java:114)
    	at com.mongodb.connection.DefaultServer$DefaultServerProtocolExecutor.execute(DefaultServer.java:159)
    	at com.mongodb.connection.DefaultServerConnection.executeProtocol(DefaultServerConnection.java:286)
    	at com.mongodb.connection.DefaultServerConnection.command(DefaultServerConnection.java:173)
    	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:215)
    	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:186)
    	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:178)
    	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:91)
    	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:84)
    	at com.mongodb.operation.CommandReadOperation.execute(CommandReadOperation.java:55)
    	at com.mongodb.Mongo.execute(Mongo.java:772)
    	at com.mongodb.Mongo$2.execute(Mongo.java:759)
    	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:130)
    	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:124)
    	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:114)
    	at org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbSource.split(MongoDbIO.java:332)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.splitAndValidate(WorkerCustomSources.java:275)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitTyped(WorkerCustomSources.java:197)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitWithApiLimit(WorkerCustomSources.java:181)
    	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplit(WorkerCustomSources.java:160)
    	at com.google.cloud.dataflow.worker.WorkerCustomSourceOperationExecutor.execute(WorkerCustomSourceOperationExecutor.java:77)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
    	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    	at java.lang.Thread.run(Thread.java:745)
    Workflow failed. Causes: S03:Read all documents/Read(BoundedMongoDbSource)+Map documents to Strings/Map+Calculate hashcode/WithKeys/AddKeys/Map+Calculate hashcode/Combine.perKey(Hashing)/GroupByKey+Calculate hashcode/Combine.perKey(Hashing)/Combine.GroupedValues/Partial+Calculate hashcode/Combine.perKey(Hashing)/GroupByKey/Reify+Calculate hashcode/Combine.perKey(Hashing)/GroupByKey/Write failed.
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
        at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:346)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:328)
        at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(MongoDBIOIT.java:131)

1 test completed, 1 failed
Finished generating test XML results (0.026 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/f246d64e/beam/sdks/java/io/mongodb/build/test-results/integrationTest>
Generating HTML test report...
Finished generating test html results (0.029 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/f246d64e/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest>
:beam-sdks-java-io-mongodb:integrationTest (Thread[Task worker for ':' Thread 6,5,main]) completed. Took 10 mins 55.07 secs.

Deprecated Gradle features were used in this build, making it incompatible with Gradle 5.0.
See https://docs.gradle.org/4.7/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 11m 49s
55 actionable tasks: 19 executed, 13 from cache, 23 up-to-date

Publishing build scan...
https://gradle.com/s/4h5rxiyrfqtm2


STDERR: 
FAILURE: Build failed with an exception.

* What went wrong:
Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
> There were failing tests. See the report at: file://<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/f246d64e/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>

* Try:
Run with --debug option to get more log output. Run with --scan to get full insights.

* Exception is:
org.gradle.api.tasks.TaskExecutionException: Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:103)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.execute(ExecuteActionsTaskExecuter.java:73)
	at org.gradle.api.internal.tasks.execution.OutputDirectoryCreatingTaskExecuter.execute(OutputDirectoryCreatingTaskExecuter.java:51)
	at org.gradle.api.internal.tasks.execution.SkipCachedTaskExecuter.execute(SkipCachedTaskExecuter.java:105)
	at org.gradle.api.internal.tasks.execution.SkipUpToDateTaskExecuter.execute(SkipUpToDateTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.ResolveTaskOutputCachingStateExecuter.execute(ResolveTaskOutputCachingStateExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ResolveBuildCacheKeyExecuter.execute(ResolveBuildCacheKeyExecuter.java:66)
	at org.gradle.api.internal.tasks.execution.ValidatingTaskExecuter.execute(ValidatingTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipEmptySourceFilesTaskExecuter.execute(SkipEmptySourceFilesTaskExecuter.java:101)
	at org.gradle.api.internal.tasks.execution.FinalizeInputFilePropertiesTaskExecuter.execute(FinalizeInputFilePropertiesTaskExecuter.java:44)
	at org.gradle.api.internal.tasks.execution.CleanupStaleOutputsExecuter.execute(CleanupStaleOutputsExecuter.java:91)
	at org.gradle.api.internal.tasks.execution.ResolveTaskArtifactStateTaskExecuter.execute(ResolveTaskArtifactStateTaskExecuter.java:62)
	at org.gradle.api.internal.tasks.execution.SkipTaskWithNoActionsExecuter.execute(SkipTaskWithNoActionsExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipOnlyIfTaskExecuter.execute(SkipOnlyIfTaskExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ExecuteAtMostOnceTaskExecuter.execute(ExecuteAtMostOnceTaskExecuter.java:43)
	at org.gradle.api.internal.tasks.execution.CatchExceptionTaskExecuter.execute(CatchExceptionTaskExecuter.java:34)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker$1.run(DefaultTaskGraphExecuter.java:256)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:249)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:238)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:104)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:98)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.execute(DefaultTaskExecutionPlan.java:663)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.executeWithTask(DefaultTaskExecutionPlan.java:596)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker.run(DefaultTaskPlanExecutor.java:98)
	at org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:63)
	at org.gradle.internal.concurrent.ManagedExecutorImpl$1.run(ManagedExecutorImpl.java:46)
	at org.gradle.internal.concurrent.ThreadFactoryImpl$ManagedThreadRunnable.run(ThreadFactoryImpl.java:55)
Caused by: org.gradle.api.GradleException: There were failing tests. See the report at: file://<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/f246d64e/beam/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>
	at org.gradle.api.tasks.testing.AbstractTestTask.handleTestFailures(AbstractTestTask.java:612)
	at org.gradle.api.tasks.testing.AbstractTestTask.executeTests(AbstractTestTask.java:484)
	at org.gradle.api.tasks.testing.Test.executeTests(Test.java:583)
	at org.gradle.internal.reflect.JavaMethod.invoke(JavaMethod.java:73)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.doExecute(StandardTaskAction.java:46)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:39)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:26)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:794)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:761)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter$1.run(ExecuteActionsTaskExecuter.java:124)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeAction(ExecuteActionsTaskExecuter.java:113)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:95)
	... 31 more


* Get more help at https://help.gradle.org

2018-05-02 00:22:16,657 f246d64e MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-02 00:22:16,658 f246d64e MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-05-02 00:22:16,659 f246d64e MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525210897958> delete -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml> --ignore-not-found
2018-05-02 00:22:17,073 f246d64e MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 780, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-02 00:22:17,074 f246d64e MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-05-02 00:22:17,105 f246d64e MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-05-02 00:22:17,106 f246d64e MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/f246d64e/pkb.log>
2018-05-02 00:22:17,106 f246d64e MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/f246d64e/completion_statuses.json>
Build step 'Execute shell' marked build as failure
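
For readers unfamiliar with the step names in the failure above ("Read all documents/Read(BoundedMongoDbSource)", "Map documents to Strings", "Calculate hashcode", and the write steps in the other runs): the integration test drives a write-then-read round trip through Beam's MongoDbIO connector. The test source is not part of this log, so the following is only an illustrative sketch of that shape built on the public MongoDbIO API; the URI, database, and collection names are placeholders, not the values the job actually used.

    import java.util.Arrays;

    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.io.mongodb.MongoDbIO;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;
    import org.apache.beam.sdk.transforms.Create;
    import org.apache.beam.sdk.values.PCollection;
    import org.bson.Document;

    public class MongoDbIoRoundTripSketch {
      public static void main(String[] args) {
        // Write phase: push a handful of documents into the collection under test.
        Pipeline writePipeline = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());
        writePipeline
            .apply("Produce documents", Create.of(Arrays.asList(
                Document.parse("{\"scientist\": \"Einstein\"}"),
                Document.parse("{\"scientist\": \"Curie\"}"))))
            .apply("Write documents to MongoDB", MongoDbIO.write()
                .withUri("mongodb://35.225.151.117:27017")  // placeholder LoadBalancer address
                .withDatabase("beam")                       // placeholder database name
                .withCollection("test_collection"));        // placeholder collection name
        writePipeline.run().waitUntilFinish();

        // Read phase: read everything back. The source splits via
        // MongoDbIO$BoundedMongoDbSource.split(), which is where the timeout and
        // 'ns not found' errors in the log above originate.
        Pipeline readPipeline = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());
        PCollection<Document> docs = readPipeline
            .apply("Read all documents", MongoDbIO.read()
                .withUri("mongodb://35.225.151.117:27017")
                .withDatabase("beam")
                .withCollection("test_collection"));
        readPipeline.run().waitUntilFinish();
      }
    }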

Build failed in Jenkins: beam_PerformanceTests_MongoDBIO_IT #118

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/118/display/redirect?page=changes>

Changes:

[tgroh] Mark some ReduceFnRunner arguments Nullable

[Pablo] Add documentation for quickstart tasks

------------------------------------------
[...truncated 899.29 KB...]
	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=104.198.180.190:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
	at com.mongodb.Mongo.execute(Mongo.java:781)
	at com.mongodb.Mongo$2.execute(Mongo.java:764)
	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=104.198.180.190:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
	at com.mongodb.Mongo.execute(Mongo.java:781)
	at com.mongodb.Mongo$2.execute(Mongo.java:764)
	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=104.198.180.190:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
	at com.mongodb.Mongo.execute(Mongo.java:781)
	at com.mongodb.Mongo$2.execute(Mongo.java:764)
	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
Workflow failed. Causes: S01:Generate sequence/Read(BoundedCountingSource)+Produce documents/Map+Write documents to MongoDB/ParDo(Write) failed., A work item was attempted 4 times without success. Each time the worker eventually lost contact with the service. The work item was attempted on: 
  mongodbioit0testwriteandr-05011116-1412-harness-13gm,
  mongodbioit0testwriteandr-05011116-1412-harness-13gm,
  mongodbioit0testwriteandr-05011116-1412-harness-13gm,
  mongodbioit0testwriteandr-05011116-1412-harness-13gm
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
	at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
	at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:346)
	at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:328)
	at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(MongoDBIOIT.java:118)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.junit.rules.RunRules.evaluate(RunRules.java:20)
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
	at org.apache.maven.surefire.junitcore.pc.Scheduler$1.run(Scheduler.java:410)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)

[ERROR] testWriteAndRead(org.apache.beam.sdk.io.mongodb.MongoDBIOIT)  Time elapsed: 0 s  <<< ERROR!
com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=104.198.180.190:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:158)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:133)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:128)
	at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:51)
	at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:36)
	at com.mongodb.Mongo.execute(Mongo.java:781)
	at com.mongodb.Mongo$2.execute(Mongo.java:764)
	at com.mongodb.MongoDatabaseImpl.drop(MongoDatabaseImpl.java:136)
	at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.tearDown(MongoDBIOIT.java:103)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:33)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.junit.rules.RunRules.evaluate(RunRules.java:20)
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
	at org.apache.maven.surefire.junitcore.pc.Scheduler$1.run(Scheduler.java:410)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)

[INFO] 
[INFO] Results:
[INFO] 
[ERROR] Errors: 
[ERROR] org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(org.apache.beam.sdk.io.mongodb.MongoDBIOIT)
[ERROR]   Run 1: MongoDBIOIT.testWriteAndRead:118  Runtime com.mongodb.MongoSocketReadExceptio...
[ERROR]   Run 2: MongoDBIOIT.tearDown:103  MongoTimeout Timed out after 30000 ms while waiting...
[INFO] 
[INFO] 
[ERROR] Tests run: 1, Failures: 0, Errors: 1, Skipped: 0
[INFO] 
[INFO] 
[INFO] --- maven-dependency-plugin:3.0.2:analyze-only (default) @ beam-sdks-java-io-mongodb ---
[INFO] No dependency problems found
[INFO] 
[INFO] --- maven-failsafe-plugin:2.21.0:verify (default) @ beam-sdks-java-io-mongodb ---
[INFO] Failsafe report directory: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/79f84044/beam/sdks/java/io/mongodb/target/failsafe-reports>
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 09:39 min
[INFO] Finished at: 2018-05-01T18:25:50Z
[INFO] Final Memory: 84M/2356M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.21.0:verify (default) on project beam-sdks-java-io-mongodb: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/79f84044/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
[ERROR] Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
[ERROR] -> [Help 1]
org.apache.maven.lifecycle.LifecycleExecutionException: Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.21.0:verify (default) on project beam-sdks-java-io-mongodb: There are test failures.

Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/79f84044/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:213)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
    at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
    at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
    at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
    at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
    at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
    at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
    at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke (Method.java:498)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
    at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
    at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
Caused by: org.apache.maven.plugin.MojoFailureException: There are test failures.

Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/79f84044/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
    at org.apache.maven.plugin.surefire.SurefireHelper.throwException (SurefireHelper.java:240)
    at org.apache.maven.plugin.surefire.SurefireHelper.reportExecution (SurefireHelper.java:112)
    at org.apache.maven.plugin.failsafe.VerifyMojo.execute (VerifyMojo.java:192)
    at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo (DefaultBuildPluginManager.java:134)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:208)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
    at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
    at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
    at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
    at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
    at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
    at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
    at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke (Method.java:498)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
    at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
    at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
[ERROR] 
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException

STDERR: 
2018-05-01 18:25:50,787 79f84044 MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-01 18:25:50,788 79f84044 MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-05-01 18:25:50,789 79f84044 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525196390464> delete -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml> --ignore-not-found
2018-05-01 18:25:51,401 79f84044 MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 780, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-01 18:25:51,401 79f84044 MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-05-01 18:25:51,460 79f84044 MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-05-01 18:25:51,461 79f84044 MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/79f84044/pkb.log>
2018-05-01 18:25:51,461 79f84044 MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/79f84044/completion_statuses.json>
Build step 'Execute shell' marked build as failure
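
One detail in the run above worth calling out: the failsafe summary reports two errors for the same test, one from MongoDBIOIT.testWriteAndRead:118 (the pipeline failure) and a follow-on MongoTimeoutException from MongoDBIOIT.tearDown:103, where the test drops its database and hits the same 30 s server-selection timeout because the server is still unreachable. The test class itself is not included in this log; the sketch below only guesses at the general shape of such a teardown, with assumed field, database, and class names.

    import com.mongodb.MongoClient;
    import org.junit.After;

    public class MongoDbTearDownSketch {
      // Assumed field: the client the test opened against the LoadBalancer address.
      private MongoClient client;

      @After
      public void tearDown() {
        try {
          // Dropping the test database needs a reachable, writable server; when the
          // service is down this times out exactly like the pipeline's write path did,
          // producing the second error in the failsafe summary.
          client.getDatabase("beam").drop();
        } finally {
          client.close();
        }
      }
    }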

Build failed in Jenkins: beam_PerformanceTests_MongoDBIO_IT #117

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/117/display/redirect>

------------------------------------------
[...truncated 874.54 KB...]
	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.226.49.216:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
	at com.mongodb.Mongo.execute(Mongo.java:781)
	at com.mongodb.Mongo$2.execute(Mongo.java:764)
	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.226.49.216:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
	at com.mongodb.Mongo.execute(Mongo.java:781)
	at com.mongodb.Mongo$2.execute(Mongo.java:764)
	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.226.49.216:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:219)
	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)
	at com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)
	at com.mongodb.Mongo.execute(Mongo.java:781)
	at com.mongodb.Mongo$2.execute(Mongo.java:764)
	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)
	at com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.flush(MongoDbIO.java:667)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$Write$WriteFn.processElement(MongoDbIO.java:652)
Workflow failed. Causes: S01:Generate sequence/Read(BoundedCountingSource)+Produce documents/Map+Write documents to MongoDB/ParDo(Write) failed., A work item was attempted 4 times without success. Each time the worker eventually lost contact with the service. The work item was attempted on: 
  mongodbioit0testwriteandr-05010508-0ypr-harness-06qh,
  mongodbioit0testwriteandr-05010508-0ypr-harness-06qh,
  mongodbioit0testwriteandr-05010508-0ypr-harness-06qh,
  mongodbioit0testwriteandr-05010508-0ypr-harness-06qh
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
	at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
	at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:346)
	at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:328)
	at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(MongoDBIOIT.java:118)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.junit.rules.RunRules.evaluate(RunRules.java:20)
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
	at org.apache.maven.surefire.junitcore.pc.Scheduler$1.run(Scheduler.java:410)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)

[ERROR] testWriteAndRead(org.apache.beam.sdk.io.mongodb.MongoDBIOIT)  Time elapsed: 0 s  <<< ERROR!
com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.226.49.216:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:158)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:133)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:128)
	at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:51)
	at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:36)
	at com.mongodb.Mongo.execute(Mongo.java:781)
	at com.mongodb.Mongo$2.execute(Mongo.java:764)
	at com.mongodb.MongoDatabaseImpl.drop(MongoDatabaseImpl.java:136)
	at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.tearDown(MongoDBIOIT.java:103)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:33)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.junit.rules.RunRules.evaluate(RunRules.java:20)
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
	at org.apache.maven.surefire.junitcore.pc.Scheduler$1.run(Scheduler.java:410)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)

[INFO] 
[INFO] Results:
[INFO] 
[ERROR] Errors: 
[ERROR] org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(org.apache.beam.sdk.io.mongodb.MongoDBIOIT)
[ERROR]   Run 1: MongoDBIOIT.testWriteAndRead:118  Runtime com.mongodb.MongoSocketReadExceptio...
[ERROR]   Run 2: MongoDBIOIT.tearDown:103  MongoTimeout Timed out after 30000 ms while waiting...
[INFO] 
[INFO] 
[ERROR] Tests run: 1, Failures: 0, Errors: 1, Skipped: 0
[INFO] 
[INFO] 
[INFO] --- maven-dependency-plugin:3.0.2:analyze-only (default) @ beam-sdks-java-io-mongodb ---
[INFO] No dependency problems found
[INFO] 
[INFO] --- maven-failsafe-plugin:2.21.0:verify (default) @ beam-sdks-java-io-mongodb ---
[INFO] Failsafe report directory: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/e82f731d/beam/sdks/java/io/mongodb/target/failsafe-reports>
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 07:37 min
[INFO] Finished at: 2018-05-01T12:15:52Z
[INFO] Final Memory: 84M/2376M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.21.0:verify (default) on project beam-sdks-java-io-mongodb: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/e82f731d/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
[ERROR] Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
[ERROR] -> [Help 1]
org.apache.maven.lifecycle.LifecycleExecutionException: Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.21.0:verify (default) on project beam-sdks-java-io-mongodb: There are test failures.

Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/e82f731d/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:213)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
    at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
    at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
    at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
    at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
    at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
    at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
    at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke (Method.java:498)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
    at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
    at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
Caused by: org.apache.maven.plugin.MojoFailureException: There are test failures.

Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/e82f731d/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
    at org.apache.maven.plugin.surefire.SurefireHelper.throwException (SurefireHelper.java:240)
    at org.apache.maven.plugin.surefire.SurefireHelper.reportExecution (SurefireHelper.java:112)
    at org.apache.maven.plugin.failsafe.VerifyMojo.execute (VerifyMojo.java:192)
    at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo (DefaultBuildPluginManager.java:134)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:208)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
    at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
    at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
    at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
    at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
    at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
    at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
    at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke (Method.java:498)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
    at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
    at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
[ERROR] 
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException

STDERR: 
2018-05-01 12:15:52,693 e82f731d MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-01 12:15:52,693 e82f731d MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-05-01 12:15:52,694 e82f731d MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525168902165> delete -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml> --ignore-not-found
2018-05-01 12:15:53,266 e82f731d MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 780, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-01 12:15:53,266 e82f731d MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-05-01 12:15:53,295 e82f731d MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-05-01 12:15:53,296 e82f731d MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/e82f731d/pkb.log>
2018-05-01 12:15:53,296 e82f731d MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/e82f731d/completion_statuses.json>
Build step 'Execute shell' marked build as failure

Build failed in Jenkins: beam_PerformanceTests_MongoDBIO_IT #116

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/116/display/redirect?page=changes>

Changes:

[thw] [BEAM-4131] Include SDK into Python SDK harness container.

[thw] cleanup

------------------------------------------
[...truncated 907.88 KB...]
	at java.lang.Thread.run(Thread.java:745)
Caused by: com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches ReadPreferenceServerSelector{readPreference=primary}. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.226.189.22:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getReadConnectionSource(ClusterBinding.java:63)
	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:210)
	at com.mongodb.operation.FindOperation.execute(FindOperation.java:482)
	at com.mongodb.operation.FindOperation.execute(FindOperation.java:79)
	at com.mongodb.Mongo.execute(Mongo.java:772)
	at com.mongodb.Mongo$2.execute(Mongo.java:759)
	at com.mongodb.OperationIterable.iterator(OperationIterable.java:47)
	at com.mongodb.FindIterableImpl.iterator(FindIterableImpl.java:143)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbReader.start(MongoDbIO.java:456)
	at com.google.cloud.dataflow.worker.WorkerCustomSources$BoundedReaderIterator.start(WorkerCustomSources.java:592)
	... 14 more
java.io.IOException: Failed to start reading from source: org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbSource@11ac8ab7
	at com.google.cloud.dataflow.worker.WorkerCustomSources$BoundedReaderIterator.start(WorkerCustomSources.java:595)
	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation$SynchronizedReaderIterator.start(ReadOperation.java:360)
	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.runReadLoop(ReadOperation.java:193)
	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.start(ReadOperation.java:158)
	at com.google.cloud.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:75)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)
Caused by: com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches ReadPreferenceServerSelector{readPreference=primary}. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.226.189.22:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getReadConnectionSource(ClusterBinding.java:63)
	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:210)
	at com.mongodb.operation.FindOperation.execute(FindOperation.java:482)
	at com.mongodb.operation.FindOperation.execute(FindOperation.java:79)
	at com.mongodb.Mongo.execute(Mongo.java:772)
	at com.mongodb.Mongo$2.execute(Mongo.java:759)
	at com.mongodb.OperationIterable.iterator(OperationIterable.java:47)
	at com.mongodb.FindIterableImpl.iterator(FindIterableImpl.java:143)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbReader.start(MongoDbIO.java:456)
	at com.google.cloud.dataflow.worker.WorkerCustomSources$BoundedReaderIterator.start(WorkerCustomSources.java:592)
	... 14 more
Workflow failed. Causes: S03:Read all documents/Read(BoundedMongoDbSource)+Map documents to Strings/Map+Calculate hashcode/WithKeys/AddKeys/Map+Calculate hashcode/Combine.perKey(Hashing)/GroupByKey+Calculate hashcode/Combine.perKey(Hashing)/Combine.GroupedValues/Partial+Calculate hashcode/Combine.perKey(Hashing)/GroupByKey/Reify+Calculate hashcode/Combine.perKey(Hashing)/GroupByKey/Write failed., A work item was attempted 4 times without success. Each time the worker eventually lost contact with the service. The work item was attempted on: 
  mongodbioit0testwriteandr-04302315-skwb-harness-v6rt,
  mongodbioit0testwriteandr-04302315-skwb-harness-v6rt,
  mongodbioit0testwriteandr-04302315-skwb-harness-v6rt,
  mongodbioit0testwriteandr-04302315-skwb-harness-v6rt
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
	at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
	at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:346)
	at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:328)
	at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(MongoDBIOIT.java:131)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.junit.rules.RunRules.evaluate(RunRules.java:20)
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
	at org.apache.maven.surefire.junitcore.pc.Scheduler$1.run(Scheduler.java:410)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)

[ERROR] testWriteAndRead(org.apache.beam.sdk.io.mongodb.MongoDBIOIT)  Time elapsed: 0 s  <<< ERROR!
com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.226.189.22:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:158)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:133)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:128)
	at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:51)
	at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:36)
	at com.mongodb.Mongo.execute(Mongo.java:781)
	at com.mongodb.Mongo$2.execute(Mongo.java:764)
	at com.mongodb.MongoDatabaseImpl.drop(MongoDatabaseImpl.java:136)
	at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.tearDown(MongoDBIOIT.java:103)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:33)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.junit.rules.RunRules.evaluate(RunRules.java:20)
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
	at org.apache.maven.surefire.junitcore.pc.Scheduler$1.run(Scheduler.java:410)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)

[INFO] 
[INFO] Results:
[INFO] 
[ERROR] Errors: 
[ERROR] org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(org.apache.beam.sdk.io.mongodb.MongoDBIOIT)
[ERROR]   Run 1: MongoDBIOIT.testWriteAndRead:131  Runtime java.io.IOException: Failed to star...
[ERROR]   Run 2: MongoDBIOIT.tearDown:103  MongoTimeout Timed out after 30000 ms while waiting...
[INFO] 
[INFO] 
[ERROR] Tests run: 1, Failures: 0, Errors: 1, Skipped: 0
[INFO] 
[INFO] 
[INFO] --- maven-dependency-plugin:3.0.2:analyze-only (default) @ beam-sdks-java-io-mongodb ---
[INFO] No dependency problems found
[INFO] 
[INFO] --- maven-failsafe-plugin:2.21.0:verify (default) @ beam-sdks-java-io-mongodb ---
[INFO] Failsafe report directory: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/e2fd53c2/beam/sdks/java/io/mongodb/target/failsafe-reports>
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 13:15 min
[INFO] Finished at: 2018-05-01T06:21:44Z
[INFO] Final Memory: 95M/3210M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.21.0:verify (default) on project beam-sdks-java-io-mongodb: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/e2fd53c2/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
[ERROR] Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
[ERROR] -> [Help 1]
org.apache.maven.lifecycle.LifecycleExecutionException: Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.21.0:verify (default) on project beam-sdks-java-io-mongodb: There are test failures.

Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/e2fd53c2/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:213)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
    at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
    at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
    at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
    at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
    at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
    at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
    at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke (Method.java:498)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
    at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
    at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
Caused by: org.apache.maven.plugin.MojoFailureException: There are test failures.

Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/e2fd53c2/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
    at org.apache.maven.plugin.surefire.SurefireHelper.throwException (SurefireHelper.java:240)
    at org.apache.maven.plugin.surefire.SurefireHelper.reportExecution (SurefireHelper.java:112)
    at org.apache.maven.plugin.failsafe.VerifyMojo.execute (VerifyMojo.java:192)
    at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo (DefaultBuildPluginManager.java:134)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:208)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
    at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
    at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
    at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
    at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
    at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
    at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
    at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke (Method.java:498)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
    at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
    at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
[ERROR] 
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException

STDERR: 
2018-05-01 06:21:45,382 e2fd53c2 MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-01 06:21:45,383 e2fd53c2 MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-05-01 06:21:45,383 e2fd53c2 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525150928284> delete -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml> --ignore-not-found
2018-05-01 06:21:45,871 e2fd53c2 MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 780, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-01 06:21:45,871 e2fd53c2 MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-05-01 06:21:45,920 e2fd53c2 MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-05-01 06:21:45,921 e2fd53c2 MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/e2fd53c2/pkb.log>
2018-05-01 06:21:45,921 e2fd53c2 MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/e2fd53c2/completion_statuses.json>
Build step 'Execute shell' marked build as failure

Build failed in Jenkins: beam_PerformanceTests_MongoDBIO_IT #115

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/115/display/redirect?page=changes>

Changes:

[herohde] [BEAM-4175] Fix direct output pardo issue

[kenn] Add errorprone to basic Gradle config

[kenn] Fix suppressed failure in ResourceIdTester

[kenn] Fix erroneous tests in ViewTest

[kenn] Add missing @Test in CollectionCoderTest

[kenn] Sickbay broken LocalResourceIdTest case

[kenn] Remove inaccurate GuardedBy in both copies of DirectMetrics

[kenn] Fix suppressed failure in RetryHttpRequestInitializerTest

[kenn] Remove unnecessary type arguments in ReduceFnRunnerTest

[kenn] Add missing @Test in SplittableParDoProcessFnTest

[kenn] Remove unnecessary type argument in PipelineTranslationTest

[kenn] Fix compile-time constant int overflow in S3FileSystem

[kenn] Disable findbugs for JdbcIO where it gave false positives, now that we

[kenn] Sickbay broken GcsResourceIdTest case

[kenn] Sickbay broken HadoopResourceIdTest case

[kenn] Sickbay broken SplittableParDoProcessFnTest case

[kenn] Add missing @Test in S3ResourceIdTest

[kenn] Fix typo in CassandraIOIT

[kenn] Fix nonsensical @RunWith in ExampleEchoPipelineTest

[kenn] Fix missing @Test in both copies of WatermarkManagerTest

[kenn] Fix misuse of Arrays.asList in KinesisMockWriteTest

[kenn] Remove extraneous type variables in MongoDBGridFSIOTest

[kenn] Sickbay broken S3ResourceIdTest cases

[kenn] Sickbay broken WatermarkManagerTest case

[apilloud] [BEAM-3983][SQL] Add BigQuery table provider

[iemejia] Fix gradle script to build the docker image of 'sdks/java/container'

[Pablo] Creation of utils.py with CountingSource class on it.

[Pablo] Changed import from examples.snippets to io.utils

------------------------------------------
[...truncated 104.59 KB...]
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)
com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches ReadPreferenceServerSelector{readPreference=primary}. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.225.173.103:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getReadConnectionSource(ClusterBinding.java:63)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:89)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:84)
	at com.mongodb.operation.CommandReadOperation.execute(CommandReadOperation.java:55)
	at com.mongodb.Mongo.execute(Mongo.java:772)
	at com.mongodb.Mongo$2.execute(Mongo.java:759)
	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:130)
	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:124)
	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:114)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbSource.split(MongoDbIO.java:332)
	at com.google.cloud.dataflow.worker.WorkerCustomSources.splitAndValidate(WorkerCustomSources.java:275)
	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitTyped(WorkerCustomSources.java:197)
	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitWithApiLimit(WorkerCustomSources.java:181)
	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplit(WorkerCustomSources.java:160)
	at com.google.cloud.dataflow.worker.WorkerCustomSourceOperationExecutor.execute(WorkerCustomSourceOperationExecutor.java:77)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)
com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches ReadPreferenceServerSelector{readPreference=primary}. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.225.173.103:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getReadConnectionSource(ClusterBinding.java:63)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:89)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:84)
	at com.mongodb.operation.CommandReadOperation.execute(CommandReadOperation.java:55)
	at com.mongodb.Mongo.execute(Mongo.java:772)
	at com.mongodb.Mongo$2.execute(Mongo.java:759)
	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:130)
	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:124)
	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:114)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbSource.split(MongoDbIO.java:332)
	at com.google.cloud.dataflow.worker.WorkerCustomSources.splitAndValidate(WorkerCustomSources.java:275)
	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitTyped(WorkerCustomSources.java:197)
	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitWithApiLimit(WorkerCustomSources.java:181)
	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplit(WorkerCustomSources.java:160)
	at com.google.cloud.dataflow.worker.WorkerCustomSourceOperationExecutor.execute(WorkerCustomSourceOperationExecutor.java:77)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)
com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches ReadPreferenceServerSelector{readPreference=primary}. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.225.173.103:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getReadConnectionSource(ClusterBinding.java:63)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:89)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:84)
	at com.mongodb.operation.CommandReadOperation.execute(CommandReadOperation.java:55)
	at com.mongodb.Mongo.execute(Mongo.java:772)
	at com.mongodb.Mongo$2.execute(Mongo.java:759)
	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:130)
	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:124)
	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:114)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbSource.split(MongoDbIO.java:332)
	at com.google.cloud.dataflow.worker.WorkerCustomSources.splitAndValidate(WorkerCustomSources.java:275)
	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitTyped(WorkerCustomSources.java:197)
	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitWithApiLimit(WorkerCustomSources.java:181)
	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplit(WorkerCustomSources.java:160)
	at com.google.cloud.dataflow.worker.WorkerCustomSourceOperationExecutor.execute(WorkerCustomSourceOperationExecutor.java:77)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)
Workflow failed. Causes: S03:Read all documents/Read(BoundedMongoDbSource)+Map documents to Strings/Map+Calculate hashcode/WithKeys/AddKeys/Map+Calculate hashcode/Combine.perKey(Hashing)/GroupByKey+Calculate hashcode/Combine.perKey(Hashing)/Combine.GroupedValues/Partial+Calculate hashcode/Combine.perKey(Hashing)/GroupByKey/Reify+Calculate hashcode/Combine.perKey(Hashing)/GroupByKey/Write failed.
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
	at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
	at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:346)
	at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:328)
	at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(MongoDBIOIT.java:131)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.junit.rules.RunRules.evaluate(RunRules.java:20)
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
	at org.apache.maven.surefire.junitcore.pc.Scheduler$1.run(Scheduler.java:410)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)

[INFO] 
[INFO] Results:
[INFO] 
[ERROR] Errors: 
[ERROR]   MongoDBIOIT.testWriteAndRead:131  Runtime com.mongodb.MongoTimeoutException: ...
[INFO] 
[ERROR] Tests run: 1, Failures: 0, Errors: 1, Skipped: 0
[INFO] 
[INFO] 
[INFO] --- maven-dependency-plugin:3.0.2:analyze-only (default) @ beam-sdks-java-io-mongodb ---
[INFO] No dependency problems found
[INFO] 
[INFO] --- maven-failsafe-plugin:2.21.0:verify (default) @ beam-sdks-java-io-mongodb ---
[INFO] Failsafe report directory: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/7b1c8f68/beam/sdks/java/io/mongodb/target/failsafe-reports>
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 11:49 min
[INFO] Finished at: 2018-05-01T00:14:09Z
[INFO] Final Memory: 119M/3209M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.21.0:verify (default) on project beam-sdks-java-io-mongodb: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/7b1c8f68/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
[ERROR] Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
[ERROR] -> [Help 1]
org.apache.maven.lifecycle.LifecycleExecutionException: Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.21.0:verify (default) on project beam-sdks-java-io-mongodb: There are test failures.

Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/7b1c8f68/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:213)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
    at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
    at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
    at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
    at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
    at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
    at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
    at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke (Method.java:498)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
    at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
    at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
Caused by: org.apache.maven.plugin.MojoFailureException: There are test failures.

Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/7b1c8f68/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
    at org.apache.maven.plugin.surefire.SurefireHelper.throwException (SurefireHelper.java:240)
    at org.apache.maven.plugin.surefire.SurefireHelper.reportExecution (SurefireHelper.java:112)
    at org.apache.maven.plugin.failsafe.VerifyMojo.execute (VerifyMojo.java:192)
    at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo (DefaultBuildPluginManager.java:134)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:208)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
    at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
    at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
    at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
    at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
    at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
    at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
    at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke (Method.java:498)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
    at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
    at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
[ERROR] 
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException

STDERR: 
2018-05-01 00:14:10,038 7b1c8f68 MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-01 00:14:10,039 7b1c8f68 MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-05-01 00:14:10,039 7b1c8f68 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525118500634> delete -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml> --ignore-not-found
2018-05-01 00:14:10,483 7b1c8f68 MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 780, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-01 00:14:10,483 7b1c8f68 MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-05-01 00:14:10,533 7b1c8f68 MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-05-01 00:14:10,533 7b1c8f68 MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/7b1c8f68/pkb.log>
2018-05-01 00:14:10,534 7b1c8f68 MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/7b1c8f68/completion_statuses.json>
Build step 'Execute shell' marked build as failure
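
The traceback above shows how the PerfKit harness interprets the Maven result: SubmitJob treats any non-zero exit code from the mvn invocation as a failed integration test, and the cleanup phase (the kubectl delete of the MongoDB load balancer) runs regardless of the outcome. A rough Java stand-in for that control flow is sketched below; the mvn and kubectl argument lists are hypothetical placeholders for the commands the harness actually builds, so this only illustrates the retcode check, it is not the harness itself.

    import java.io.IOException;
    import java.util.Arrays;
    import java.util.List;

    public class IntegrationTestHarnessSketch {
      public static void main(String[] args) throws IOException, InterruptedException {
        // Hypothetical stand-ins for the Maven verify run and the kubectl cleanup seen in the log.
        List<String> verify = Arrays.asList("mvn", "verify", "-pl", "sdks/java/io/mongodb");
        List<String> cleanup = Arrays.asList("kubectl", "delete", "-f",
            ".test-infra/kubernetes/mongodb/load-balancer/mongo.yml", "--ignore-not-found");

        int retcode = new ProcessBuilder(verify).inheritIO().start().waitFor();
        try {
          // Mirrors "assert retcode == 0": any non-zero Maven exit code marks the benchmark FAILED.
          if (retcode != 0) {
            throw new AssertionError("Integration Test Failed.");
          }
        } finally {
          // Cleanup still runs when the run phase failed, as in the log above.
          new ProcessBuilder(cleanup).inheritIO().start().waitFor();
        }
      }
    }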

Build failed in Jenkins: beam_PerformanceTests_MongoDBIO_IT #114

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/114/display/redirect?page=changes>

Changes:

[ankurgoenka] fixing generic errors

[echauchot] [BEAM-3119] ensure metrics thread is shutdown

[echauchot] [BEAM-4088] Rebase on master, fix conflicts and adapt test to master

[echauchot] [BEAM-4088] Add a test with a real pipeline that has a DoFn with metrics

[echauchot] [BEAM-4088] Blocks until the threads in metricsExecutorService is done,

[pgerver] Enhance awsCredentialsProvider option description

------------------------------------------
[...truncated 91.34 KB...]
[INFO] Excluding com.google.api.grpc:grpc-google-longrunning-v1:jar:0.1.11 from the shaded jar.
[INFO] Excluding com.google.api.grpc:proto-google-longrunning-v1:jar:0.1.11 from the shaded jar.
[INFO] Excluding com.google.cloud.bigtable:bigtable-protos:jar:1.0.0-pre3 from the shaded jar.
[INFO] Excluding com.google.cloud.bigtable:bigtable-client-core:jar:1.0.0 from the shaded jar.
[INFO] Excluding commons-logging:commons-logging:jar:1.2 from the shaded jar.
[INFO] Excluding com.google.auth:google-auth-library-appengine:jar:0.7.0 from the shaded jar.
[INFO] Excluding io.opencensus:opencensus-contrib-grpc-util:jar:0.7.0 from the shaded jar.
[INFO] Excluding io.dropwizard.metrics:metrics-core:jar:3.1.2 from the shaded jar.
[INFO] Excluding com.google.protobuf:protobuf-java:jar:3.2.0 from the shaded jar.
[INFO] Excluding io.netty:netty-tcnative-boringssl-static:jar:1.1.33.Fork26 from the shaded jar.
[INFO] Excluding com.google.api.grpc:proto-google-cloud-spanner-admin-database-v1:jar:0.1.9 from the shaded jar.
[INFO] Excluding com.google.api.grpc:proto-google-common-protos:jar:0.1.9 from the shaded jar.
[INFO] Excluding com.google.api-client:google-api-client:jar:1.23.0 from the shaded jar.
[INFO] Excluding com.google.oauth-client:google-oauth-client:jar:1.23.0 from the shaded jar.
[INFO] Excluding com.google.http-client:google-http-client:jar:1.23.0 from the shaded jar.
[INFO] Excluding org.apache.httpcomponents:httpclient:jar:4.0.1 from the shaded jar.
[INFO] Excluding org.apache.httpcomponents:httpcore:jar:4.0.1 from the shaded jar.
[INFO] Excluding commons-codec:commons-codec:jar:1.3 from the shaded jar.
[INFO] Excluding com.google.http-client:google-http-client-jackson2:jar:1.23.0 from the shaded jar.
[INFO] Excluding com.google.apis:google-api-services-dataflow:jar:v1b3-rev221-1.23.0 from the shaded jar.
[INFO] Excluding com.google.apis:google-api-services-clouddebugger:jar:v2-rev233-1.23.0 from the shaded jar.
[INFO] Excluding com.google.apis:google-api-services-storage:jar:v1-rev124-1.23.0 from the shaded jar.
[INFO] Excluding com.google.auth:google-auth-library-credentials:jar:0.7.1 from the shaded jar.
[INFO] Excluding com.google.auth:google-auth-library-oauth2-http:jar:0.7.1 from the shaded jar.
[INFO] Excluding com.google.cloud.bigdataoss:util:jar:1.4.5 from the shaded jar.
[INFO] Excluding com.google.api-client:google-api-client-java6:jar:1.23.0 from the shaded jar.
[INFO] Excluding com.google.api-client:google-api-client-jackson2:jar:1.23.0 from the shaded jar.
[INFO] Excluding com.google.oauth-client:google-oauth-client-java6:jar:1.23.0 from the shaded jar.
[INFO] Replacing original artifact with shaded artifact.
[INFO] Replacing <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/b14d57ce/beam/sdks/java/io/mongodb/target/beam-sdks-java-io-mongodb-2.5.0-SNAPSHOT.jar> with <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/b14d57ce/beam/sdks/java/io/mongodb/target/beam-sdks-java-io-mongodb-2.5.0-SNAPSHOT-shaded.jar>
[INFO] Replacing original test artifact with shaded test artifact.
[INFO] Replacing <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/b14d57ce/beam/sdks/java/io/mongodb/target/beam-sdks-java-io-mongodb-2.5.0-SNAPSHOT-tests.jar> with <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/b14d57ce/beam/sdks/java/io/mongodb/target/beam-sdks-java-io-mongodb-2.5.0-SNAPSHOT-shaded-tests.jar>
[INFO] Dependency-reduced POM written at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/b14d57ce/beam/sdks/java/io/mongodb/dependency-reduced-pom.xml>
[INFO] 
[INFO] --- maven-failsafe-plugin:2.21.0:integration-test (default) @ beam-sdks-java-io-mongodb ---
[INFO] Failsafe report directory: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/b14d57ce/beam/sdks/java/io/mongodb/target/failsafe-reports>
[INFO] parallel='all', perCoreThreadCount=true, threadCount=4, useUnlimitedThreads=false, threadCountSuites=0, threadCountClasses=0, threadCountMethods=0, parallelOptimized=true
[INFO] 
[INFO] -------------------------------------------------------
[INFO]  T E S T S
[INFO] -------------------------------------------------------
[INFO] Running org.apache.beam.sdk.io.mongodb.MongoDBIOIT
[ERROR] Tests run: 2, Failures: 0, Errors: 2, Skipped: 0, Time elapsed: 364.834 s <<< FAILURE! - in org.apache.beam.sdk.io.mongodb.MongoDBIOIT
[ERROR] testWriteAndRead(org.apache.beam.sdk.io.mongodb.MongoDBIOIT)  Time elapsed: 364.834 s  <<< ERROR!
java.lang.NoClassDefFoundError: org/apache/beam/runners/dataflow/DataflowPipelineJob$1
	at org.apache.beam.runners.dataflow.DataflowPipelineJob.waitUntilFinish(DataflowPipelineJob.java:339)
	at org.apache.beam.runners.dataflow.DataflowPipelineJob.waitUntilFinish(DataflowPipelineJob.java:248)
	at org.apache.beam.runners.dataflow.TestDataflowRunner.waitForBatchJobTermination(TestDataflowRunner.java:190)
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:118)
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
	at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
	at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:346)
	at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:328)
	at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(MongoDBIOIT.java:118)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.junit.rules.RunRules.evaluate(RunRules.java:20)
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
	at org.apache.maven.surefire.junitcore.pc.Scheduler$1.run(Scheduler.java:410)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.ClassNotFoundException: org.apache.beam.runners.dataflow.DataflowPipelineJob$1
	at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:338)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
	... 32 more
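
The NoClassDefFoundError above is raised while the test blocks on job completion: DataflowPipelineJob.waitUntilFinish needs an anonymous inner class (DataflowPipelineJob$1) that was not loadable from the test JVM's classpath at run time, which points at the shaded test artifact rather than at the pipeline logic. For reference, the call pattern the integration test exercises reduces to the minimal sketch below (plain Beam Java SDK; the Create.of input and option handling are illustrative, not taken from MongoDBIOIT).

    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.PipelineResult;
    import org.apache.beam.sdk.options.PipelineOptions;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;
    import org.apache.beam.sdk.transforms.Create;

    public class RunAndWaitSketch {
      public static void main(String[] args) {
        // The runner (e.g. --runner=TestDataflowRunner) comes from the command-line options.
        PipelineOptions options = PipelineOptionsFactory.fromArgs(args).create();
        Pipeline pipeline = Pipeline.create(options);
        pipeline.apply(Create.of("a", "b", "c"));

        // pipeline.run() submits the job; waitUntilFinish() is where the trace above fails,
        // because the runner's own classes must still be loadable while the job is polled.
        PipelineResult result = pipeline.run();
        result.waitUntilFinish();
        System.out.println("Final state: " + result.getState());
      }
    }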

[ERROR] testWriteAndRead(org.apache.beam.sdk.io.mongodb.MongoDBIOIT)  Time elapsed: 0 s  <<< ERROR!
com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.226.255.2:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:158)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:133)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:128)
	at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:51)
	at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:36)
	at com.mongodb.Mongo.execute(Mongo.java:781)
	at com.mongodb.Mongo$2.execute(Mongo.java:764)
	at com.mongodb.MongoDatabaseImpl.drop(MongoDatabaseImpl.java:136)
	at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.tearDown(MongoDBIOIT.java:103)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:33)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.junit.rules.RunRules.evaluate(RunRules.java:20)
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
	at org.apache.maven.surefire.junitcore.pc.Scheduler$1.run(Scheduler.java:410)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
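
Both errors in this run reduce to the same symptom: the MongoDB Java driver gives up after 30000 ms of server selection because the load-balancer address never accepts a connection. When that happens, probing the endpoint directly is usually faster than rerunning the whole pipeline. A minimal probe with the 3.x driver might look like the sketch below; the address is copied from the log and the database name is arbitrary, so treat both as placeholders.

    import com.mongodb.MongoClient;
    import com.mongodb.MongoClientOptions;
    import com.mongodb.ServerAddress;
    import org.bson.Document;

    public class MongoConnectivityProbe {
      public static void main(String[] args) {
        // Match the 30 s server selection timeout reported in the stack traces above.
        MongoClientOptions options = MongoClientOptions.builder()
            .serverSelectionTimeout(30_000)
            .build();

        // Placeholder address; substitute the LoadBalancer IP provisioned for the test run.
        MongoClient client = new MongoClient(new ServerAddress("35.226.255.2", 27017), options);
        try {
          // runCommand forces server selection, so an unreachable endpoint fails here with the
          // same MongoTimeoutException instead of deep inside the pipeline.
          Document pong = client.getDatabase("admin").runCommand(new Document("ping", 1));
          System.out.println("MongoDB reachable: " + pong.toJson());
        } finally {
          client.close();
        }
      }
    }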

[INFO] 
[INFO] Results:
[INFO] 
[ERROR] Errors: 
[ERROR] org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(org.apache.beam.sdk.io.mongodb.MongoDBIOIT)
[ERROR]   Run 1: MongoDBIOIT.testWriteAndRead:118  NoClassDefFound org/apache/beam/runners/dat...
[ERROR]   Run 2: MongoDBIOIT.tearDown:103  MongoTimeout Timed out after 30000 ms while waiting...
[INFO] 
[INFO] 
[ERROR] Tests run: 1, Failures: 0, Errors: 1, Skipped: 0
[INFO] 
[INFO] 
[INFO] --- maven-dependency-plugin:3.0.2:analyze-only (default) @ beam-sdks-java-io-mongodb ---
[INFO] No dependency problems found
[INFO] 
[INFO] --- maven-failsafe-plugin:2.21.0:verify (default) @ beam-sdks-java-io-mongodb ---
[INFO] Failsafe report directory: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/b14d57ce/beam/sdks/java/io/mongodb/target/failsafe-reports>
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 06:20 min
[INFO] Finished at: 2018-04-30T18:09:50Z
[INFO] Final Memory: 103M/2164M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.21.0:verify (default) on project beam-sdks-java-io-mongodb: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/b14d57ce/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
[ERROR] Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
[ERROR] -> [Help 1]
org.apache.maven.lifecycle.LifecycleExecutionException: Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.21.0:verify (default) on project beam-sdks-java-io-mongodb: There are test failures.

Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/b14d57ce/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:213)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
    at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
    at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
    at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
    at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
    at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
    at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
    at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke (Method.java:498)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
    at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
    at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
Caused by: org.apache.maven.plugin.MojoFailureException: There are test failures.

Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/b14d57ce/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
    at org.apache.maven.plugin.surefire.SurefireHelper.throwException (SurefireHelper.java:240)
    at org.apache.maven.plugin.surefire.SurefireHelper.reportExecution (SurefireHelper.java:112)
    at org.apache.maven.plugin.failsafe.VerifyMojo.execute (VerifyMojo.java:192)
    at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo (DefaultBuildPluginManager.java:134)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:208)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
    at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
    at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
    at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
    at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
    at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
    at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
    at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke (Method.java:498)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
    at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
    at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
[ERROR] 
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException

STDERR: 
2018-04-30 18:09:50,800 b14d57ce MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-04-30 18:09:50,801 b14d57ce MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-04-30 18:09:50,801 b14d57ce MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525100498118> delete -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml> --ignore-not-found
2018-04-30 18:10:00,283 b14d57ce MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 780, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-04-30 18:10:00,283 b14d57ce MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-04-30 18:10:00,334 b14d57ce MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-04-30 18:10:00,335 b14d57ce MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/b14d57ce/pkb.log>
2018-04-30 18:10:00,335 b14d57ce MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/b14d57ce/completion_statuses.json>
Build step 'Execute shell' marked build as failure

Build failed in Jenkins: beam_PerformanceTests_MongoDBIO_IT #113

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/113/display/redirect?page=changes>

Changes:

[aljoscha.krettek] [BEAM-3909] Add tests for Flink DoFnOperator side-input checkpointing

------------------------------------------
[...truncated 874.27 KB...]
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)
com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches ReadPreferenceServerSelector{readPreference=primary}. Client view of cluster state is {type=UNKNOWN, servers=[{address=104.197.195.202:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getReadConnectionSource(ClusterBinding.java:63)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:89)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:84)
	at com.mongodb.operation.CommandReadOperation.execute(CommandReadOperation.java:55)
	at com.mongodb.Mongo.execute(Mongo.java:772)
	at com.mongodb.Mongo$2.execute(Mongo.java:759)
	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:130)
	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:124)
	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:114)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbSource.split(MongoDbIO.java:332)
	at com.google.cloud.dataflow.worker.WorkerCustomSources.splitAndValidate(WorkerCustomSources.java:275)
	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitTyped(WorkerCustomSources.java:197)
	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitWithApiLimit(WorkerCustomSources.java:181)
	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplit(WorkerCustomSources.java:160)
	at com.google.cloud.dataflow.worker.WorkerCustomSourceOperationExecutor.execute(WorkerCustomSourceOperationExecutor.java:77)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)
com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches ReadPreferenceServerSelector{readPreference=primary}. Client view of cluster state is {type=UNKNOWN, servers=[{address=104.197.195.202:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getReadConnectionSource(ClusterBinding.java:63)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:89)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:84)
	at com.mongodb.operation.CommandReadOperation.execute(CommandReadOperation.java:55)
	at com.mongodb.Mongo.execute(Mongo.java:772)
	at com.mongodb.Mongo$2.execute(Mongo.java:759)
	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:130)
	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:124)
	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:114)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbSource.split(MongoDbIO.java:332)
	at com.google.cloud.dataflow.worker.WorkerCustomSources.splitAndValidate(WorkerCustomSources.java:275)
	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitTyped(WorkerCustomSources.java:197)
	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitWithApiLimit(WorkerCustomSources.java:181)
	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplit(WorkerCustomSources.java:160)
	at com.google.cloud.dataflow.worker.WorkerCustomSourceOperationExecutor.execute(WorkerCustomSourceOperationExecutor.java:77)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)
com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches ReadPreferenceServerSelector{readPreference=primary}. Client view of cluster state is {type=UNKNOWN, servers=[{address=104.197.195.202:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getReadConnectionSource(ClusterBinding.java:63)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:89)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:84)
	at com.mongodb.operation.CommandReadOperation.execute(CommandReadOperation.java:55)
	at com.mongodb.Mongo.execute(Mongo.java:772)
	at com.mongodb.Mongo$2.execute(Mongo.java:759)
	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:130)
	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:124)
	at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:114)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbSource.split(MongoDbIO.java:332)
	at com.google.cloud.dataflow.worker.WorkerCustomSources.splitAndValidate(WorkerCustomSources.java:275)
	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitTyped(WorkerCustomSources.java:197)
	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitWithApiLimit(WorkerCustomSources.java:181)
	at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplit(WorkerCustomSources.java:160)
	at com.google.cloud.dataflow.worker.WorkerCustomSourceOperationExecutor.execute(WorkerCustomSourceOperationExecutor.java:77)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)
Workflow failed. Causes: S03:Read all documents/Read(BoundedMongoDbSource)+Map documents to Strings/Map+Calculate hashcode/WithKeys/AddKeys/Map+Calculate hashcode/Combine.perKey(Hashing)/GroupByKey+Calculate hashcode/Combine.perKey(Hashing)/Combine.GroupedValues/Partial+Calculate hashcode/Combine.perKey(Hashing)/GroupByKey/Reify+Calculate hashcode/Combine.perKey(Hashing)/GroupByKey/Write failed.
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
	at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
	at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:346)
	at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:328)
	at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(MongoDBIOIT.java:131)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.junit.rules.RunRules.evaluate(RunRules.java:20)
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
	at org.apache.maven.surefire.junitcore.pc.Scheduler$1.run(Scheduler.java:410)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
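
The failing step names in the trace above (Read all documents, Map documents to Strings, Calculate hashcode) correspond to an ordinary MongoDbIO read pipeline, and the split() frame shows that the source is expanded on the Dataflow workers, so the MongoDB address has to be reachable from the workers and not just from the Jenkins agent. A stripped-down version of that read path might look like the sketch below; the URI, database, and collection are placeholders taken loosely from the log, and the hashing step is omitted.

    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.io.mongodb.MongoDbIO;
    import org.apache.beam.sdk.options.PipelineOptions;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;
    import org.apache.beam.sdk.transforms.MapElements;
    import org.apache.beam.sdk.values.TypeDescriptors;
    import org.bson.Document;

    public class MongoReadSketch {
      public static void main(String[] args) {
        PipelineOptions options = PipelineOptionsFactory.fromArgs(args).create();
        Pipeline pipeline = Pipeline.create(options);

        pipeline
            // split() from the trace above runs when this source is expanded on the workers.
            .apply("Read all documents", MongoDbIO.read()
                .withUri("mongodb://104.197.195.202:27017") // placeholder, copied from the log
                .withDatabase("beam")                        // placeholder database name
                .withCollection("test_collection"))          // placeholder collection name
            .apply("Map documents to Strings",
                MapElements.into(TypeDescriptors.strings())
                    .via((Document doc) -> doc.toJson()));

        pipeline.run().waitUntilFinish();
      }
    }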

[INFO] 
[INFO] Results:
[INFO] 
[ERROR] Errors: 
[ERROR]   MongoDBIOIT.testWriteAndRead:131  Runtime com.mongodb.MongoTimeoutException: ...
[INFO] 
[ERROR] Tests run: 1, Failures: 0, Errors: 1, Skipped: 0
[INFO] 
[INFO] 
[INFO] --- maven-dependency-plugin:3.0.2:analyze-only (default) @ beam-sdks-java-io-mongodb ---
[INFO] No dependency problems found
[INFO] 
[INFO] --- maven-failsafe-plugin:2.21.0:verify (default) @ beam-sdks-java-io-mongodb ---
[INFO] Failsafe report directory: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/d98e5994/beam/sdks/java/io/mongodb/target/failsafe-reports>
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 10:45 min
[INFO] Finished at: 2018-04-30T12:19:47Z
[INFO] Final Memory: 94M/3258M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.21.0:verify (default) on project beam-sdks-java-io-mongodb: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/d98e5994/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
[ERROR] Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
[ERROR] -> [Help 1]
org.apache.maven.lifecycle.LifecycleExecutionException: Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.21.0:verify (default) on project beam-sdks-java-io-mongodb: There are test failures.

Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/d98e5994/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:213)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
    at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
    at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
    at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
    at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
    at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
    at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
    at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke (Method.java:498)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
    at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
    at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
Caused by: org.apache.maven.plugin.MojoFailureException: There are test failures.

Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/d98e5994/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
    at org.apache.maven.plugin.surefire.SurefireHelper.throwException (SurefireHelper.java:240)
    at org.apache.maven.plugin.surefire.SurefireHelper.reportExecution (SurefireHelper.java:112)
    at org.apache.maven.plugin.failsafe.VerifyMojo.execute (VerifyMojo.java:192)
    at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo (DefaultBuildPluginManager.java:134)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:208)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
    at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
    at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
    at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
    at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
    at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
    at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
    at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke (Method.java:498)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
    at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
    at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
[ERROR] 
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException

STDERR: 
2018-04-30 12:19:48,112 d98e5994 MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-04-30 12:19:48,113 d98e5994 MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-04-30 12:19:48,113 d98e5994 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525082500576> delete -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml> --ignore-not-found
2018-04-30 12:19:48,634 d98e5994 MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 780, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-04-30 12:19:48,635 d98e5994 MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-04-30 12:19:48,692 d98e5994 MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-04-30 12:19:48,693 d98e5994 MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/d98e5994/pkb.log>
2018-04-30 12:19:48,693 d98e5994 MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/d98e5994/completion_statuses.json>
Build step 'Execute shell' marked build as failure

Build failed in Jenkins: beam_PerformanceTests_MongoDBIO_IT #112

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/112/display/redirect?page=changes>

Changes:

[chamikara] [BEAM-3973] Adds a parameter to the Cloud Spanner read connector that

------------------------------------------
[...truncated 887.44 KB...]
[INFO] Excluding io.grpc:grpc-protobuf:jar:1.2.0 from the shaded jar.
[INFO] Excluding com.google.api:api-common:jar:1.0.0-rc2 from the shaded jar.
[INFO] Excluding com.google.api:gax:jar:1.3.1 from the shaded jar.
[INFO] Excluding org.threeten:threetenbp:jar:1.3.3 from the shaded jar.
[INFO] Excluding com.google.cloud:google-cloud-core-grpc:jar:1.2.0 from the shaded jar.
[INFO] Excluding com.google.protobuf:protobuf-java-util:jar:3.2.0 from the shaded jar.
[INFO] Excluding com.google.code.gson:gson:jar:2.7 from the shaded jar.
[INFO] Excluding com.google.apis:google-api-services-pubsub:jar:v1-rev382-1.23.0 from the shaded jar.
[INFO] Excluding com.google.api.grpc:grpc-google-cloud-pubsub-v1:jar:0.1.18 from the shaded jar.
[INFO] Excluding com.google.api.grpc:proto-google-cloud-pubsub-v1:jar:0.1.18 from the shaded jar.
[INFO] Excluding com.google.api.grpc:proto-google-iam-v1:jar:0.1.18 from the shaded jar.
[INFO] Excluding com.google.cloud.datastore:datastore-v1-proto-client:jar:1.4.0 from the shaded jar.
[INFO] Excluding com.google.http-client:google-http-client-protobuf:jar:1.23.0 from the shaded jar.
[INFO] Excluding com.google.http-client:google-http-client-jackson:jar:1.23.0 from the shaded jar.
[INFO] Excluding com.google.cloud.datastore:datastore-v1-protos:jar:1.3.0 from the shaded jar.
[INFO] Excluding com.google.api.grpc:grpc-google-common-protos:jar:0.1.9 from the shaded jar.
[INFO] Excluding io.grpc:grpc-auth:jar:1.2.0 from the shaded jar.
[INFO] Excluding io.grpc:grpc-netty:jar:1.2.0 from the shaded jar.
[INFO] Excluding io.netty:netty-codec-http2:jar:4.1.8.Final from the shaded jar.
[INFO] Excluding io.netty:netty-codec-http:jar:4.1.8.Final from the shaded jar.
[INFO] Excluding io.netty:netty-handler-proxy:jar:4.1.8.Final from the shaded jar.
[INFO] Excluding io.netty:netty-codec-socks:jar:4.1.8.Final from the shaded jar.
[INFO] Excluding io.netty:netty-handler:jar:4.1.8.Final from the shaded jar.
[INFO] Excluding io.netty:netty-buffer:jar:4.1.8.Final from the shaded jar.
[INFO] Excluding io.netty:netty-common:jar:4.1.8.Final from the shaded jar.
[INFO] Excluding io.netty:netty-transport:jar:4.1.8.Final from the shaded jar.
[INFO] Excluding io.netty:netty-resolver:jar:4.1.8.Final from the shaded jar.
[INFO] Excluding io.netty:netty-codec:jar:4.1.8.Final from the shaded jar.
[INFO] Excluding io.grpc:grpc-stub:jar:1.2.0 from the shaded jar.
[INFO] Excluding io.grpc:grpc-all:jar:1.2.0 from the shaded jar.
[INFO] Excluding io.grpc:grpc-okhttp:jar:1.2.0 from the shaded jar.
[INFO] Excluding com.squareup.okhttp:okhttp:jar:2.5.0 from the shaded jar.
[INFO] Excluding com.squareup.okio:okio:jar:1.6.0 from the shaded jar.
[INFO] Excluding io.grpc:grpc-protobuf-lite:jar:1.2.0 from the shaded jar.
[INFO] Excluding io.grpc:grpc-protobuf-nano:jar:1.2.0 from the shaded jar.
[INFO] Excluding com.google.protobuf.nano:protobuf-javanano:jar:3.0.0-alpha-5 from the shaded jar.
[INFO] Excluding com.google.cloud:google-cloud-core:jar:1.0.2 from the shaded jar.
[INFO] Excluding org.json:json:jar:20160810 from the shaded jar.
[INFO] Excluding com.google.cloud:google-cloud-spanner:jar:0.20.0b-beta from the shaded jar.
[INFO] Excluding com.google.api.grpc:proto-google-cloud-spanner-v1:jar:0.1.11b from the shaded jar.
[INFO] Excluding com.google.api.grpc:proto-google-cloud-spanner-admin-instance-v1:jar:0.1.11 from the shaded jar.
[INFO] Excluding com.google.api.grpc:grpc-google-cloud-spanner-v1:jar:0.1.11b from the shaded jar.
[INFO] Excluding com.google.api.grpc:grpc-google-cloud-spanner-admin-database-v1:jar:0.1.11 from the shaded jar.
[INFO] Excluding com.google.api.grpc:grpc-google-cloud-spanner-admin-instance-v1:jar:0.1.11 from the shaded jar.
[INFO] Excluding com.google.api.grpc:grpc-google-longrunning-v1:jar:0.1.11 from the shaded jar.
[INFO] Excluding com.google.api.grpc:proto-google-longrunning-v1:jar:0.1.11 from the shaded jar.
[INFO] Excluding com.google.cloud.bigtable:bigtable-protos:jar:1.0.0-pre3 from the shaded jar.
[INFO] Excluding com.google.cloud.bigtable:bigtable-client-core:jar:1.0.0 from the shaded jar.
[INFO] Excluding commons-logging:commons-logging:jar:1.2 from the shaded jar.
[INFO] Excluding com.google.auth:google-auth-library-appengine:jar:0.7.0 from the shaded jar.
[INFO] Excluding io.opencensus:opencensus-contrib-grpc-util:jar:0.7.0 from the shaded jar.
[INFO] Excluding io.dropwizard.metrics:metrics-core:jar:3.1.2 from the shaded jar.
[INFO] Excluding com.google.protobuf:protobuf-java:jar:3.2.0 from the shaded jar.
[INFO] Excluding io.netty:netty-tcnative-boringssl-static:jar:1.1.33.Fork26 from the shaded jar.
[INFO] Excluding com.google.api.grpc:proto-google-cloud-spanner-admin-database-v1:jar:0.1.9 from the shaded jar.
[INFO] Excluding com.google.api.grpc:proto-google-common-protos:jar:0.1.9 from the shaded jar.
[INFO] Excluding com.google.api-client:google-api-client:jar:1.23.0 from the shaded jar.
[INFO] Excluding com.google.oauth-client:google-oauth-client:jar:1.23.0 from the shaded jar.
[INFO] Excluding com.google.http-client:google-http-client:jar:1.23.0 from the shaded jar.
[INFO] Excluding org.apache.httpcomponents:httpclient:jar:4.0.1 from the shaded jar.
[INFO] Excluding org.apache.httpcomponents:httpcore:jar:4.0.1 from the shaded jar.
[INFO] Excluding commons-codec:commons-codec:jar:1.3 from the shaded jar.
[INFO] Excluding com.google.http-client:google-http-client-jackson2:jar:1.23.0 from the shaded jar.
[INFO] Excluding com.google.apis:google-api-services-dataflow:jar:v1b3-rev221-1.23.0 from the shaded jar.
[INFO] Excluding com.google.apis:google-api-services-clouddebugger:jar:v2-rev233-1.23.0 from the shaded jar.
[INFO] Excluding com.google.apis:google-api-services-storage:jar:v1-rev124-1.23.0 from the shaded jar.
[INFO] Excluding com.google.auth:google-auth-library-credentials:jar:0.7.1 from the shaded jar.
[INFO] Excluding com.google.auth:google-auth-library-oauth2-http:jar:0.7.1 from the shaded jar.
[INFO] Excluding com.google.cloud.bigdataoss:util:jar:1.4.5 from the shaded jar.
[INFO] Excluding com.google.api-client:google-api-client-java6:jar:1.23.0 from the shaded jar.
[INFO] Excluding com.google.api-client:google-api-client-jackson2:jar:1.23.0 from the shaded jar.
[INFO] Excluding com.google.oauth-client:google-oauth-client-java6:jar:1.23.0 from the shaded jar.
[INFO] Replacing original artifact with shaded artifact.
[INFO] Replacing <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/fd007968/beam/sdks/java/io/mongodb/target/beam-sdks-java-io-mongodb-2.5.0-SNAPSHOT.jar> with <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/fd007968/beam/sdks/java/io/mongodb/target/beam-sdks-java-io-mongodb-2.5.0-SNAPSHOT-shaded.jar>
[INFO] Replacing original test artifact with shaded test artifact.
[INFO] Replacing <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/fd007968/beam/sdks/java/io/mongodb/target/beam-sdks-java-io-mongodb-2.5.0-SNAPSHOT-tests.jar> with <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/fd007968/beam/sdks/java/io/mongodb/target/beam-sdks-java-io-mongodb-2.5.0-SNAPSHOT-shaded-tests.jar>
[INFO] Dependency-reduced POM written at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/fd007968/beam/sdks/java/io/mongodb/dependency-reduced-pom.xml>
[INFO] 
[INFO] --- maven-failsafe-plugin:2.21.0:integration-test (default) @ beam-sdks-java-io-mongodb ---
[INFO] Failsafe report directory: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/fd007968/beam/sdks/java/io/mongodb/target/failsafe-reports>
[INFO] parallel='all', perCoreThreadCount=true, threadCount=4, useUnlimitedThreads=false, threadCountSuites=0, threadCountClasses=0, threadCountMethods=0, parallelOptimized=true
[INFO] 
[INFO] -------------------------------------------------------
[INFO]  T E S T S
[INFO] -------------------------------------------------------
[INFO] Running org.apache.beam.sdk.io.mongodb.MongoDBIOIT
[ERROR] Tests run: 1, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 686.556 s <<< FAILURE! - in org.apache.beam.sdk.io.mongodb.MongoDBIOIT
[ERROR] testWriteAndRead(org.apache.beam.sdk.io.mongodb.MongoDBIOIT)  Time elapsed: 686.556 s  <<< ERROR!
com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.225.162.7:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:158)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:133)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:128)
	at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:51)
	at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:36)
	at com.mongodb.Mongo.execute(Mongo.java:781)
	at com.mongodb.Mongo$2.execute(Mongo.java:764)
	at com.mongodb.MongoDatabaseImpl.drop(MongoDatabaseImpl.java:136)
	at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.tearDown(MongoDBIOIT.java:103)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:33)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.junit.rules.RunRules.evaluate(RunRules.java:20)
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
	at org.apache.maven.surefire.junitcore.pc.Scheduler$1.run(Scheduler.java:410)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)

[INFO] 
[INFO] Results:
[INFO] 
[ERROR] Errors: 
[ERROR]   MongoDBIOIT.tearDown:103  MongoTimeout Timed out after 30000 ms while waiting...
[INFO] 
[ERROR] Tests run: 1, Failures: 0, Errors: 1, Skipped: 0
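
The error above is a connectivity failure rather than a test-logic failure: the driver spent the full 30000 ms server-selection window without ever opening a socket to 35.225.162.7:27017, apparently the external address of the MongoDB load-balancer service the benchmark provisions. A minimal, standalone reachability check against that address, using the same 3.x Java driver API that appears in the stack trace, is sketched below; it is illustrative only (the class name and the shorter timeouts are arbitrary choices, not part of MongoDBIOIT).

import com.mongodb.MongoClient;
import com.mongodb.MongoClientOptions;
import com.mongodb.MongoClientURI;
import org.bson.Document;

public class MongoReachabilityCheck {
  public static void main(String[] args) {
    // Address copied from the failure above; substitute the LoadBalancer IP of the cluster under test.
    String uri = "mongodb://35.225.162.7:27017";

    // serverSelectionTimeout is the 30000 ms window behind "Timed out ... WritableServerSelector";
    // connectTimeout is behind the nested java.net.SocketTimeoutException.
    MongoClientOptions.Builder options = MongoClientOptions.builder()
        .serverSelectionTimeout(10000)  // fail fast while diagnosing
        .connectTimeout(10000);

    MongoClient client = new MongoClient(new MongoClientURI(uri, options));
    try {
      // Forces a real round trip; throws MongoTimeoutException when the host is unreachable.
      client.getDatabase("admin").runCommand(new Document("ping", 1));
      System.out.println("MongoDB reachable at " + uri);
    } finally {
      client.close();
    }
  }
}

Lowering serverSelectionTimeout only shortens the wait; the timeout itself indicates the host stayed unreachable from this executor for the whole window.
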
[INFO] 
[INFO] 
[INFO] --- maven-dependency-plugin:3.0.2:analyze-only (default) @ beam-sdks-java-io-mongodb ---
[INFO] No dependency problems found
[INFO] 
[INFO] --- maven-failsafe-plugin:2.21.0:verify (default) @ beam-sdks-java-io-mongodb ---
[INFO] Failsafe report directory: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/fd007968/beam/sdks/java/io/mongodb/target/failsafe-reports>
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 11:39 min
[INFO] Finished at: 2018-04-30T06:19:17Z
[INFO] Final Memory: 86M/2382M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.21.0:verify (default) on project beam-sdks-java-io-mongodb: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/fd007968/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
[ERROR] Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
[ERROR] -> [Help 1]
org.apache.maven.lifecycle.LifecycleExecutionException: Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.21.0:verify (default) on project beam-sdks-java-io-mongodb: There are test failures.

Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/fd007968/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:213)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
    at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
    at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
    at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
    at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
    at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
    at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
    at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke (Method.java:498)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
    at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
    at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
Caused by: org.apache.maven.plugin.MojoFailureException: There are test failures.

Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/fd007968/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
    at org.apache.maven.plugin.surefire.SurefireHelper.throwException (SurefireHelper.java:240)
    at org.apache.maven.plugin.surefire.SurefireHelper.reportExecution (SurefireHelper.java:112)
    at org.apache.maven.plugin.failsafe.VerifyMojo.execute (VerifyMojo.java:192)
    at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo (DefaultBuildPluginManager.java:134)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:208)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
    at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
    at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
    at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
    at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
    at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
    at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
    at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke (Method.java:498)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
    at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
    at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
[ERROR] 
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException

STDERR: 
2018-04-30 06:19:17,998 fd007968 MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-04-30 06:19:17,999 fd007968 MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-04-30 06:19:18,000 fd007968 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525064499144> delete -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml> --ignore-not-found
2018-04-30 06:19:18,380 fd007968 MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 780, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-04-30 06:19:18,380 fd007968 MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-04-30 06:19:18,394 fd007968 MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-04-30 06:19:18,395 fd007968 MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/fd007968/pkb.log>
2018-04-30 06:19:18,395 fd007968 MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/fd007968/completion_statuses.json>
Build step 'Execute shell' marked build as failure

Build failed in Jenkins: beam_PerformanceTests_MongoDBIO_IT #111

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/111/display/redirect>

------------------------------------------
[...truncated 906.13 KB...]
	at java.lang.Thread.run(Thread.java:745)
Caused by: com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches ReadPreferenceServerSelector{readPreference=primary}. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.226.0.156:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getReadConnectionSource(ClusterBinding.java:63)
	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:210)
	at com.mongodb.operation.FindOperation.execute(FindOperation.java:482)
	at com.mongodb.operation.FindOperation.execute(FindOperation.java:79)
	at com.mongodb.Mongo.execute(Mongo.java:772)
	at com.mongodb.Mongo$2.execute(Mongo.java:759)
	at com.mongodb.OperationIterable.iterator(OperationIterable.java:47)
	at com.mongodb.FindIterableImpl.iterator(FindIterableImpl.java:143)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbReader.start(MongoDbIO.java:456)
	at com.google.cloud.dataflow.worker.WorkerCustomSources$BoundedReaderIterator.start(WorkerCustomSources.java:592)
	... 14 more
java.io.IOException: Failed to start reading from source: org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbSource@387b69c8
	at com.google.cloud.dataflow.worker.WorkerCustomSources$BoundedReaderIterator.start(WorkerCustomSources.java:595)
	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation$SynchronizedReaderIterator.start(ReadOperation.java:360)
	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.runReadLoop(ReadOperation.java:193)
	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.start(ReadOperation.java:158)
	at com.google.cloud.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:75)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)
Caused by: com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches ReadPreferenceServerSelector{readPreference=primary}. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.226.0.156:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getReadConnectionSource(ClusterBinding.java:63)
	at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:210)
	at com.mongodb.operation.FindOperation.execute(FindOperation.java:482)
	at com.mongodb.operation.FindOperation.execute(FindOperation.java:79)
	at com.mongodb.Mongo.execute(Mongo.java:772)
	at com.mongodb.Mongo$2.execute(Mongo.java:759)
	at com.mongodb.OperationIterable.iterator(OperationIterable.java:47)
	at com.mongodb.FindIterableImpl.iterator(FindIterableImpl.java:143)
	at org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbReader.start(MongoDbIO.java:456)
	at com.google.cloud.dataflow.worker.WorkerCustomSources$BoundedReaderIterator.start(WorkerCustomSources.java:592)
	... 14 more
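
Both copies of the stack trace above fail at the same first step: MongoDbIO$BoundedMongoDbReader.start() never obtains a cursor because selectServer() finds no reachable host at 35.226.0.156:27017. Stripped of the Beam wrapper, the driver calls that have to succeed on each Dataflow worker amount to the sketch below, which can be used to reproduce the timeout by hand; the database and collection names are placeholders, not the ones the test uses.

import com.mongodb.MongoClient;
import com.mongodb.MongoClientURI;
import com.mongodb.client.MongoCursor;
import org.bson.Document;

public class ReaderStartRepro {
  public static void main(String[] args) {
    // Same external address the workers tried to reach; database/collection names are placeholders.
    MongoClient client = new MongoClient(new MongoClientURI("mongodb://35.226.0.156:27017"));
    try {
      MongoCursor<Document> cursor = client
          .getDatabase("beam")
          .getCollection("test_collection")
          .find()
          .iterator();  // server selection happens here and is what times out after 30000 ms
      System.out.println("First document: " + (cursor.hasNext() ? cursor.next().toJson() : "<none>"));
      cursor.close();
    } finally {
      client.close();
    }
  }
}
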
Workflow failed. Causes: S03:Read all documents/Read(BoundedMongoDbSource)+Map documents to Strings/Map+Calculate hashcode/WithKeys/AddKeys/Map+Calculate hashcode/Combine.perKey(Hashing)/GroupByKey+Calculate hashcode/Combine.perKey(Hashing)/Combine.GroupedValues/Partial+Calculate hashcode/Combine.perKey(Hashing)/GroupByKey/Reify+Calculate hashcode/Combine.perKey(Hashing)/GroupByKey/Write failed., A work item was attempted 4 times without success. Each time the worker eventually lost contact with the service. The work item was attempted on: 
  mongodbioit0testwriteandr-04291714-prxj-harness-qmwz,
  mongodbioit0testwriteandr-04291714-prxj-harness-psnj,
  mongodbioit0testwriteandr-04291714-prxj-harness-qmwz,
  mongodbioit0testwriteandr-04291714-prxj-harness-qmwz
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
	at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
	at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:346)
	at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:328)
	at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(MongoDBIOIT.java:131)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.junit.rules.RunRules.evaluate(RunRules.java:20)
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
	at org.apache.maven.surefire.junitcore.pc.Scheduler$1.run(Scheduler.java:410)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)

[ERROR] testWriteAndRead(org.apache.beam.sdk.io.mongodb.MongoDBIOIT)  Time elapsed: 0 s  <<< ERROR!
com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches WritableServerSelector. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.226.0.156:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
	at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
	at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
	at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
	at com.mongodb.binding.ClusterBinding.getWriteConnectionSource(ClusterBinding.java:68)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:158)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:133)
	at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:128)
	at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:51)
	at com.mongodb.operation.DropDatabaseOperation.execute(DropDatabaseOperation.java:36)
	at com.mongodb.Mongo.execute(Mongo.java:781)
	at com.mongodb.Mongo$2.execute(Mongo.java:764)
	at com.mongodb.MongoDatabaseImpl.drop(MongoDatabaseImpl.java:136)
	at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.tearDown(MongoDBIOIT.java:103)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:33)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.junit.rules.RunRules.evaluate(RunRules.java:20)
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
	at org.apache.maven.surefire.junitcore.pc.Scheduler$1.run(Scheduler.java:410)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)

[INFO] 
[INFO] Results:
[INFO] 
[ERROR] Errors: 
[ERROR] org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(org.apache.beam.sdk.io.mongodb.MongoDBIOIT)
[ERROR]   Run 1: MongoDBIOIT.testWriteAndRead:131  Runtime java.io.IOException: Failed to star...
[ERROR]   Run 2: MongoDBIOIT.tearDown:103  MongoTimeout Timed out after 30000 ms while waiting...
[INFO] 
[INFO] 
[ERROR] Tests run: 1, Failures: 0, Errors: 1, Skipped: 0
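
For context on the two runs listed above: Run 1 is the read phase of testWriteAndRead failing on the Dataflow workers, and Run 2 is the tearDown database drop timing out from the Jenkins executor. The overall shape of the test is a write pipeline followed by a read-and-verify pipeline over MongoDbIO; a rough, self-contained sketch of that shape against the public MongoDbIO API follows (the URI, database/collection names, document contents and the final Count are placeholders; the actual IT verifies a hash of the read documents rather than a count).

import java.util.ArrayList;
import java.util.List;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.coders.SerializableCoder;
import org.apache.beam.sdk.io.mongodb.MongoDbIO;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Count;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.MapElements;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.TypeDescriptors;
import org.bson.Document;

public class MongoDbIoRoundTripSketch {
  public static void main(String[] args) {
    String uri = "mongodb://35.226.0.156:27017";  // LoadBalancer IP from the log; a placeholder here
    String database = "beam";                     // placeholder
    String collection = "test_collection";        // placeholder

    // Write phase: generate documents and push them through MongoDbIO.write().
    List<Document> docs = new ArrayList<>();
    for (int i = 0; i < 1000; i++) {
      docs.add(new Document("_id", i).append("value", "doc-" + i));
    }
    Pipeline writePipeline = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());
    writePipeline
        .apply("Produce documents",
            Create.of(docs).withCoder(SerializableCoder.of(Document.class)))
        .apply("Write documents to MongoDB",
            MongoDbIO.write().withUri(uri).withDatabase(database).withCollection(collection));
    writePipeline.run().waitUntilFinish();

    // Read phase: the step named "Read all documents" in the Dataflow failure above.
    Pipeline readPipeline = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());
    PCollection<String> json = readPipeline
        .apply("Read all documents",
            MongoDbIO.read().withUri(uri).withDatabase(database).withCollection(collection))
        .apply("Map documents to Strings",
            MapElements.into(TypeDescriptors.strings()).via((Document doc) -> doc.toJson()));
    json.apply("Count documents", Count.globally());  // the real IT verifies a hash, not a count
    readPipeline.run().waitUntilFinish();
  }
}
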
[INFO] 
[INFO] 
[INFO] --- maven-dependency-plugin:3.0.2:analyze-only (default) @ beam-sdks-java-io-mongodb ---
[INFO] No dependency problems found
[INFO] 
[INFO] --- maven-failsafe-plugin:2.21.0:verify (default) @ beam-sdks-java-io-mongodb ---
[INFO] Failsafe report directory: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/763177ea/beam/sdks/java/io/mongodb/target/failsafe-reports>
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 13:51 min
[INFO] Finished at: 2018-04-30T00:21:51Z
[INFO] Final Memory: 95M/3257M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.21.0:verify (default) on project beam-sdks-java-io-mongodb: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/763177ea/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
[ERROR] Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
[ERROR] -> [Help 1]
org.apache.maven.lifecycle.LifecycleExecutionException: Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.21.0:verify (default) on project beam-sdks-java-io-mongodb: There are test failures.

Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/763177ea/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:213)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
    at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
    at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
    at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
    at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
    at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
    at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
    at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke (Method.java:498)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
    at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
    at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
Caused by: org.apache.maven.plugin.MojoFailureException: There are test failures.

Please refer to <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/763177ea/beam/sdks/java/io/mongodb/target/failsafe-reports> for the individual test results.
Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
    at org.apache.maven.plugin.surefire.SurefireHelper.throwException (SurefireHelper.java:240)
    at org.apache.maven.plugin.surefire.SurefireHelper.reportExecution (SurefireHelper.java:112)
    at org.apache.maven.plugin.failsafe.VerifyMojo.execute (VerifyMojo.java:192)
    at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo (DefaultBuildPluginManager.java:134)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:208)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
    at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
    at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
    at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
    at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
    at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
    at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
    at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke (Method.java:498)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
    at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
    at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
[ERROR] 
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException

STDERR: 
2018-04-30 00:21:51,948 763177ea MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-04-30 00:21:51,949 763177ea MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-04-30 00:21:51,949 763177ea MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-1525032085276> delete -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml> --ignore-not-found
2018-04-30 00:21:52,441 763177ea MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 780, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-04-30 00:21:52,441 763177ea MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-04-30 00:21:52,494 763177ea MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-04-30 00:21:52,495 763177ea MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/763177ea/pkb.log>
2018-04-30 00:21:52,495 763177ea MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/763177ea/completion_statuses.json>
Build step 'Execute shell' marked build as failure