Posted to dev@lucene.apache.org by Policeman Jenkins Server <je...@thetaphi.de> on 2018/08/20 23:46:47 UTC

[JENKINS] Lucene-Solr-7.x-Linux (64bit/jdk1.8.0_172) - Build # 2598 - Still Unstable!

Build: https://jenkins.thetaphi.de/job/Lucene-Solr-7.x-Linux/2598/
Java: 64bit/jdk1.8.0_172 -XX:-UseCompressedOops -XX:+UseG1GC

1 test failed.
FAILED:  org.apache.solr.cloud.MoveReplicaHDFSTest.testFailedMove

Error Message:
No live SolrServers available to handle this request:[http://127.0.0.1:38205/solr/MoveReplicaHDFSTest_failed_coll_true, http://127.0.0.1:43513/solr/MoveReplicaHDFSTest_failed_coll_true]

Stack Trace:
org.apache.solr.client.solrj.SolrServerException: No live SolrServers available to handle this request:[http://127.0.0.1:38205/solr/MoveReplicaHDFSTest_failed_coll_true, http://127.0.0.1:43513/solr/MoveReplicaHDFSTest_failed_coll_true]
	at __randomizedtesting.SeedInfo.seed([56D234FA12787E63:FC1FE708A5ABABB3]:0)
	at org.apache.solr.client.solrj.impl.LBHttpSolrClient.request(LBHttpSolrClient.java:462)
	at org.apache.solr.client.solrj.impl.CloudSolrClient.sendRequest(CloudSolrClient.java:1109)
	at org.apache.solr.client.solrj.impl.CloudSolrClient.requestWithRetryOnStaleState(CloudSolrClient.java:886)
	at org.apache.solr.client.solrj.impl.CloudSolrClient.requestWithRetryOnStaleState(CloudSolrClient.java:996)
	at org.apache.solr.client.solrj.impl.CloudSolrClient.request(CloudSolrClient.java:819)
	at org.apache.solr.client.solrj.SolrRequest.process(SolrRequest.java:194)
	at org.apache.solr.client.solrj.SolrClient.query(SolrClient.java:942)
	at org.apache.solr.cloud.MoveReplicaTest.testFailedMove(MoveReplicaTest.java:289)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1737)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:934)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:970)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:984)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:943)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:829)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:879)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:890)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.solr.client.solrj.impl.HttpSolrClient$RemoteSolrException: Error from server at http://127.0.0.1:43513/solr/MoveReplicaHDFSTest_failed_coll_true: no servers hosting shard: shard1
	at org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:643)
	at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:255)
	at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:244)
	at org.apache.solr.client.solrj.impl.LBHttpSolrClient.doRequest(LBHttpSolrClient.java:483)
	at org.apache.solr.client.solrj.impl.LBHttpSolrClient.request(LBHttpSolrClient.java:436)
	... 46 more
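
For context: the query above is issued through CloudSolrClient, which resolves live replicas from ZooKeeper and load-balances across them via LBHttpSolrClient; the exception means every candidate replica URL was tried and exhausted. A minimal sketch of that call path, assuming the ZK address (port 40637, from the build log below) and the collection name from this report; the client setup is illustrative, not taken from the test source:

    import java.util.Collections;
    import java.util.Optional;

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.CloudSolrClient;
    import org.apache.solr.client.solrj.response.QueryResponse;

    public class QueryPathSketch {
      public static void main(String[] args) throws Exception {
        // ZooKeeper address as started by ZkTestServer in the build log below (assumed reachable)
        try (CloudSolrClient client = new CloudSolrClient.Builder(
                Collections.singletonList("127.0.0.1:40637"), Optional.of("/solr")).build()) {
          // CloudSolrClient delegates to LBHttpSolrClient, which walks the live
          // replica URLs in turn; when no replica can serve a shard, it fails with
          // "No live SolrServers available to handle this request".
          QueryResponse rsp = client.query("MoveReplicaHDFSTest_failed_coll_true",
              new SolrQuery("*:*"));
          System.out.println("numFound=" + rsp.getResults().getNumFound());
        }
      }
    }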

Build Log:
[...truncated 13781 lines...]
   [junit4] Suite: org.apache.solr.cloud.MoveReplicaHDFSTest
   [junit4]   2> Creating dataDir: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MoveReplicaHDFSTest_56D234FA12787E63-001/init-core-data-001
   [junit4]   2> 1711409 WARN  (SUITE-MoveReplicaHDFSTest-seed#[56D234FA12787E63]-worker) [    ] o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=9 numCloses=9
   [junit4]   2> 1711409 INFO  (SUITE-MoveReplicaHDFSTest-seed#[56D234FA12787E63]-worker) [    ] o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP=false
   [junit4]   2> 1711410 INFO  (SUITE-MoveReplicaHDFSTest-seed#[56D234FA12787E63]-worker) [    ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.util.RandomizeSSL(reason=, value=NaN, ssl=NaN, clientAuth=NaN)
   [junit4]   2> 1711410 INFO  (SUITE-MoveReplicaHDFSTest-seed#[56D234FA12787E63]-worker) [    ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
   [junit4]   2> 1711410 INFO  (SUITE-MoveReplicaHDFSTest-seed#[56D234FA12787E63]-worker) [    ] o.a.s.c.MiniSolrCloudCluster Starting cluster of 4 servers in /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MoveReplicaHDFSTest_56D234FA12787E63-001/tempDir-001
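   [junit4]   2> (The suite setup above is the stock SolrCloudTestCase harness. A minimal sketch of how a 4-node MiniSolrCloudCluster like this is typically brought up; the class name and configset name are illustrative, not taken from this test:)
   [junit4]   2>
   [junit4]   2>   import org.apache.solr.cloud.SolrCloudTestCase;
   [junit4]   2>   import org.apache.solr.util.RandomizeSSL;
   [junit4]   2>   import org.junit.BeforeClass;
   [junit4]   2>
   [junit4]   2>   @RandomizeSSL  // randomizes ssl/clientAuth per run, as logged above
   [junit4]   2>   public class ExampleCloudTest extends SolrCloudTestCase {
   [junit4]   2>     @BeforeClass
   [junit4]   2>     public static void setupCluster() throws Exception {
   [junit4]   2>       // starts 4 Jetty-based Solr nodes plus an embedded ZkTestServer
   [junit4]   2>       configureCluster(4)
   [junit4]   2>           .addConfig("conf1", configset("cloud-minimal"))  // configset name assumed
   [junit4]   2>           .configure();
   [junit4]   2>     }
   [junit4]   2>   }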
   [junit4]   2> 1711410 INFO  (SUITE-MoveReplicaHDFSTest-seed#[56D234FA12787E63]-worker) [    ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 1711410 INFO  (Thread-4591) [    ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 1711410 INFO  (Thread-4591) [    ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 1711412 ERROR (Thread-4591) [    ] o.a.z.s.ZooKeeperServer ZKShutdownHandler is not registered, so ZooKeeper server won't take any action on ERROR or SHUTDOWN server state changes
   [junit4]   2> 1711510 INFO  (SUITE-MoveReplicaHDFSTest-seed#[56D234FA12787E63]-worker) [    ] o.a.s.c.ZkTestServer start zk server on port:40637
   [junit4]   2> 1711512 INFO  (zkConnectionManagerCallback-7604-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1711516 INFO  (jetty-launcher-7601-thread-4) [    ] o.e.j.s.Server jetty-9.4.11.v20180605; built: 2018-06-05T18:24:03.829Z; git: d5fc0523cfa96bfebfbda19606cad384d772f04c; jvm 1.8.0_172-b11
   [junit4]   2> 1711516 INFO  (jetty-launcher-7601-thread-3) [    ] o.e.j.s.Server jetty-9.4.11.v20180605; built: 2018-06-05T18:24:03.829Z; git: d5fc0523cfa96bfebfbda19606cad384d772f04c; jvm 1.8.0_172-b11
   [junit4]   2> 1711516 INFO  (jetty-launcher-7601-thread-1) [    ] o.e.j.s.Server jetty-9.4.11.v20180605; built: 2018-06-05T18:24:03.829Z; git: d5fc0523cfa96bfebfbda19606cad384d772f04c; jvm 1.8.0_172-b11
   [junit4]   2> 1711516 INFO  (jetty-launcher-7601-thread-2) [    ] o.e.j.s.Server jetty-9.4.11.v20180605; built: 2018-06-05T18:24:03.829Z; git: d5fc0523cfa96bfebfbda19606cad384d772f04c; jvm 1.8.0_172-b11
   [junit4]   2> 1711517 INFO  (jetty-launcher-7601-thread-1) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 1711517 INFO  (jetty-launcher-7601-thread-1) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 1711517 INFO  (jetty-launcher-7601-thread-1) [    ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 1711517 INFO  (jetty-launcher-7601-thread-1) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@36c74ef2{/solr,null,AVAILABLE}
   [junit4]   2> 1711518 INFO  (jetty-launcher-7601-thread-4) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 1711518 INFO  (jetty-launcher-7601-thread-4) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 1711518 INFO  (jetty-launcher-7601-thread-3) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 1711518 INFO  (jetty-launcher-7601-thread-3) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 1711518 INFO  (jetty-launcher-7601-thread-4) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 1711518 INFO  (jetty-launcher-7601-thread-3) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 1711519 INFO  (jetty-launcher-7601-thread-2) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 1711519 INFO  (jetty-launcher-7601-thread-2) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 1711519 INFO  (jetty-launcher-7601-thread-2) [    ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 1711519 INFO  (jetty-launcher-7601-thread-4) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@6d2c8714{/solr,null,AVAILABLE}
   [junit4]   2> 1711519 INFO  (jetty-launcher-7601-thread-3) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@3a94b01d{/solr,null,AVAILABLE}
   [junit4]   2> 1711519 INFO  (jetty-launcher-7601-thread-2) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@7ce15b14{/solr,null,AVAILABLE}
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-2) [    ] o.e.j.s.AbstractConnector Started ServerConnector@48179316{HTTP/1.1,[http/1.1]}{127.0.0.1:40599}
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-3) [    ] o.e.j.s.AbstractConnector Started ServerConnector@6d90a4d1{HTTP/1.1,[http/1.1]}{127.0.0.1:43937}
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-2) [    ] o.e.j.s.Server Started @1711555ms
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-3) [    ] o.e.j.s.Server Started @1711555ms
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-4) [    ] o.e.j.s.AbstractConnector Started ServerConnector@4ee1da7{HTTP/1.1,[http/1.1]}{127.0.0.1:41201}
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=40599}
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-3) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=43937}
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-4) [    ] o.e.j.s.Server Started @1711555ms
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-1) [    ] o.e.j.s.AbstractConnector Started ServerConnector@6f86841f{HTTP/1.1,[http/1.1]}{127.0.0.1:38205}
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-4) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=41201}
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-1) [    ] o.e.j.s.Server Started @1711555ms
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=38205}
   [junit4]   2> 1711520 ERROR (jetty-launcher-7601-thread-2) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 1711520 ERROR (jetty-launcher-7601-thread-3) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 1711520 ERROR (jetty-launcher-7601-thread-1) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-3) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-2) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-1) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 1711520 ERROR (jetty-launcher-7601-thread-4) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-3) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.5.0
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-2) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.5.0
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-1) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.5.0
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-3) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-4) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-3) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-2) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-4) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.5.0
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-3) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2018-08-20T23:03:23.404Z
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-2) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-4) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-2) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2018-08-20T23:03:23.404Z
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-4) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-1) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-4) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2018-08-20T23:03:23.404Z
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-1) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1711520 INFO  (jetty-launcher-7601-thread-1) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2018-08-20T23:03:23.404Z
   [junit4]   2> 1711521 INFO  (zkConnectionManagerCallback-7612-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1711521 INFO  (zkConnectionManagerCallback-7610-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1711521 INFO  (zkConnectionManagerCallback-7607-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1711521 INFO  (zkConnectionManagerCallback-7608-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1711521 INFO  (jetty-launcher-7601-thread-2) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 1711521 INFO  (jetty-launcher-7601-thread-3) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 1711521 INFO  (jetty-launcher-7601-thread-4) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 1711521 INFO  (jetty-launcher-7601-thread-1) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 1711526 WARN  (NIOServerCxn.Factory:0.0.0.0/0.0.0.0:0) [    ] o.a.z.s.NIOServerCnxn Unable to read additional data from client sessionid 0x10017f4375b0002, likely client has closed socket
   [junit4]   2> 1711527 WARN  (NIOServerCxn.Factory:0.0.0.0/0.0.0.0:0) [    ] o.a.z.s.NIOServerCnxn Unable to read additional data from client sessionid 0x10017f4375b0003, likely client has closed socket
   [junit4]   2> 1711592 INFO  (jetty-launcher-7601-thread-3) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:40637/solr
   [junit4]   2> 1711593 INFO  (zkConnectionManagerCallback-7616-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1711594 INFO  (zkConnectionManagerCallback-7618-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1711618 INFO  (jetty-launcher-7601-thread-4) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:40637/solr
   [junit4]   2> 1711619 INFO  (zkConnectionManagerCallback-7624-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1711620 INFO  (zkConnectionManagerCallback-7626-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1711624 INFO  (jetty-launcher-7601-thread-3) [n:127.0.0.1:43937_solr    ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:43937_solr
   [junit4]   2> 1711624 INFO  (jetty-launcher-7601-thread-4) [n:127.0.0.1:41201_solr    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:41201_solr
   [junit4]   2> 1711624 INFO  (jetty-launcher-7601-thread-3) [n:127.0.0.1:43937_solr    ] o.a.s.c.Overseer Overseer (id=72083931706097670-127.0.0.1:43937_solr-n_0000000000) starting
   [junit4]   2> 1711624 INFO  (zkCallback-7625-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1711625 INFO  (zkCallback-7617-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1711627 INFO  (zkConnectionManagerCallback-7633-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1711628 INFO  (jetty-launcher-7601-thread-3) [n:127.0.0.1:43937_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1711628 INFO  (jetty-launcher-7601-thread-3) [n:127.0.0.1:43937_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:40637/solr ready
   [junit4]   2> 1711629 INFO  (OverseerStateUpdate-72083931706097670-127.0.0.1:43937_solr-n_0000000000) [n:127.0.0.1:43937_solr    ] o.a.s.c.Overseer Starting to work on the main queue : 127.0.0.1:43937_solr
   [junit4]   2> 1711632 INFO  (jetty-launcher-7601-thread-3) [n:127.0.0.1:43937_solr    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 transient cores
   [junit4]   2> 1711632 INFO  (jetty-launcher-7601-thread-3) [n:127.0.0.1:43937_solr    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:43937_solr
   [junit4]   2> 1711633 INFO  (zkCallback-7617-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 1711635 INFO  (zkCallback-7632-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 1711639 INFO  (zkCallback-7625-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 1711640 INFO  (jetty-launcher-7601-thread-3) [n:127.0.0.1:43937_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 1711640 INFO  (jetty-launcher-7601-thread-1) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:40637/solr
   [junit4]   2> 1711651 INFO  (zkConnectionManagerCallback-7641-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1711652 INFO  (zkConnectionManagerCallback-7643-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1711655 INFO  (jetty-launcher-7601-thread-3) [n:127.0.0.1:43937_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_43937.solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@699034ac
   [junit4]   2> 1711656 INFO  (jetty-launcher-7601-thread-4) [n:127.0.0.1:41201_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 1711656 INFO  (jetty-launcher-7601-thread-4) [n:127.0.0.1:41201_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:40637/solr ready
   [junit4]   2> 1711660 INFO  (zkConnectionManagerCallback-7645-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1711660 INFO  (jetty-launcher-7601-thread-3) [n:127.0.0.1:43937_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_43937.solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@699034ac
   [junit4]   2> 1711660 INFO  (jetty-launcher-7601-thread-3) [n:127.0.0.1:43937_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_43937.solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@699034ac
   [junit4]   2> 1711661 INFO  (jetty-launcher-7601-thread-3) [n:127.0.0.1:43937_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MoveReplicaHDFSTest_56D234FA12787E63-001/tempDir-001/node3/.
   [junit4]   2> 1711663 INFO  (jetty-launcher-7601-thread-1) [n:127.0.0.1:38205_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 1711663 INFO  (jetty-launcher-7601-thread-4) [n:127.0.0.1:41201_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 1711664 INFO  (jetty-launcher-7601-thread-1) [n:127.0.0.1:38205_solr    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 transient cores
   [junit4]   2> 1711664 INFO  (jetty-launcher-7601-thread-1) [n:127.0.0.1:38205_solr    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:38205_solr
   [junit4]   2> 1711665 INFO  (zkCallback-7617-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1711665 INFO  (zkCallback-7625-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1711668 INFO  (zkCallback-7632-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1711668 INFO  (zkCallback-7640-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1711671 INFO  (zkCallback-7644-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1711676 INFO  (jetty-launcher-7601-thread-4) [n:127.0.0.1:41201_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_41201.solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@699034ac
   [junit4]   2> 1711682 INFO  (jetty-launcher-7601-thread-4) [n:127.0.0.1:41201_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_41201.solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@699034ac
   [junit4]   2> 1711682 INFO  (jetty-launcher-7601-thread-4) [n:127.0.0.1:41201_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_41201.solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@699034ac
   [junit4]   2> 1711683 INFO  (jetty-launcher-7601-thread-4) [n:127.0.0.1:41201_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MoveReplicaHDFSTest_56D234FA12787E63-001/tempDir-001/node4/.
   [junit4]   2> 1711683 INFO  (zkConnectionManagerCallback-7653-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1711684 INFO  (jetty-launcher-7601-thread-1) [n:127.0.0.1:38205_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 1711684 INFO  (jetty-launcher-7601-thread-1) [n:127.0.0.1:38205_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:40637/solr ready
   [junit4]   2> 1711687 INFO  (jetty-launcher-7601-thread-1) [n:127.0.0.1:38205_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 1711701 INFO  (jetty-launcher-7601-thread-1) [n:127.0.0.1:38205_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_38205.solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@699034ac
   [junit4]   2> 1711706 INFO  (jetty-launcher-7601-thread-1) [n:127.0.0.1:38205_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_38205.solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@699034ac
   [junit4]   2> 1711706 INFO  (jetty-launcher-7601-thread-1) [n:127.0.0.1:38205_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_38205.solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@699034ac
   [junit4]   2> 1711707 INFO  (jetty-launcher-7601-thread-1) [n:127.0.0.1:38205_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MoveReplicaHDFSTest_56D234FA12787E63-001/tempDir-001/node1/.
   [junit4]   2> 1711725 INFO  (jetty-launcher-7601-thread-2) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:40637/solr
   [junit4]   2> 1711725 INFO  (zkConnectionManagerCallback-7658-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1711727 INFO  (zkConnectionManagerCallback-7660-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1711731 INFO  (jetty-launcher-7601-thread-2) [n:127.0.0.1:40599_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 1711732 INFO  (jetty-launcher-7601-thread-2) [n:127.0.0.1:40599_solr    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 transient cores
   [junit4]   2> 1711732 INFO  (jetty-launcher-7601-thread-2) [n:127.0.0.1:40599_solr    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:40599_solr
   [junit4]   2> 1711733 INFO  (zkCallback-7625-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1711733 INFO  (zkCallback-7632-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1711733 INFO  (zkCallback-7617-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1711733 INFO  (zkCallback-7644-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1711733 INFO  (zkCallback-7659-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1711733 INFO  (zkCallback-7640-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1711733 INFO  (zkCallback-7652-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1711744 INFO  (zkConnectionManagerCallback-7667-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1711745 INFO  (jetty-launcher-7601-thread-2) [n:127.0.0.1:40599_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (4)
   [junit4]   2> 1711745 INFO  (jetty-launcher-7601-thread-2) [n:127.0.0.1:40599_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:40637/solr ready
   [junit4]   2> 1711746 INFO  (jetty-launcher-7601-thread-2) [n:127.0.0.1:40599_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 1711755 INFO  (jetty-launcher-7601-thread-2) [n:127.0.0.1:40599_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_40599.solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@699034ac
   [junit4]   2> 1711759 INFO  (jetty-launcher-7601-thread-2) [n:127.0.0.1:40599_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_40599.solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@699034ac
   [junit4]   2> 1711759 INFO  (jetty-launcher-7601-thread-2) [n:127.0.0.1:40599_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_40599.solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@699034ac
   [junit4]   2> 1711760 INFO  (jetty-launcher-7601-thread-2) [n:127.0.0.1:40599_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MoveReplicaHDFSTest_56D234FA12787E63-001/tempDir-001/node2/.
   [junit4]   2> 1711781 INFO  (zkConnectionManagerCallback-7670-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1711783 INFO  (zkConnectionManagerCallback-7675-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1711784 INFO  (SUITE-MoveReplicaHDFSTest-seed#[56D234FA12787E63]-worker) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (4)
   [junit4]   2> 1711785 INFO  (SUITE-MoveReplicaHDFSTest-seed#[56D234FA12787E63]-worker) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:40637/solr ready
   [junit4]   2> 1711787 INFO  (qtp2037031586-20945) [n:127.0.0.1:40599_solr    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :overseerstatus with params action=OVERSEERSTATUS&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 1711789 INFO  (qtp2037031586-20945) [n:127.0.0.1:40599_solr    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={action=OVERSEERSTATUS&wt=javabin&version=2} status=0 QTime=2
   [junit4]   1> Formatting using clusterid: testClusterID
   [junit4]   2> 1711860 WARN  (SUITE-MoveReplicaHDFSTest-seed#[56D234FA12787E63]-worker) [    ] o.a.h.m.i.MetricsConfig Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
   [junit4]   2> 1711869 WARN  (SUITE-MoveReplicaHDFSTest-seed#[56D234FA12787E63]-worker) [    ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 1711870 INFO  (SUITE-MoveReplicaHDFSTest-seed#[56D234FA12787E63]-worker) [    ] o.m.log jetty-6.1.26
   [junit4]   2> 1711883 INFO  (SUITE-MoveReplicaHDFSTest-seed#[56D234FA12787E63]-worker) [    ] o.m.log Extract jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-2.7.4-tests.jar!/webapps/hdfs to ./temp/Jetty_localhost_localdomain_34241_hdfs____s902rs/webapp
   [junit4]   2> 1712370 INFO  (SUITE-MoveReplicaHDFSTest-seed#[56D234FA12787E63]-worker) [    ] o.m.log Started HttpServer2$SelectChannelConnectorWithSafeStartup@localhost.localdomain:34241
   [junit4]   2> 1712468 WARN  (SUITE-MoveReplicaHDFSTest-seed#[56D234FA12787E63]-worker) [    ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 1712468 INFO  (SUITE-MoveReplicaHDFSTest-seed#[56D234FA12787E63]-worker) [    ] o.m.log jetty-6.1.26
   [junit4]   2> 1712477 INFO  (SUITE-MoveReplicaHDFSTest-seed#[56D234FA12787E63]-worker) [    ] o.m.log Extract jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-2.7.4-tests.jar!/webapps/datanode to ./temp/Jetty_localhost_33071_datanode____.qx4g33/webapp
   [junit4]   2> 1712895 INFO  (SUITE-MoveReplicaHDFSTest-seed#[56D234FA12787E63]-worker) [    ] o.m.log Started HttpServer2$SelectChannelConnectorWithSafeStartup@localhost:33071
   [junit4]   2> 1712921 WARN  (SUITE-MoveReplicaHDFSTest-seed#[56D234FA12787E63]-worker) [    ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 1712922 INFO  (SUITE-MoveReplicaHDFSTest-seed#[56D234FA12787E63]-worker) [    ] o.m.log jetty-6.1.26
   [junit4]   2> 1712931 INFO  (SUITE-MoveReplicaHDFSTest-seed#[56D234FA12787E63]-worker) [    ] o.m.log Extract jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-2.7.4-tests.jar!/webapps/datanode to ./temp/Jetty_localhost_43101_datanode____7w0tiy/webapp
   [junit4]   2> 1713025 ERROR (DataNode: [[[DISK]file:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MoveReplicaHDFSTest_56D234FA12787E63-001/tempDir-002/hdfsBaseDir/data/data1/, [DISK]file:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MoveReplicaHDFSTest_56D234FA12787E63-001/tempDir-002/hdfsBaseDir/data/data2/]]  heartbeating to localhost.localdomain/127.0.0.1:40713) [    ] o.a.h.h.s.d.DirectoryScanner dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value below 1 ms/sec. Assuming default value of 1000
   [junit4]   2> 1713031 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x16d829234cc07: from storage DS-ab9b7e91-e385-4614-a7b8-24d063bf7415 node DatanodeRegistration(127.0.0.1:43265, datanodeUuid=7cd45531-bd4a-4db5-9658-5e1718aaf862, infoPort=42543, infoSecurePort=0, ipcPort=44835, storageInfo=lv=-56;cid=testClusterID;nsid=1882875509;c=0), blocks: 0, hasStaleStorage: true, processing time: 0 msecs
   [junit4]   2> 1713031 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x16d829234cc07: from storage DS-e3ad0e6c-5c04-4f52-927f-7be3c4fee1d3 node DatanodeRegistration(127.0.0.1:43265, datanodeUuid=7cd45531-bd4a-4db5-9658-5e1718aaf862, infoPort=42543, infoSecurePort=0, ipcPort=44835, storageInfo=lv=-56;cid=testClusterID;nsid=1882875509;c=0), blocks: 0, hasStaleStorage: false, processing time: 0 msecs
   [junit4]   2> 1713374 INFO  (SUITE-MoveReplicaHDFSTest-seed#[56D234FA12787E63]-worker) [    ] o.m.log Started HttpServer2$SelectChannelConnectorWithSafeStartup@localhost:43101
   [junit4]   2> 1713501 ERROR (DataNode: [[[DISK]file:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MoveReplicaHDFSTest_56D234FA12787E63-001/tempDir-002/hdfsBaseDir/data/data3/, [DISK]file:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MoveReplicaHDFSTest_56D234FA12787E63-001/tempDir-002/hdfsBaseDir/data/data4/]]  heartbeating to localhost.localdomain/127.0.0.1:40713) [    ] o.a.h.h.s.d.DirectoryScanner dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value below 1 ms/sec. Assuming default value of 1000
   [junit4]   2> 1713505 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x16d82ae795b06: from storage DS-36d69802-5aac-4dee-8a1a-8d320442bf7a node DatanodeRegistration(127.0.0.1:43883, datanodeUuid=dafd446b-8f2e-46ad-95b3-f014821addee, infoPort=37385, infoSecurePort=0, ipcPort=34271, storageInfo=lv=-56;cid=testClusterID;nsid=1882875509;c=0), blocks: 0, hasStaleStorage: true, processing time: 0 msecs
   [junit4]   2> 1713505 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x16d82ae795b06: from storage DS-73544c7c-4f5e-46ec-a517-152f109a9fe0 node DatanodeRegistration(127.0.0.1:43883, datanodeUuid=dafd446b-8f2e-46ad-95b3-f014821addee, infoPort=37385, infoSecurePort=0, ipcPort=34271, storageInfo=lv=-56;cid=testClusterID;nsid=1882875509;c=0), blocks: 0, hasStaleStorage: false, processing time: 0 msecs
   [junit4] IGNOR/A 0.00s J0 | MoveReplicaHDFSTest.testNormalFailedMove
   [junit4]    > Assumption #1: 'badapple' test group is disabled (@BadApple(bugUrl=https://issues.apache.org/jira/browse/SOLR-12028))
   [junit4]   2> 1713646 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [    ] o.a.s.SolrTestCaseJ4 ###Starting testNormalMove
   [junit4]   2> 1713647 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (4)
   [junit4]   2> 1713648 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [    ] o.e.j.s.Server jetty-9.4.11.v20180605; built: 2018-06-05T18:24:03.829Z; git: d5fc0523cfa96bfebfbda19606cad384d772f04c; jvm 1.8.0_172-b11
   [junit4]   2> 1713648 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 1713648 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 1713648 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 1713649 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@1c49abb3{/solr,null,AVAILABLE}
   [junit4]   2> 1713649 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@561914ba{HTTP/1.1,[http/1.1]}{127.0.0.1:43513}
   [junit4]   2> 1713649 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [    ] o.e.j.s.Server Started @1713684ms
   [junit4]   2> 1713649 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=43513}
   [junit4]   2> 1713649 ERROR (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 1713649 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 1713649 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 7.5.0
   [junit4]   2> 1713649 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1713649 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1713649 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2018-08-20T23:03:25.533Z
   [junit4]   2> 1713651 INFO  (zkConnectionManagerCallback-7679-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1713651 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 1713717 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:40637/solr
   [junit4]   2> 1713718 INFO  (zkConnectionManagerCallback-7683-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1713720 INFO  (zkConnectionManagerCallback-7685-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1713723 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [n:127.0.0.1:43513_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (4)
   [junit4]   2> 1713725 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [n:127.0.0.1:43513_solr    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 transient cores
   [junit4]   2> 1713725 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [n:127.0.0.1:43513_solr    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:43513_solr
   [junit4]   2> 1713725 INFO  (zkCallback-7625-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 1713726 INFO  (zkCallback-7617-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 1713726 INFO  (zkCallback-7644-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 1713726 INFO  (zkCallback-7640-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 1713726 INFO  (zkCallback-7674-thread-2) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 1713726 INFO  (zkCallback-7674-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 1713726 INFO  (zkCallback-7666-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 1713726 INFO  (zkCallback-7659-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 1713727 INFO  (zkCallback-7632-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 1713727 INFO  (zkCallback-7652-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 1713727 INFO  (zkCallback-7684-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 1713737 INFO  (zkConnectionManagerCallback-7692-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1713739 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [n:127.0.0.1:43513_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (5)
   [junit4]   2> 1713739 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [n:127.0.0.1:43513_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:40637/solr ready
   [junit4]   2> 1713739 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [n:127.0.0.1:43513_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 1713750 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [n:127.0.0.1:43513_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_43513.solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@699034ac
   [junit4]   2> 1713756 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [n:127.0.0.1:43513_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_43513.solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@699034ac
   [junit4]   2> 1713756 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [n:127.0.0.1:43513_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_43513.solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@699034ac
   [junit4]   2> 1713756 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [n:127.0.0.1:43513_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MoveReplicaHDFSTest_56D234FA12787E63-001/tempDir-001/node5/.
   [junit4]   2> 1713802 INFO  (OverseerCollectionConfigSetProcessor-72083931706097670-127.0.0.1:43937_solr-n_0000000000) [n:127.0.0.1:43937_solr    ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000000 doesn't exist.  Requestor may have disconnected from ZooKeeper
   [junit4]   2> 1713803 INFO  (zkConnectionManagerCallback-7695-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1713804 INFO  (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[56D234FA12787E63]) [    ] o.a.s.c.MoveReplicaTest total_jettys: 5
   [junit4]   2> 1713805 INFO  (qtp989258362-20928) [n:127.0.0.1:38205_solr    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&maxShardsPerNode=2&autoAddReplicas=false&name=MoveReplicaHDFSTest_coll_false&nrtReplicas=2&action=CREATE&numShards=2&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 1713807 INFO  (OverseerThreadFactory-7315-thread-2-processing-n:127.0.0.1:43937_solr) [n:127.0.0.1:43937_solr    ] o.a.s.c.a.c.CreateCollectionCmd Create collection MoveReplicaHDFSTest_coll_false
   [junit4]   2> 1713911 INFO  (OverseerStateUpdate-72083931706097670-127.0.0.1:43937_solr-n_0000000000) [n:127.0.0.1:43937_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"MoveReplicaHDFSTest_coll_false",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"MoveReplicaHDFSTest_coll_false_shard1_replica_n1",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:38205/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 1713912 INFO  (OverseerStateUpdate-72083931706097670-127.0.0.1:43937_solr-n_0000000000) [n:127.0.0.1:43937_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"MoveReplicaHDFSTest_coll_false",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"MoveReplicaHDFSTest_coll_false_shard1_replica_n2",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:40599/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 1713913 INFO  (OverseerStateUpdate-72083931706097670-127.0.0.1:43937_solr-n_0000000000) [n:127.0.0.1:43937_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"MoveReplicaHDFSTest_coll_false",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"MoveReplicaHDFSTest_coll_false_shard2_replica_n4",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:43937/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 1713913 INFO  (OverseerStateUpdate-72083931706097670-127.0.0.1:43937_solr-n_0000000000) [n:127.0.0.1:43937_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"MoveReplicaHDFSTest_coll_false",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"MoveReplicaHDFSTest_coll_false_shard2_replica_n6",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:41201/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
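   [junit4]   2> (The four ADDREPLICA state updates above all stem from the single CREATE call logged at the start of this block. In SolrJ that request looks roughly like the following sketch; the SolrClient instance is assumed, not part of the log:)
   [junit4]   2>
   [junit4]   2>   import org.apache.solr.client.solrj.SolrClient;
   [junit4]   2>   import org.apache.solr.client.solrj.request.CollectionAdminRequest;
   [junit4]   2>
   [junit4]   2>   static void createCollectionAsLogged(SolrClient client) throws Exception {
   [junit4]   2>     // parameters copied from the logged request: collection.configName=conf1,
   [junit4]   2>     // numShards=2, nrtReplicas=2, maxShardsPerNode=2, autoAddReplicas=false
   [junit4]   2>     CollectionAdminRequest
   [junit4]   2>         .createCollection("MoveReplicaHDFSTest_coll_false", "conf1", 2, 2)
   [junit4]   2>         .setMaxShardsPerNode(2)
   [junit4]   2>         .setAutoAddReplicas(false)
   [junit4]   2>         .process(client);
   [junit4]   2>   }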
   [junit4]   2> 1714117 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr    x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node5&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard1_replica_n2&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 1714117 INFO  (qtp989258362-20923) [n:127.0.0.1:38205_solr    x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node3&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard1_replica_n1&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 1714117 INFO  (qtp2041598349-20942) [n:127.0.0.1:43937_solr    x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node7&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard2_replica_n4&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard2&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 1714117 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr    x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node8&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard2_replica_n6&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard2&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 1714118 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr    x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 transient cores
   [junit4]   2> 1715126 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.5.0
   [junit4]   2> 1715126 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.5.0
   [junit4]   2> 1715128 INFO  (qtp2041598349-20942) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.5.0
   [junit4]   2> 1715128 INFO  (qtp989258362-20923) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.5.0
   [junit4]   2> 1715136 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.s.IndexSchema [MoveReplicaHDFSTest_coll_false_shard1_replica_n2] Schema name=minimal
   [junit4]   2> 1715136 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.s.IndexSchema [MoveReplicaHDFSTest_coll_false_shard2_replica_n6] Schema name=minimal
   [junit4]   2> 1715150 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 1715150 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 1715150 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.c.CoreContainer Creating SolrCore 'MoveReplicaHDFSTest_coll_false_shard1_replica_n2' using configuration from collection MoveReplicaHDFSTest_coll_false, trusted=true
   [junit4]   2> 1715150 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.CoreContainer Creating SolrCore 'MoveReplicaHDFSTest_coll_false_shard2_replica_n6' using configuration from collection MoveReplicaHDFSTest_coll_false, trusted=true
   [junit4]   2> 1715150 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_40599.solr.core.MoveReplicaHDFSTest_coll_false.shard1.replica_n2' (registry 'solr.core.MoveReplicaHDFSTest_coll_false.shard1.replica_n2') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@699034ac
   [junit4]   2> 1715150 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_41201.solr.core.MoveReplicaHDFSTest_coll_false.shard2.replica_n6' (registry 'solr.core.MoveReplicaHDFSTest_coll_false.shard2.replica_n6') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@699034ac
   [junit4]   2> 1715150 INFO  (qtp2041598349-20942) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.s.IndexSchema [MoveReplicaHDFSTest_coll_false_shard2_replica_n4] Schema name=minimal
   [junit4]   2> 1715150 INFO  (qtp989258362-20923) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.s.IndexSchema [MoveReplicaHDFSTest_coll_false_shard1_replica_n1] Schema name=minimal
   [junit4]   2> 1715150 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost.localdomain:40713/data
   [junit4]   2> 1715150 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 1715150 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.c.SolrCore solr.RecoveryStrategy.Builder
   [junit4]   2> 1715150 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.c.SolrCore [[MoveReplicaHDFSTest_coll_false_shard1_replica_n2] ] Opening new SolrCore at [/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MoveReplicaHDFSTest_56D234FA12787E63-001/tempDir-001/node2/MoveReplicaHDFSTest_coll_false_shard1_replica_n2], dataDir=[hdfs://localhost.localdomain:40713/data/MoveReplicaHDFSTest_coll_false/core_node5/data/]
   [junit4]   2> 1715151 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost.localdomain:40713/data
   [junit4]   2> 1715151 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 1715151 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.SolrCore solr.RecoveryStrategy.Builder
   [junit4]   2> 1715151 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.SolrCore [[MoveReplicaHDFSTest_coll_false_shard2_replica_n6] ] Opening new SolrCore at [/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MoveReplicaHDFSTest_56D234FA12787E63-001/tempDir-001/node4/MoveReplicaHDFSTest_coll_false_shard2_replica_n6], dataDir=[hdfs://localhost.localdomain:40713/data/MoveReplicaHDFSTest_coll_false/core_node8/data/]
   [junit4]   2> 1715152 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:40713/data/MoveReplicaHDFSTest_coll_false/core_node5/data/snapshot_metadata
   [junit4]   2> 1715152 INFO  (qtp989258362-20923) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 1715152 INFO  (qtp2041598349-20942) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 1715152 INFO  (qtp989258362-20923) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.c.CoreContainer Creating SolrCore 'MoveReplicaHDFSTest_coll_false_shard1_replica_n1' using configuration from collection MoveReplicaHDFSTest_coll_false, trusted=true
   [junit4]   2> 1715152 INFO  (qtp2041598349-20942) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.c.CoreContainer Creating SolrCore 'MoveReplicaHDFSTest_coll_false_shard2_replica_n4' using configuration from collection MoveReplicaHDFSTest_coll_false, trusted=true
   [junit4]   2> 1715152 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:40713/data/MoveReplicaHDFSTest_coll_false/core_node8/data/snapshot_metadata
   [junit4]   2> 1715153 INFO  (qtp989258362-20923) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_38205.solr.core.MoveReplicaHDFSTest_coll_false.shard1.replica_n1' (registry 'solr.core.MoveReplicaHDFSTest_coll_false.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@699034ac
   [junit4]   2> 1715153 INFO  (qtp2041598349-20942) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_43937.solr.core.MoveReplicaHDFSTest_coll_false.shard2.replica_n4' (registry 'solr.core.MoveReplicaHDFSTest_coll_false.shard2.replica_n4') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@699034ac
   [junit4]   2> 1715153 INFO  (qtp989258362-20923) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost.localdomain:40713/data
   [junit4]   2> 1715153 INFO  (qtp2041598349-20942) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost.localdomain:40713/data
   [junit4]   2> 1715153 INFO  (qtp989258362-20923) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 1715153 INFO  (qtp989258362-20923) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.c.SolrCore solr.RecoveryStrategy.Builder
   [junit4]   2> 1715153 INFO  (qtp2041598349-20942) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 1715153 INFO  (qtp2041598349-20942) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.c.SolrCore solr.RecoveryStrategy.Builder
   [junit4]   2> 1715153 INFO  (qtp989258362-20923) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.c.SolrCore [[MoveReplicaHDFSTest_coll_false_shard1_replica_n1] ] Opening new SolrCore at [/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MoveReplicaHDFSTest_56D234FA12787E63-001/tempDir-001/node1/MoveReplicaHDFSTest_coll_false_shard1_replica_n1], dataDir=[hdfs://localhost.localdomain:40713/data/MoveReplicaHDFSTest_coll_false/core_node3/data/]
   [junit4]   2> 1715153 INFO  (qtp2041598349-20942) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.c.SolrCore [[MoveReplicaHDFSTest_coll_false_shard2_replica_n4] ] Opening new SolrCore at [/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J0/temp/solr.cloud.MoveReplicaHDFSTest_56D234FA12787E63-001/tempDir-001/node3/MoveReplicaHDFSTest_coll_false_shard2_replica_n4], dataDir=[hdfs://localhost.localdomain:40713/data/MoveReplicaHDFSTest_coll_false/core_node7/data/]
   [junit4]   2> 1715154 INFO  (qtp989258362-20923) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:40713/data/MoveReplicaHDFSTest_coll_false/core_node3/data/snapshot_metadata
   [junit4]   2> 1715154 INFO  (qtp2041598349-20942) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:40713/data/MoveReplicaHDFSTest_coll_false/core_node7/data/snapshot_metadata
   [junit4]   2> 1715166 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:40713/data/MoveReplicaHDFSTest_coll_false/core_node8/data
   [junit4]   2> 1715166 INFO  (qtp989258362-20923) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:40713/data/MoveReplicaHDFSTest_coll_false/core_node3/data
   [junit4]   2> 1715168 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:40713/data/MoveReplicaHDFSTest_coll_false/core_node5/data
   [junit4]   2> 1715169 INFO  (qtp2041598349-20942) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:40713/data/MoveReplicaHDFSTest_coll_false/core_node7/data
   [junit4]   2> 1715191 INFO  (qtp989258362-20923) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:40713/data/MoveReplicaHDFSTest_coll_false/core_node3/data/index
   [junit4]   2> 1715191 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:40713/data/MoveReplicaHDFSTest_coll_false/core_node8/data/index
   [junit4]   2> 1715196 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:40713/data/MoveReplicaHDFSTest_coll_false/core_node5/data/index
   [junit4]   2> 1715198 INFO  (qtp2041598349-20942) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:40713/data/MoveReplicaHDFSTest_coll_false/core_node7/data/index
   [junit4]   2> 1715241 INFO  (Block report processor) [    ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:43265 is added to blk_1073741825_1001{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-73544c7c-4f5e-46ec-a517-152f109a9fe0:NORMAL:127.0.0.1:43883|RBW], ReplicaUC[[DISK]DS-ab9b7e91-e385-4614-a7b8-24d063bf7415:NORMAL:127.0.0.1:43265|RBW]]} size 0
   [junit4]   2> 1715245 INFO  (Block report processor) [    ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:43883 is added to blk_1073741825_1001 size 69
   [junit4]   2> 1715247 INFO  (Block report processor) [    ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:43265 is added to blk_1073741827_1003{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-36d69802-5aac-4dee-8a1a-8d320442bf7a:NORMAL:127.0.0.1:43883|RBW], ReplicaUC[[DISK]DS-e3ad0e6c-5c04-4f52-927f-7be3c4fee1d3:NORMAL:127.0.0.1:43265|RBW]]} size 0
   [junit4]   2> 1715248 INFO  (Block report processor) [    ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:43883 is added to blk_1073741827_1003 size 69
   [junit4]   2> 1715249 INFO  (Block report processor) [    ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:43883 is added to blk_1073741828_1004{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-73544c7c-4f5e-46ec-a517-152f109a9fe0:NORMAL:127.0.0.1:43883|RBW], ReplicaUC[[DISK]DS-e3ad0e6c-5c04-4f52-927f-7be3c4fee1d3:NORMAL:127.0.0.1:43265|RBW]]} size 0
   [junit4]   2> 1715250 INFO  (Block report processor) [    ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:43265 is added to blk_1073741828_1004{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-73544c7c-4f5e-46ec-a517-152f109a9fe0:NORMAL:127.0.0.1:43883|RBW], ReplicaUC[[DISK]DS-e3ad0e6c-5c04-4f52-927f-7be3c4fee1d3:NORMAL:127.0.0.1:43265|RBW]]} size 0
   [junit4]   2> 1715299 INFO  (Block report processor) [    ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:43265 is added to blk_1073741826_1002{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-ab9b7e91-e385-4614-a7b8-24d063bf7415:NORMAL:127.0.0.1:43265|RBW], ReplicaUC[[DISK]DS-73544c7c-4f5e-46ec-a517-152f109a9fe0:NORMAL:127.0.0.1:43883|RBW]]} size 0
   [junit4]   2> 1715300 INFO  (Block report processor) [    ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:43883 is added to blk_1073741826_1002{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-ab9b7e91-e385-4614-a7b8-24d063bf7415:NORMAL:127.0.0.1:43265|RBW], ReplicaUC[[DISK]DS-73544c7c-4f5e-46ec-a517-152f109a9fe0:NORMAL:127.0.0.1:43883|RBW]]} size 0
   [junit4]   2> 1715332 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 1715332 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1715333 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 1715336 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 1715336 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1715336 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 1715347 INFO  (qtp989258362-20923) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 1715348 INFO  (qtp989258362-20923) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1715348 INFO  (qtp989258362-20923) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 1715350 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 1715350 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 1715366 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 1715366 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 1715367 INFO  (qtp2041598349-20942) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 1715368 INFO  (qtp2041598349-20942) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1715368 INFO  (qtp2041598349-20942) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 1715371 INFO  (qtp989258362-20923) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 1715371 INFO  (qtp989258362-20923) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 1715373 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.s.SolrIndexSearcher Opening [Searcher@4871b8af[MoveReplicaHDFSTest_coll_false_shard1_replica_n2] main]
   [junit4]   2> 1715373 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 1715374 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 1715374 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.s.SolrIndexSearcher Opening [Searcher@6796e5d1[MoveReplicaHDFSTest_coll_false_shard2_replica_n6] main]
   [junit4]   2> 1715374 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 1715374 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 1715375 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 1715375 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 1715375 INFO  (searcherExecutor-7347-thread-1-processing-n:127.0.0.1:40599_solr x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2 c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.c.SolrCore [MoveReplicaHDFSTest_coll_false_shard1_replica_n2] Registered new searcher Searcher@4871b8af[MoveReplicaHDFSTest_coll_false_shard1_replica_n2] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1715375 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1609360953582813184
   [junit4]   2> 1715376 INFO  (searcherExecutor-7348-thread-1-processing-n:127.0.0.1:41201_solr x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6 c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.SolrCore [MoveReplicaHDFSTest_coll_false_shard2_replica_n6] Registered new searcher Searcher@6796e5d1[MoveReplicaHDFSTest_coll_false_shard2_replica_n6] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1715376 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1609360953583861760
   [junit4]   2> 1715378 INFO  (qtp2041598349-20942) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 1715378 INFO  (qtp2041598349-20942) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 1715378 INFO  (qtp989258362-20923) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.s.SolrIndexSearcher Opening [Searcher@7ec02288[MoveReplicaHDFSTest_coll_false_shard1_replica_n1] main]
   [junit4]   2> 1715379 INFO  (qtp989258362-20923) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 1715379 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.ZkShardTerms Successful update of terms at /collections/MoveReplicaHDFSTest_coll_false/terms/shard2 to Terms{values={core_node8=0}, version=0}
   [junit4]   2> 1715379 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.c.ZkShardTerms Successful update of terms at /collections/MoveReplicaHDFSTest_coll_false/terms/shard1 to Terms{values={core_node5=0}, version=0}
   [junit4]   2> 1715379 INFO  (qtp989258362-20923) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 1715380 INFO  (qtp989258362-20923) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 1715381 INFO  (searcherExecutor-7349-thread-1-processing-n:127.0.0.1:38205_solr x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.c.SolrCore [MoveReplicaHDFSTest_coll_false_shard1_replica_n1] Registered new searcher Searcher@7ec02288[MoveReplicaHDFSTest_coll_false_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1715381 INFO  (qtp989258362-20923) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1609360953589104640
   [junit4]   2> 1715381 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.c.ShardLeaderElectionContext Waiting until we see more replicas up for shard shard1: total=2 found=1 timeoutin=9999ms
   [junit4]   2> 1715381 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.ShardLeaderElectionContext Waiting until we see more replicas up for shard shard2: total=2 found=1 timeoutin=9999ms
   [junit4]   2> 1715384 INFO  (qtp989258362-20923) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.c.ZkShardTerms Successful update of terms at /collections/MoveReplicaHDFSTest_coll_false/terms/shard1 to Terms{values={core_node3=0, core_node5=0}, version=1}
   [junit4]   2> 1715385 INFO  (qtp2041598349-20942) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.s.SolrIndexSearcher Opening [Searcher@6b593269[MoveReplicaHDFSTest_coll_false_shard2_replica_n4] main]
   [junit4]   2> 1715385 INFO  (qtp2041598349-20942) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 1715386 INFO  (qtp2041598349-20942) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 1715386 INFO  (qtp2041598349-20942) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 1715387 INFO  (searcherExecutor-7350-thread-1-processing-n:127.0.0.1:43937_solr x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.c.SolrCore [MoveReplicaHDFSTest_coll_false_shard2_replica_n4] Registered new searcher Searcher@6b593269[MoveReplicaHDFSTest_coll_false_shard2_replica_n4] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1715387 INFO  (qtp2041598349-20942) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1609360953595396096
   [junit4]   2> 1715389 INFO  (qtp2041598349-20942) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.c.ZkShardTerms Successful update of terms at /collections/MoveReplicaHDFSTest_coll_false/terms/shard2 to Terms{values={core_node7=0, core_node8=0}, version=1}
   [junit4]   2> 1715882 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 1715882 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 1715882 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 1715882 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 1715882 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:41201/solr/MoveReplicaHDFSTest_coll_false_shard2_replica_n6/
   [junit4]   2> 1715882 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:40599/solr/MoveReplicaHDFSTest_coll_false_shard1_replica_n2/
   [junit4]   2> 1715883 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.u.PeerSync PeerSync: core=MoveReplicaHDFSTest_coll_false_shard2_replica_n6 url=http://127.0.0.1:41201/solr START replicas=[http://127.0.0.1:43937/solr/MoveReplicaHDFSTest_coll_false_shard2_replica_n4/] nUpdates=100
   [junit4]   2> 1715883 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.u.PeerSync PeerSync: core=MoveReplicaHDFSTest_coll_false_shard1_replica_n2 url=http://127.0.0.1:40599/solr START replicas=[http://127.0.0.1:38205/solr/MoveReplicaHDFSTest_coll_false_shard1_replica_n1/] nUpdates=100
   [junit4]   2> 1715883 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.u.PeerSync PeerSync: core=MoveReplicaHDFSTest_coll_false_shard2_replica_n6 url=http://127.0.0.1:41201/solr DONE.  We have no versions.  sync failed.
   [junit4]   2> 1715883 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.u.PeerSync PeerSync: core=MoveReplicaHDFSTest_coll_false_shard1_replica_n2 url=http://127.0.0.1:40599/solr DONE.  We have no versions.  sync failed.
   [junit4]   2> 1715885 INFO  (qtp989258362-20924) [n:127.0.0.1:38205_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1] o.a.s.c.S.Request [MoveReplicaHDFSTest_coll_false_shard1_replica_n1]  webapp=/solr path=/get params={distrib=false&qt=/get&fingerprint=false&getVersions=100&wt=javabin&version=2} status=0 QTime=1
   [junit4]   2> 1715885 INFO  (qtp2041598349-20931) [n:127.0.0.1:43937_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4] o.a.s.c.S.Request [MoveReplicaHDFSTest_coll_false_shard2_replica_n4]  webapp=/solr path=/get params={distrib=false&qt=/get&fingerprint=false&getVersions=100&wt=javabin&version=2} status=0 QTime=1
   [junit4]   2> 1715885 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.SyncStrategy Leader's attempt to sync with shard failed, moving to the next candidate
   [junit4]   2> 1715885 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.c.SyncStrategy Leader's attempt to sync with shard failed, moving to the next candidate
   [junit4]   2> 1715886 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.ShardLeaderElectionContext We failed sync, but we have no versions - we can't sync in that case - we were active before, so become leader anyway
   [junit4]   2> 1715886 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.c.ShardLeaderElectionContext We failed sync, but we have no versions - we can't sync in that case - we were active before, so become leader anyway
   [junit4]   2> 1715886 INFO  (qtp1741268163-20941) [n:127.0.0.1:41201_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n6] o.a.s.c.ShardLeaderElectionContext Found all replicas participating in election, clear LIR
   [junit4]   2> 1715886 INFO  (qtp2037031586-20929) [n:127.0.0.1:40599_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n2] o.a.s.c.ShardLeader

[...truncated too long message...]

configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/top-level-ivy-settings.xml

resolve:

common.init:

compile-lucene-core:

init:

-clover.disable:

-clover.load:

-clover.classpath:

-clover.setup:

clover:

compile-core:

-clover.disable:

-clover.load:

-clover.classpath:

-clover.setup:

clover:

common.compile-core:

compile-core:

common.compile-test:
    [mkdir] Created dir: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/classes/test
    [javac] Compiling 927 source files to /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/classes/test
    [javac] Note: Some input files use or override a deprecated API.
    [javac] Note: Recompile with -Xlint:deprecation for details.
    [javac] Note: Some input files use unchecked or unsafe operations.
    [javac] Note: Recompile with -Xlint:unchecked for details.
    [javac] Creating empty /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/classes/test/org/apache/solr/cloud/autoscaling/sim/package-info.class
     [copy] Copying 1 file to /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/classes/test

common-solr.compile-test:

compile-test:

BUILD SUCCESSFUL
Total time: 47 seconds
[repro] ant test-nocompile -Dtests.dups=5 -Dtests.maxfailures=5 -Dtests.class="*.MoveReplicaHDFSTest" -Dtests.showOutput=onerror "-Dargs=-XX:-UseCompressedOops -XX:+UseG1GC" -Dtests.seed=56D234FA12787E63 -Dtests.multiplier=3 -Dtests.slow=true -Dtests.locale=es-ES -Dtests.timezone=Asia/Ulaanbaatar -Dtests.asserts=true -Dtests.file.encoding=ISO-8859-1
Buildfile: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/build.xml

-clover.disable:

ivy-configure:
[ivy:configure] :: Apache Ivy 2.4.0 - 20141213170938 :: http://ant.apache.org/ivy/ ::
[ivy:configure] :: loading settings :: file = /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/top-level-ivy-settings.xml

install-junit4-taskdef:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

resolve-groovy:
[ivy:cachepath] :: resolving dependencies :: org.codehaus.groovy#groovy-all-caller;working
[ivy:cachepath] 	confs: [default]
[ivy:cachepath] 	found org.codehaus.groovy#groovy-all;2.4.15 in public
[ivy:cachepath] :: resolution report :: resolve 18ms :: artifacts dl 0ms
	---------------------------------------------------------------------
	|                  |            modules            ||   artifacts   |
	|       conf       | number| search|dwnlded|evicted|| number|dwnlded|
	---------------------------------------------------------------------
	|      default     |   1   |   0   |   0   |   0   ||   1   |   0   |
	---------------------------------------------------------------------

-init-totals:

-test:
    [mkdir] Created dir: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test
[junit4:pickseed] Seed property 'tests.seed' already defined: 56D234FA12787E63
    [mkdir] Created dir: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/temp
   [junit4] <JUnit4> says kaixo! Master seed: 56D234FA12787E63
   [junit4] Executing 5 suites with 3 JVMs.
   [junit4] 
   [junit4] Started J2 PID(13156@serv1.sd-datasolutions.de).
   [junit4] Started J0 PID(13157@serv1.sd-datasolutions.de).
   [junit4] Started J1 PID(13158@serv1.sd-datasolutions.de).
   [junit4] Suite: org.apache.solr.cloud.MoveReplicaHDFSTest
   [junit4] OK      15.0s J1 | MoveReplicaHDFSTest.testNormalFailedMove
   [junit4] OK      18.5s J1 | MoveReplicaHDFSTest.testNormalMove
   [junit4] OK      20.4s J1 | MoveReplicaHDFSTest.test
   [junit4] OK      32.8s J1 | MoveReplicaHDFSTest.testFailedMove
   [junit4] Completed [1/5] on J1 in 110.35s, 4 tests
   [junit4] 
   [junit4] Suite: org.apache.solr.cloud.MoveReplicaHDFSTest
   [junit4] OK      15.4s J0 | MoveReplicaHDFSTest.testNormalFailedMove
   [junit4] OK      20.6s J0 | MoveReplicaHDFSTest.testNormalMove
   [junit4] OK      19.7s J0 | MoveReplicaHDFSTest.test
   [junit4] OK      33.4s J0 | MoveReplicaHDFSTest.testFailedMove
   [junit4] Completed [2/5] on J0 in 114.34s, 4 tests
   [junit4] 
   [junit4] Duplicate suite name used with XML reports: org.apache.solr.cloud.MoveReplicaHDFSTest. This may confuse tools that process XML reports. Set 'ignoreDuplicateSuites' to true to skip this message.
   [junit4] Suite: org.apache.solr.cloud.MoveReplicaHDFSTest
   [junit4] OK      15.5s J2 | MoveReplicaHDFSTest.testNormalFailedMove
   [junit4] OK      20.6s J2 | MoveReplicaHDFSTest.testNormalMove
   [junit4] OK      22.2s J2 | MoveReplicaHDFSTest.test
   [junit4] OK      32.4s J2 | MoveReplicaHDFSTest.testFailedMove
   [junit4] Completed [3/5] on J2 in 114.67s, 4 tests
   [junit4] 
   [junit4] Suite: org.apache.solr.cloud.MoveReplicaHDFSTest
   [junit4] OK      10.2s J0 | MoveReplicaHDFSTest.testNormalFailedMove
   [junit4] OK      16.0s J0 | MoveReplicaHDFSTest.testNormalMove
   [junit4] OK      19.3s J0 | MoveReplicaHDFSTest.test
   [junit4] OK      33.2s J0 | MoveReplicaHDFSTest.testFailedMove
   [junit4] Completed [4/5] on J0 in 92.18s, 4 tests
   [junit4] 
   [junit4] Suite: org.apache.solr.cloud.MoveReplicaHDFSTest
   [junit4] OK      11.1s J1 | MoveReplicaHDFSTest.testNormalFailedMove
   [junit4] OK      20.7s J1 | MoveReplicaHDFSTest.testNormalMove
   [junit4] OK      20.0s J1 | MoveReplicaHDFSTest.test
   [junit4] OK      32.5s J1 | MoveReplicaHDFSTest.testFailedMove
   [junit4] Completed [5/5] on J1 in 96.87s, 4 tests
   [junit4] 
   [junit4] JVM J0:     0.39 ..   207.94 =   207.55s
   [junit4] JVM J1:     0.39 ..   208.79 =   208.41s
   [junit4] JVM J2:     0.38 ..   115.84 =   115.46s
   [junit4] Execution time total: 3 minutes 28 seconds
   [junit4] Tests summary: 5 suites, 20 tests
   [junit4] Could not remove temporary path: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J0 (java.nio.file.DirectoryNotEmptyException: Remaining files: [/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J0/temp])
   [junit4] Could not remove temporary path: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J1 (java.nio.file.DirectoryNotEmptyException: Remaining files: [/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J1/temp])
   [junit4] Could not remove temporary path: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J2 (java.nio.file.DirectoryNotEmptyException: Remaining files: [/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J2/temp])
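
The three warnings above are cleanup noise rather than test failures: each forked JVM leaves a temp/ directory behind that the runner cannot delete while files remain in it. If a leftover J0/J1/J2 tree ever needs clearing before another local run, a plain removal works; the path below is the one reported above and would differ on another workspace.

    # Hedged sketch: manual cleanup of the per-JVM temp dirs named in the
    # warnings above (adjust the workspace path to your own checkout).
    rm -rf /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J{0,1,2}
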
     [echo] 5 slowest tests:
[junit4:tophints] 1284.30s | org.apache.solr.cloud.BasicDistributedZkTest
[junit4:tophints] 590.82s | org.apache.solr.cloud.cdcr.CdcrWithNodesRestartsTest
[junit4:tophints] 518.09s | org.apache.solr.cloud.cdcr.CdcrOpsAndBoundariesTest
[junit4:tophints] 504.32s | org.apache.solr.cloud.api.collections.ShardSplitTest
[junit4:tophints] 123.10s | org.apache.solr.cloud.autoscaling.SearchRateTriggerIntegrationTest

-check-totals:

test-nocompile:

BUILD SUCCESSFUL
Total time: 3 minutes 30 seconds
[repro] Failures:
[repro]   0/5 failed: org.apache.solr.cloud.MoveReplicaHDFSTest
[repro] Exiting with code 0
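
For reference, the repro harness above simply re-invokes the randomized-testing ant target with the failing build's seed, locale, timezone and JVM flags pinned. Since all 5/5 duplicated suites passed, the original testFailedMove failure does not reproduce deterministically from the seed alone. A rough equivalent by hand, assuming an already-compiled 7.x checkout (the repro script compiles first, hence test-nocompile), with every parameter taken verbatim from the [repro] command line above:

    cd solr/core
    ant test-nocompile -Dtests.dups=5 -Dtests.maxfailures=5 \
        -Dtests.class="*.MoveReplicaHDFSTest" -Dtests.showOutput=onerror \
        "-Dargs=-XX:-UseCompressedOops -XX:+UseG1GC" \
        -Dtests.seed=56D234FA12787E63 -Dtests.multiplier=3 -Dtests.slow=true \
        -Dtests.locale=es-ES -Dtests.timezone=Asia/Ulaanbaatar \
        -Dtests.asserts=true -Dtests.file.encoding=ISO-8859-1
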
+ mv lucene/build lucene/build.repro
+ mv solr/build solr/build.repro
+ mv lucene/build.orig lucene/build
+ mv solr/build.orig solr/build
Archiving artifacts
Setting ANT_1_8_2_HOME=/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
java.lang.InterruptedException: no matches found within 10000
	at hudson.FilePath$ValidateAntFileMask.hasMatch(FilePath.java:2846)
	at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2725)
	at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2706)
	at hudson.FilePath.act(FilePath.java:1077)
	at hudson.FilePath.act(FilePath.java:1060)
	at hudson.FilePath.validateAntFileMask(FilePath.java:2704)
	at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
	at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
	at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
	at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
	at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
	at hudson.model.Build$BuildExecution.post2(Build.java:186)
	at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
	at hudson.model.Run.execute(Run.java:1840)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:97)
	at hudson.model.Executor.run(Executor.java:429)
No artifacts found that match the file pattern "**/*.events,heapdumps/**,**/*_pid*.log". Configuration error?
[WARNINGS] Parsing warnings in console log with parser Java Compiler (javac)
Setting ANT_1_8_2_HOME=/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
Setting ANT_1_8_2_HOME=/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
<Git Blamer> Using GitBlamer to create author and commit information for all warnings.
<Git Blamer> GIT_COMMIT=82c64af84b903df40e457ed6e338b3abf43a7534, workspace=/var/lib/jenkins/workspace/Lucene-Solr-7.x-Linux
[WARNINGS] Computing warning deltas based on reference build #2597
Recording test results
Setting ANT_1_8_2_HOME=/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
Build step 'Publish JUnit test result report' changed build result to UNSTABLE
Email was triggered for: Unstable (Test Failures)
Sending email for trigger: Unstable (Test Failures)
Setting ANT_1_8_2_HOME=/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
Setting ANT_1_8_2_HOME=/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
Setting ANT_1_8_2_HOME=/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
Setting ANT_1_8_2_HOME=/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2

[JENKINS] Lucene-Solr-7.x-Linux (64bit/jdk-10.0.1) - Build # 2599 - Failure!

Posted by Policeman Jenkins Server <je...@thetaphi.de>.
Build: https://jenkins.thetaphi.de/job/Lucene-Solr-7.x-Linux/2599/
Java: 64bit/jdk-10.0.1 -XX:+UseCompressedOops -XX:+UseConcMarkSweepGC

All tests passed

Build Log:
[...truncated 1870 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/core/test/temp/junit4-J0-20180821_021511_7291980339155977634120.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----
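
The flagged stderr here (and in every block that follows) is the same single line each time: build #2599 ran on JDK 10 with -XX:+UseConcMarkSweepGC, and CMS has been deprecated since JDK 9 (JEP 291), so every forked test JVM prints the deprecation warning to stderr and the junit4 runner treats any non-empty stderr as unexpected output. The warning itself is reproducible on any JDK 9+ install:

    # The deprecation warning goes to stderr, which is why junit4 flags it.
    $ java -XX:+UseConcMarkSweepGC -version 2>&1 | head -1
    OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.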

[...truncated 3 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/core/test/temp/junit4-J1-20180821_021511_7297331889486537913199.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 5 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/core/test/temp/junit4-J2-20180821_021511_72914501739054690388906.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 295 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/test-framework/test/temp/junit4-J1-20180821_022105_18713294192602373707420.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/test-framework/test/temp/junit4-J0-20180821_022105_18716554266093828865791.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/test-framework/test/temp/junit4-J2-20180821_022105_187524127100673896938.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 1083 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/common/test/temp/junit4-J1-20180821_022211_6743099380288076543589.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/common/test/temp/junit4-J2-20180821_022211_67410251589089366967350.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/common/test/temp/junit4-J0-20180821_022211_6742812636994815368314.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 252 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/icu/test/temp/junit4-J1-20180821_022400_86516789123232680773985.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/icu/test/temp/junit4-J2-20180821_022400_8658055013140064139443.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 6 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/icu/test/temp/junit4-J0-20180821_022400_86518279881294826439118.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 253 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/kuromoji/test/temp/junit4-J0-20180821_022412_58312836857134872436897.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/kuromoji/test/temp/junit4-J1-20180821_022412_5833035167253489163607.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 6 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/kuromoji/test/temp/junit4-J2-20180821_022412_58310932184497452090377.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 162 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/morfologik/test/temp/junit4-J1-20180821_022433_2366616678403765476605.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/morfologik/test/temp/junit4-J0-20180821_022433_2367589330799089844870.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/morfologik/test/temp/junit4-J2-20180821_022433_2366819201517042072704.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 204 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/nori/test/temp/junit4-J0-20180821_022436_9321644097455869600708.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/nori/test/temp/junit4-J2-20180821_022436_9324455349237153019775.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 6 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/nori/test/temp/junit4-J1-20180821_022436_9323473280883304069696.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 168 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/opennlp/test/temp/junit4-J2-20180821_022444_86513716590085775950804.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 6 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/opennlp/test/temp/junit4-J0-20180821_022444_86517983737170000288505.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/opennlp/test/temp/junit4-J1-20180821_022444_86511747815748741306036.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 173 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/phonetic/test/temp/junit4-J2-20180821_022447_5745229223794367912835.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/phonetic/test/temp/junit4-J1-20180821_022447_5745061994857576368194.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 6 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/phonetic/test/temp/junit4-J0-20180821_022447_5746955925478842073497.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 161 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/smartcn/test/temp/junit4-J0-20180821_022455_7373423668859221737223.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/smartcn/test/temp/junit4-J1-20180821_022455_73712715541336153382984.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 165 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/stempel/test/temp/junit4-J2-20180821_022502_6346899341887569015490.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/stempel/test/temp/junit4-J1-20180821_022502_63414662936178440135565.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/stempel/test/temp/junit4-J0-20180821_022502_63417477458884476666935.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 206 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/backward-codecs/test/temp/junit4-J1-20180821_022505_5207102117108746445522.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 19 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/backward-codecs/test/temp/junit4-J2-20180821_022505_5206904025722959459023.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 9 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/backward-codecs/test/temp/junit4-J0-20180821_022505_52016812592198035084728.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 1404 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/benchmark/test/temp/junit4-J2-20180821_022649_64314118001852133274889.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/benchmark/test/temp/junit4-J1-20180821_022649_64315757558966372333425.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/benchmark/test/temp/junit4-J0-20180821_022649_642273816189762902640.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 250 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/classification/test/temp/junit4-J0-20180821_022657_7245378193813922531750.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/classification/test/temp/junit4-J1-20180821_022657_7249459845974270082905.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/classification/test/temp/junit4-J2-20180821_022657_7244793040243253313702.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 267 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/codecs/test/temp/junit4-J0-20180821_022709_52914447384749882625966.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/codecs/test/temp/junit4-J1-20180821_022709_52912691169969349829566.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 4 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/codecs/test/temp/junit4-J2-20180821_022709_52917818642093372097513.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 228 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/demo/test/temp/junit4-J0-20180821_023009_20710785500130149724977.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 6 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/demo/test/temp/junit4-J2-20180821_023009_20715312723842145808277.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/demo/test/temp/junit4-J1-20180821_023009_2075378493709212453552.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 173 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/expressions/test/temp/junit4-J1-20180821_023012_114750656020077387695.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/expressions/test/temp/junit4-J0-20180821_023012_11417224714782379355032.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/expressions/test/temp/junit4-J2-20180821_023012_11412353545321573954429.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 232 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/facet/test/temp/junit4-J0-20180821_023017_5693486710534876634740.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/facet/test/temp/junit4-J1-20180821_023017_5691552679250828695463.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/facet/test/temp/junit4-J2-20180821_023017_5695446323656253076024.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 182 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/grouping/test/temp/junit4-J1-20180821_023052_3537775282401109785957.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/grouping/test/temp/junit4-J0-20180821_023052_35311245451949215384452.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/grouping/test/temp/junit4-J2-20180821_023052_3533967176109547714120.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 255 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/highlighter/test/temp/junit4-J0-20180821_023104_18415486731315863238726.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/highlighter/test/temp/junit4-J1-20180821_023104_184987975220481740523.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 6 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/highlighter/test/temp/junit4-J2-20180821_023104_1841045135711755706852.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 166 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/join/test/temp/junit4-J1-20180821_023122_838140969338084000646.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/join/test/temp/junit4-J2-20180821_023122_8383155695998365434154.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/join/test/temp/junit4-J0-20180821_023122_83812580065629796744123.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 155 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/memory/test/temp/junit4-J0-20180821_023140_63716614762547763465885.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/memory/test/temp/junit4-J1-20180821_023140_63713009326703944723434.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 184 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/misc/test/temp/junit4-J2-20180821_023148_88210873175556690254020.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 8 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/misc/test/temp/junit4-J0-20180821_023148_8826818052769698023.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/misc/test/temp/junit4-J1-20180821_023148_8824165211217070597797.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 338 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/queries/test/temp/junit4-J0-20180821_023203_3351400375149686795010.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 6 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/queries/test/temp/junit4-J1-20180821_023203_33517062557172579138675.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/queries/test/temp/junit4-J2-20180821_023203_3354130181700186065034.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 229 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/queryparser/test/temp/junit4-J0-20180821_023210_5799929006383039579450.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/queryparser/test/temp/junit4-J2-20180821_023210_57910962546298155685147.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/queryparser/test/temp/junit4-J1-20180821_023210_57910062589231360834705.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 204 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/replicator/test/temp/junit4-J1-20180821_023217_3015469232492620430167.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 8 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/replicator/test/temp/junit4-J2-20180821_023217_30113396089502071460789.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 6 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/replicator/test/temp/junit4-J0-20180821_023217_30116616049660144758871.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 209 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/sandbox/test/temp/junit4-J0-20180821_023229_42511258401912809648547.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 7 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/sandbox/test/temp/junit4-J1-20180821_023229_4251517044192157306998.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 6 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/sandbox/test/temp/junit4-J2-20180821_023229_42512979063130783422535.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 292 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/spatial-extras/test/temp/junit4-J0-20180821_023508_98914688683292820093983.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 9 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/spatial-extras/test/temp/junit4-J2-20180821_023508_9895947627867594098573.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/spatial-extras/test/temp/junit4-J1-20180821_023508_98910189868320151314133.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 153 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/spatial/test/temp/junit4-J0-20180821_023537_93216739580679292201167.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 181 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/spatial3d/test/temp/junit4-J2-20180821_023539_66415935594926196791391.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 6 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/spatial3d/test/temp/junit4-J1-20180821_023539_6644594546723212006281.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 5 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/spatial3d/test/temp/junit4-J0-20180821_023539_6644785437370414917218.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 261 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/suggest/test/temp/junit4-J0-20180821_023644_4956479025708751896447.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/suggest/test/temp/junit4-J1-20180821_023644_49517824479079693673090.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/suggest/test/temp/junit4-J2-20180821_023644_4951610007043336351935.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 3802 lines...]
   [junit4] JVM J1: stdout was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/temp/junit4-J1-20180821_023742_6156065188529073243165.sysout
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] #
   [junit4] # A fatal error has been detected by the Java Runtime Environment:
   [junit4] #
   [junit4] #  SIGSEGV (0xb) at pc=0x00007f1ab06ee409, pid=22664, tid=22724
   [junit4] #
   [junit4] # JRE version: OpenJDK Runtime Environment (10.0.1+10) (build 10.0.1+10)
   [junit4] # Java VM: OpenJDK 64-Bit Server VM (10.0.1+10, mixed mode, tiered, compressed oops, concurrent mark sweep gc, linux-amd64)
   [junit4] # Problematic frame:
   [junit4] # V  [libjvm.so+0xc48409]  PhaseIdealLoop::split_up(Node*, Node*, Node*) [clone .part.40]+0x619
   [junit4] #
   [junit4] # No core dump will be written. Core dumps have been disabled. To enable core dumping, try "ulimit -c unlimited" before starting Java again
   [junit4] #
   [junit4] # An error report file with more information is saved as:
   [junit4] # /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J1/hs_err_pid22664.log
   [junit4] #
   [junit4] # Compiler replay data is saved as:
   [junit4] # /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J1/replay_pid22664.log
   [junit4] #
   [junit4] # If you would like to submit a bug report, please visit:
   [junit4] #   http://bugreport.java.com/bugreport/crash.jsp
   [junit4] #
   [junit4] <<< JVM J1: EOF ----
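
The SIGSEGV above is inside the HotSpot C2 JIT compiler itself (PhaseIdealLoop::split_up is a libjvm.so frame), not in Lucene/Solr code. The crash report already names the two artifacts worth keeping (hs_err_pid22664.log and replay_pid22664.log) and suggests ulimit for core dumps. A triage sketch, assuming shell access on the Jenkins node; the class::method in the last command is a hypothetical placeholder for whatever the hs_err file's "Current CompileTask:" section names:

    $ less /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J1/hs_err_pid22664.log
    $ ulimit -c unlimited    # enable core dumps before re-running, as the report itself suggests
    # hypothetical workaround: keep C2 away from the method it was compiling when it crashed
    $ java -XX:CompileCommand=exclude,some.pkg.SomeClass::someMethod ...

The replay file is intended for the JVM bug report at the URL above, where HotSpot engineers can replay the failing compilation.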

   [junit4] JVM J1: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/temp/junit4-J1-20180821_023742_6157076073971694811723.syserr
   [junit4] >>> JVM J1 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J1: EOF ----

[...truncated 974 lines...]
   [junit4] JVM J2: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/temp/junit4-J2-20180821_023742_6152798427379137095182.syserr
   [junit4] >>> JVM J2 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J2: EOF ----

[...truncated 3 lines...]
   [junit4] JVM J0: stderr was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/temp/junit4-J0-20180821_023742_6153220352680539098647.syserr
   [junit4] >>> JVM J0 emitted unexpected output (verbatim) ----
   [junit4] OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
   [junit4] <<< JVM J0: EOF ----

[...truncated 3 lines...]
   [junit4] ERROR: JVM J1 ended with an exception, command line: /home/jenkins/tools/java/64bit/jdk-10.0.1/bin/java -XX:+UseCompressedOops -XX:+UseConcMarkSweepGC -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/home/jenkins/workspace/Lucene-Solr-7.x-Linux/heapdumps -ea -esa --illegal-access=deny -Dtests.prefix=tests -Dtests.seed=20F382737ED33E24 -Xmx512M -Dtests.iters= -Dtests.verbose=false -Dtests.infostream=false -Dtests.codec=random -Dtests.postingsformat=random -Dtests.docvaluesformat=random -Dtests.locale=random -Dtests.timezone=random -Dtests.directory=random -Dtests.linedocsfile=europarl.lines.txt.gz -Dtests.luceneMatchVersion=7.5.0 -Dtests.cleanthreads=perClass -Djava.util.logging.config.file=/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/tools/junit4/logging.properties -Dtests.nightly=false -Dtests.weekly=false -Dtests.monster=false -Dtests.slow=true -Dtests.asserts=true -Dtests.multiplier=3 -DtempDir=./temp -Djava.io.tmpdir=./temp -Djunit4.tempDir=/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/temp -Dcommon.dir=/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene -Dclover.db.dir=/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/clover/db -Djava.security.policy=/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/tools/junit4/solr-tests.policy -Dtests.LUCENE_VERSION=7.5.0 -Djetty.testMode=1 -Djetty.insecurerandom=1 -Dsolr.directoryFactory=org.apache.solr.core.MockDirectoryFactory -Djava.awt.headless=true -Djdk.map.althashing.threshold=0 -Dtests.src.home=/home/jenkins/workspace/Lucene-Solr-7.x-Linux -Djava.security.egd=file:/dev/./urandom -Djunit4.childvm.cwd=/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/J1 -Djunit4.childvm.id=1 -Djunit4.childvm.count=3 -Djava.security.manager=org.apache.lucene.util.TestSecurityManager -Dtests.filterstacks=true -Dtests.leaveTemporary=false -Dtests.badapples=false -classpath 
/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/classes/test:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-test-framework/classes/java:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/test-framework/lib/junit4-ant-2.6.0.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/src/test-files:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/test-framework/classes/java:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/codecs/classes/java:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-solrj/classes/java:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/classes/java:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/common/lucene-analyzers-common-7.5.0-SNAPSHOT.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/kuromoji/lucene-analyzers-kuromoji-7.5.0-SNAPSHOT.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/nori/lucene-analyzers-nori-7.5.0-SNAPSHOT.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/phonetic/lucene-analyzers-phonetic-7.5.0-SNAPSHOT.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/codecs/lucene-codecs-7.5.0-SNAPSHOT.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/backward-codecs/lucene-backward-codecs-7.5.0-SNAPSHOT.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/highlighter/lucene-highlighter-7.5.0-SNAPSHOT.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/memory/lucene-memory-7.5.0-SNAPSHOT.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/misc/lucene-misc-7.5.0-SNAPSHOT.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/spatial-extras/lucene-spatial-extras-7.5.0-SNAPSHOT.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/spatial3d/lucene-spatial3d-7.5.0-SNAPSHOT.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/expressions/lucene-expressions-7.5.0-SNAPSHOT.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/suggest/lucene-suggest-7.5.0-SNAPSHOT.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/grouping/lucene-grouping-7.5.0-SNAPSHOT.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/queries/lucene-queries-7.5.0-SNAPSHOT.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/queryparser/lucene-queryparser-7.5.0-SNAPSHOT.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/join/lucene-join-7.5.0-SNAPSHOT.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/sandbox/lucene-sandbox-7.5.0-SNAPSHOT.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/classification/lucene-classification-7.5.0-SNAPSHOT.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/antlr4-runtime-4.5.1-1.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/asm-5.1.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/asm-commons-5.1.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/avatica-core-1.10.0.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/caffeine-2.4.0.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/calcite-core-1.13.0.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/calcite-linq4j-1.13.0.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/commons-cli-1.2.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/commons-codec-1.10.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/commons-collections-3.2.2.jar:/home/jenkins/workspac
e/Lucene-Solr-7.x-Linux/solr/core/lib/commons-compiler-2.7.6.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/commons-configuration-1.6.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/commons-exec-1.3.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/commons-fileupload-1.3.3.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/commons-io-2.5.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/commons-lang-2.6.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/commons-lang3-3.6.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/curator-client-2.8.0.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/curator-framework-2.8.0.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/curator-recipes-2.8.0.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/disruptor-3.4.0.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/dom4j-1.6.1.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/eigenbase-properties-1.1.5.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/guava-14.0.1.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/hadoop-annotations-2.7.4.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/hadoop-auth-2.7.4.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/hadoop-common-2.7.4.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/hadoop-hdfs-2.7.4.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/hppc-0.8.1.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/htrace-core-3.2.0-incubating.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/jackson-annotations-2.9.5.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/jackson-core-2.9.5.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/jackson-core-asl-1.9.13.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/jackson-databind-2.9.5.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/jackson-dataformat-smile-2.9.5.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/jackson-mapper-asl-1.9.13.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/janino-2.7.6.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/jcl-over-slf4j-1.7.24.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/joda-time-2.2.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/log4j-1.2-api-2.11.0.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/log4j-api-2.11.0.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/log4j-core-2.11.0.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/log4j-slf4j-impl-2.11.0.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/org.restlet-2.3.0.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/org.restlet.ext.servlet-2.3.0.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/protobuf-java-3.1.0.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/rrd4j-3.2.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/spatial4j-0.7.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/lib/t-digest-3.1.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/solrj/lib/commons-io-2.5.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/solrj/lib/commons-math3-3.6.1.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/solrj/lib/httpclient-4.5.3.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/sol
rj/lib/httpcore-4.4.6.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/solrj/lib/httpmime-4.5.3.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/solrj/lib/jcl-over-slf4j-1.7.24.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/solrj/lib/noggit-0.8.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/solrj/lib/slf4j-api-1.7.24.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/solrj/lib/stax2-api-3.1.4.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/solrj/lib/woodstox-core-asl-4.4.1.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/solrj/lib/zookeeper-3.4.11.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/server/lib/gmetric4j-1.0.7.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/server/lib/javax.servlet-api-3.1.0.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/server/lib/jetty-continuation-9.4.11.v20180605.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/server/lib/jetty-deploy-9.4.11.v20180605.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/server/lib/jetty-http-9.4.11.v20180605.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/server/lib/jetty-io-9.4.11.v20180605.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/server/lib/jetty-jmx-9.4.11.v20180605.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/server/lib/jetty-rewrite-9.4.11.v20180605.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/server/lib/jetty-security-9.4.11.v20180605.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/server/lib/jetty-server-9.4.11.v20180605.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/server/lib/jetty-servlet-9.4.11.v20180605.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/server/lib/jetty-servlets-9.4.11.v20180605.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/server/lib/jetty-util-9.4.11.v20180605.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/server/lib/jetty-webapp-9.4.11.v20180605.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/server/lib/jetty-xml-9.4.11.v20180605.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/server/lib/metrics-core-3.2.6.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/server/lib/metrics-ganglia-3.2.6.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/server/lib/metrics-graphite-3.2.6.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/server/lib/metrics-jetty9-3.2.6.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/server/lib/metrics-jvm-3.2.6.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/example/example-DIH/solr/db/lib/derby-10.9.1.0.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/example/example-DIH/solr/db/lib/hsqldb-2.4.0.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/core/classes/java:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/test-framework/lib/junit-4.10.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/test-framework/lib/randomizedtesting-runner-2.6.0.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/antlr-2.7.7.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/antlr4-runtime-4.5.1-1.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-core-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-core-api-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-core-avl-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-core-shared-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apa
cheds-i18n-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-interceptor-kerberos-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-interceptors-admin-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-interceptors-authn-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-interceptors-authz-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-interceptors-changelog-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-interceptors-collective-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-interceptors-event-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-interceptors-exception-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-interceptors-journal-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-interceptors-normalization-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-interceptors-operational-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-interceptors-referral-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-interceptors-schema-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-interceptors-subtree-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-interceptors-trigger-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-jdbm-partition-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-jdbm1-2.0.0-M2.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-kerberos-codec-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-ldif-partition-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-mavibot-partition-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-protocol-kerberos-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-protocol-ldap-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-protocol-shared-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/apacheds-xdbm-partition-2.0.0-M15.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/api-all-1.0.0-M20.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/bcprov-jdk15on-1.54.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/byte-buddy-1.6.2.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/commons-collections-3.2.2.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/commons-math3-3.6.1.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/ehcache-core-2.4.4.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/hadoop-common-2.7.4-tests.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/hadoop-hdfs-2.7.4-tests.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/hadoop-minikdc-2.7.4.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/htrace-core-3.2.0-incubating.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/co
re/test-lib/jersey-core-1.9.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/jersey-server-1.9.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/jetty-6.1.26.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/jetty-sslengine-6.1.26.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/jetty-util-6.1.26.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/mina-core-2.0.0-M5.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/mockito-core-2.6.2.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/netty-all-4.0.36.Final.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/core/test-lib/objenesis-2.5.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/analysis/icu/lucene-analyzers-icu-7.5.0-SNAPSHOT.jar:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/contrib/solr-analysis-extras/classes/java:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/queryparser/classes/test:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/build/backward-codecs/classes/test:/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/contrib/analysis-extras/lib/icu4j-62.1.jar:/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2/lib/ant-launcher.jar:/home/jenkins/.ant/lib/ivy-2.4.0.jar:/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2/lib/ant-netrexx.jar:/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2/lib/ant-apache-xalan2.jar:/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2/lib/ant.jar:/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2/lib/ant-javamail.jar:/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2/lib/ant-commons-net.jar:/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2/lib/ant-apache-regexp.jar:/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2/lib/ant-apache-oro.jar:/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2/lib/ant-jmf.jar:/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2/lib/ant-apache-log4j.jar:/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2/lib/ant-jsch.jar:/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2/lib/ant-jai.jar:/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2/lib/ant-apache-bsf.jar:/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2/lib/ant-junit4.jar:/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2/lib/ant-apache-resolver.jar:/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2/lib/ant-apache-bcel.jar:/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2/lib/ant-junit.jar:/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2/lib/ant-jdepend.jar:/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2/lib/ant-testutil.jar:/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2/lib/ant-antlr.jar:/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2/lib/ant-swing.jar:/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2/lib/ant-commons-logging.jar:/home/jenkins/.ivy2/cache/com.carrotsearch.randomizedtesting/junit4-ant/jars/junit4-ant-2.6.0.jar com.carrotsearch.ant.tasks.junit4.slave.SlaveMainSafe -eventsfile /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/temp/junit4-J1-20180821_023742_61510477085530698574103.events 
@/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/temp/junit4-J1-20180821_023742_6159458654129153516231.suites -stdin
   [junit4] ERROR: JVM J1 ended with an exception: Forked process returned with error code: 134. Very likely a JVM crash.  See process stdout at: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/temp/junit4-J1-20180821_023742_6156065188529073243165.sysout See process stderr at: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/temp/junit4-J1-20180821_023742_6157076073971694811723.syserr
   [junit4] 	at com.carrotsearch.ant.tasks.junit4.JUnit4.executeSlave(JUnit4.java:1519)
   [junit4] 	at com.carrotsearch.ant.tasks.junit4.JUnit4.access$000(JUnit4.java:126)
   [junit4] 	at com.carrotsearch.ant.tasks.junit4.JUnit4$2.call(JUnit4.java:982)
   [junit4] 	at com.carrotsearch.ant.tasks.junit4.JUnit4$2.call(JUnit4.java:979)
   [junit4] 	at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
   [junit4] 	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1135)
   [junit4] 	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
   [junit4] 	at java.base/java.lang.Thread.run(Thread.java:844)
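
Exit code 134 is 128 + 6, i.e. the forked JVM terminated on SIGABRT: after its SIGSEGV handler finishes writing the hs_err file, HotSpot calls abort(). The signal number can be decoded in bash:

    $ kill -l $((134 - 128))
    ABRT

The full forked command line above records the randomized-testing seed (-Dtests.seed=20F382737ED33E24), so the run can in principle be replayed. A sketch, assuming the usual ant test targets in the workspace checkout:

    $ cd solr/core
    $ ant test -Dtests.seed=20F382737ED33E24

A JIT crash like this is profile- and timing-dependent, though, so the same seed may well pass on a re-run.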

BUILD FAILED
/home/jenkins/workspace/Lucene-Solr-7.x-Linux/build.xml:633: The following error occurred while executing this line:
/home/jenkins/workspace/Lucene-Solr-7.x-Linux/build.xml:577: The following error occurred while executing this line:
/home/jenkins/workspace/Lucene-Solr-7.x-Linux/build.xml:59: The following error occurred while executing this line:
/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build.xml:267: The following error occurred while executing this line:
/home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/common-build.xml:558: The following error occurred while executing this line:
/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/common-build.xml:1568: The following error occurred while executing this line:
/home/jenkins/workspace/Lucene-Solr-7.x-Linux/lucene/common-build.xml:1092: At least one slave process threw an exception, first: Forked process returned with error code: 134. Very likely a JVM crash.  See process stdout at: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/temp/junit4-J1-20180821_023742_6156065188529073243165.sysout See process stderr at: /home/jenkins/workspace/Lucene-Solr-7.x-Linux/solr/build/solr-core/test/temp/junit4-J1-20180821_023742_6157076073971694811723.syserr

Total time: 75 minutes 57 seconds
Build step 'Invoke Ant' marked build as failure
Archiving artifacts
Setting ANT_1_8_2_HOME=/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
[WARNINGS] Skipping publisher since build result is FAILURE
Recording test results
Setting ANT_1_8_2_HOME=/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any
Setting ANT_1_8_2_HOME=/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2