You are viewing a plain text version of this content. The canonical link for it is here.
Posted to builds@lucene.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2019/11/10 03:07:27 UTC
[JENKINS] Lucene-Solr-NightlyTests-master - Build # 2013 - Failure
Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-master/2013/
2 tests failed.
FAILED: org.apache.solr.cloud.RollingRestartTest.test
Error Message:
Timeout occurred while waiting response from server at: http://127.0.0.1:41439
Stack Trace:
org.apache.solr.client.solrj.SolrServerException: Timeout occurred while waiting response from server at: http://127.0.0.1:41439
at __randomizedtesting.SeedInfo.seed([5BAD2857752BE768:D3F9178DDBD78A90]:0)
at org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:676)
at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:265)
at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:248)
at org.apache.solr.client.solrj.impl.LBSolrClient.doRequest(LBSolrClient.java:368)
at org.apache.solr.client.solrj.impl.LBSolrClient.request(LBSolrClient.java:296)
at org.apache.solr.client.solrj.impl.BaseCloudSolrClient.sendRequest(BaseCloudSolrClient.java:1127)
at org.apache.solr.client.solrj.impl.BaseCloudSolrClient.requestWithRetryOnStaleState(BaseCloudSolrClient.java:896)
at org.apache.solr.client.solrj.impl.BaseCloudSolrClient.request(BaseCloudSolrClient.java:828)
at org.apache.solr.client.solrj.SolrRequest.process(SolrRequest.java:207)
at org.apache.solr.client.solrj.SolrRequest.process(SolrRequest.java:224)
at org.apache.solr.cloud.RollingRestartTest.restartWithRolesTest(RollingRestartTest.java:74)
at org.apache.solr.cloud.RollingRestartTest.test(RollingRestartTest.java:53)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:1082)
at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:1054)
at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
at java.base/java.lang.Thread.run(Thread.java:834)
Caused by: java.net.SocketTimeoutException: Read timed out
at java.base/java.net.SocketInputStream.socketRead0(Native Method)
at java.base/java.net.SocketInputStream.socketRead(SocketInputStream.java:115)
at java.base/java.net.SocketInputStream.read(SocketInputStream.java:168)
at java.base/java.net.SocketInputStream.read(SocketInputStream.java:140)
at org.apache.http.impl.io.SessionInputBufferImpl.streamRead(SessionInputBufferImpl.java:137)
at org.apache.http.impl.io.SessionInputBufferImpl.fillBuffer(SessionInputBufferImpl.java:153)
at org.apache.http.impl.io.SessionInputBufferImpl.readLine(SessionInputBufferImpl.java:282)
at org.apache.http.impl.conn.DefaultHttpResponseParser.parseHead(DefaultHttpResponseParser.java:138)
at org.apache.http.impl.conn.DefaultHttpResponseParser.parseHead(DefaultHttpResponseParser.java:56)
at org.apache.http.impl.io.AbstractMessageParser.parse(AbstractMessageParser.java:259)
at org.apache.http.impl.DefaultBHttpClientConnection.receiveResponseHeader(DefaultBHttpClientConnection.java:163)
at org.apache.http.impl.conn.CPoolProxy.receiveResponseHeader(CPoolProxy.java:165)
at org.apache.http.protocol.HttpRequestExecutor.doReceiveResponse(HttpRequestExecutor.java:273)
at org.apache.http.protocol.HttpRequestExecutor.execute(HttpRequestExecutor.java:125)
at org.apache.http.impl.execchain.MainClientExec.execute(MainClientExec.java:272)
at org.apache.http.impl.execchain.ProtocolExec.execute(ProtocolExec.java:185)
at org.apache.http.impl.execchain.RetryExec.execute(RetryExec.java:89)
at org.apache.http.impl.execchain.RedirectExec.execute(RedirectExec.java:110)
at org.apache.http.impl.client.InternalHttpClient.doExecute(InternalHttpClient.java:185)
at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:83)
at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:56)
at org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:564)
... 51 more
FAILED: org.apache.solr.cloud.hdfs.StressHdfsTest.test
Error Message:
Error from server at http://127.0.0.1:41799/delete_data_dir: Error trying to proxy request for url: http://127.0.0.1:34158/delete_data_dir/update
Stack Trace:
org.apache.solr.client.solrj.impl.HttpSolrClient$RemoteSolrException: Error from server at http://127.0.0.1:41799/delete_data_dir: Error trying to proxy request for url: http://127.0.0.1:34158/delete_data_dir/update
at __randomizedtesting.SeedInfo.seed([5BAD2857752BE768:D3F9178DDBD78A90]:0)
at org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:665)
at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:265)
at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:248)
at org.apache.solr.client.solrj.SolrRequest.process(SolrRequest.java:207)
at org.apache.solr.client.solrj.SolrClient.add(SolrClient.java:177)
at org.apache.solr.client.solrj.SolrClient.add(SolrClient.java:138)
at org.apache.solr.client.solrj.SolrClient.add(SolrClient.java:156)
at org.apache.solr.cloud.hdfs.StressHdfsTest.createAndDeleteCollection(StressHdfsTest.java:185)
at org.apache.solr.cloud.hdfs.StressHdfsTest.test(StressHdfsTest.java:103)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:1082)
at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:1054)
at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
at java.base/java.lang.Thread.run(Thread.java:834)
Build Log:
[...truncated 13841 lines...]
[junit4] Suite: org.apache.solr.cloud.hdfs.StressHdfsTest
[junit4] 2> 157443 INFO (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
[junit4] 2> 157460 INFO (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.a.s.SolrTestCaseJ4 Created dataDir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001/data-dir-13-001
[junit4] 2> 157460 WARN (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=4 numCloses=4
[junit4] 2> 157461 INFO (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP=true
[junit4] 2> 157462 INFO (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.SolrTestCaseJ4$SuppressSSL(bugUrl="https://issues.apache.org/jira/browse/SOLR-5776")
[junit4] 2> 157462 INFO (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /
[junit4] 2> 161207 WARN (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.a.h.u.NativeCodeLoader Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
[junit4] 1> Formatting using clusterid: testClusterID
[junit4] 2> 169432 WARN (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.a.h.m.i.MetricsConfig Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
[junit4] 2> 171161 WARN (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
[junit4] 2> 171395 INFO (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 171524 INFO (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 171524 INFO (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 171524 INFO (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.e.j.s.session node0 Scavenging every 660000ms
[junit4] 2> 171541 INFO (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@53f2d96d{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
[junit4] 2> 174555 INFO (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@731a96f3{hdfs,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/jetty-lucene2-us-west.apache.org-38387-hdfs-_-any-11713975252299262543.dir/webapp/,AVAILABLE}{/hdfs}
[junit4] 2> 174571 INFO (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.e.j.s.AbstractConnector Started ServerConnector@4c7d541f{HTTP/1.1,[http/1.1]}{lucene2-us-west.apache.org:38387}
[junit4] 2> 174571 INFO (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.e.j.s.Server Started @174631ms
[junit4] 2> 182053 WARN (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
[junit4] 2> 182104 INFO (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 182139 INFO (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 182139 INFO (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 182140 INFO (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 182160 INFO (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@2b6ca7ab{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
[junit4] 2> 183360 INFO (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@34baaba9{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/jetty-localhost-44387-datanode-_-any-7130926177527968353.dir/webapp/,AVAILABLE}{/datanode}
[junit4] 2> 183380 INFO (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.e.j.s.AbstractConnector Started ServerConnector@142da90d{HTTP/1.1,[http/1.1]}{localhost:44387}
[junit4] 2> 183380 INFO (SUITE-StressHdfsTest-seed#[5BAD2857752BE768]-worker) [ ] o.e.j.s.Server Started @183440ms
[junit4] 2> 192946 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0x6ea0f81033dda575: Processing first storage report for DS-32f82278-8580-48a8-b501-e1b78675b94c from datanode 2b72e100-4196-4869-ba36-44231324f5ae
[junit4] 2> 192982 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0x6ea0f81033dda575: from storage DS-32f82278-8580-48a8-b501-e1b78675b94c node DatanodeRegistration(127.0.0.1:45451, datanodeUuid=2b72e100-4196-4869-ba36-44231324f5ae, infoPort=46182, infoSecurePort=0, ipcPort=34793, storageInfo=lv=-57;cid=testClusterID;nsid=1012152498;c=1573343007945), blocks: 0, hasStaleStorage: true, processing time: 2 msecs, invalidatedBlocks: 0
[junit4] 2> 192990 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0x6ea0f81033dda575: Processing first storage report for DS-fe1c4c09-ca00-495e-8eb6-6355ad3727cc from datanode 2b72e100-4196-4869-ba36-44231324f5ae
[junit4] 2> 192990 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0x6ea0f81033dda575: from storage DS-fe1c4c09-ca00-495e-8eb6-6355ad3727cc node DatanodeRegistration(127.0.0.1:45451, datanodeUuid=2b72e100-4196-4869-ba36-44231324f5ae, infoPort=46182, infoSecurePort=0, ipcPort=34793, storageInfo=lv=-57;cid=testClusterID;nsid=1012152498;c=1573343007945), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
[junit4] 2> 194713 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
[junit4] 2> 194732 INFO (ZkTestServer Run Thread) [ ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
[junit4] 2> 194732 INFO (ZkTestServer Run Thread) [ ] o.a.s.c.ZkTestServer Starting server
[junit4] 2> 194843 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.ZkTestServer start zk server on port:44669
[junit4] 2> 194843 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.ZkTestServer waitForServerUp: 127.0.0.1:44669
[junit4] 2> 194843 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:44669
[junit4] 2> 194843 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 44669
[junit4] 2> 194944 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 195078 INFO (zkConnectionManagerCallback-222-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 195090 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 195275 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 195364 INFO (zkConnectionManagerCallback-224-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 195364 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 195437 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml to /configs/conf1/solrconfig.xml
[junit4] 2> 195465 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/schema.xml to /configs/conf1/schema.xml
[junit4] 2> 195482 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
[junit4] 2> 195530 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/stopwords.txt to /configs/conf1/stopwords.txt
[junit4] 2> 195546 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/protwords.txt to /configs/conf1/protwords.txt
[junit4] 2> 195555 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/currency.xml to /configs/conf1/currency.xml
[junit4] 2> 195576 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml to /configs/conf1/enumsConfig.xml
[junit4] 2> 195577 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json to /configs/conf1/open-exchange-rates.json
[junit4] 2> 195578 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt to /configs/conf1/mapping-ISOLatin1Accent.txt
[junit4] 2> 195592 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt to /configs/conf1/old_synonyms.txt
[junit4] 2> 195594 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/synonyms.txt to /configs/conf1/synonyms.txt
[junit4] 2> 195648 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.AbstractFullDistribZkTestBase Will use NRT replicas unless explicitly asked otherwise
[junit4] 2> 197150 WARN (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 197152 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 197152 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
[junit4] 2> 197152 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 197166 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 197166 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 197166 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.e.j.s.session node0 Scavenging every 660000ms
[junit4] 2> 197167 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@6468cbda{/,null,AVAILABLE}
[junit4] 2> 197169 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.e.j.s.AbstractConnector Started ServerConnector@11e6c865{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:36109}
[junit4] 2> 197169 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.e.j.s.Server Started @197229ms
[junit4] 2> 197169 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/, solr.data.dir=hdfs://lucene2-us-west.apache.org:32919/hdfs__lucene2-us-west.apache.org_32919__home_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-master_checkout_solr_build_solr-core_test_J1_temp_solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001_tempDir-002_control_data, hostPort=36109, coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001/control-001/cores, replicaType=NRT}
[junit4] 2> 197169 ERROR (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
[junit4] 2> 197169 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 197169 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version 9.0.0
[junit4] 2> 197169 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 197169 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 197170 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time: 2019-11-09T23:43:58.844024Z
[junit4] 2> 197188 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 197292 INFO (zkConnectionManagerCallback-226-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 197304 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 197455 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
[junit4] 2> 197455 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001/control-001/solr.xml
[junit4] 2> 197495 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
[junit4] 2> 197495 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 197497 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@7be9d315, but no JMX reporters were configured - adding default JMX reporter.
[junit4] 2> 198052 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
[junit4] 2> 198070 WARN (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@7a290f31[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 198070 WARN (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@7a290f31[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 198087 WARN (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@6a398303[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 198087 WARN (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@6a398303[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 198088 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:44669/solr
[junit4] 2> 198098 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 198151 INFO (zkConnectionManagerCallback-233-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 198171 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 198407 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [n:127.0.0.1:36109_ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 198495 INFO (zkConnectionManagerCallback-235-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 198496 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [n:127.0.0.1:36109_ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 199965 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [n:127.0.0.1:36109_ ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:36109_
[junit4] 2> 199979 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [n:127.0.0.1:36109_ ] o.a.s.c.Overseer Overseer (id=75706667183177732-127.0.0.1:36109_-n_0000000000) starting
[junit4] 2> 200052 INFO (OverseerStateUpdate-75706667183177732-127.0.0.1:36109_-n_0000000000) [n:127.0.0.1:36109_ ] o.a.s.c.Overseer Starting to work on the main queue : 127.0.0.1:36109_
[junit4] 2> 200110 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [n:127.0.0.1:36109_ ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:36109_
[junit4] 2> 200199 INFO (zkCallback-234-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 200527 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [n:127.0.0.1:36109_ ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
[junit4] 2> 200838 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [n:127.0.0.1:36109_ ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7be9d315
[junit4] 2> 200946 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [n:127.0.0.1:36109_ ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7be9d315
[junit4] 2> 200949 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [n:127.0.0.1:36109_ ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7be9d315
[junit4] 2> 201021 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [n:127.0.0.1:36109_ ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001/control-001/cores
[junit4] 2> 201535 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 201641 INFO (zkConnectionManagerCallback-244-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 201645 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 201663 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 201664 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:44669/solr ready
[junit4] 2> 201809 INFO (qtp2144897355-776) [n:127.0.0.1:36109_ ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:36109_&wt=javabin&version=2 and sendToOCPQueue=true
[junit4] 2> 201922 INFO (OverseerThreadFactory-328-thread-1-processing-n:127.0.0.1:36109_) [n:127.0.0.1:36109_ ] o.a.s.c.a.c.CreateCollectionCmd Create collection control_collection
[junit4] 2> 202660 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ x:control_collection_shard1_replica_n1 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT
[junit4] 2> 202660 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ x:control_collection_shard1_replica_n1 ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
[junit4] 2> 203905 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
[junit4] 2> 204431 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.s.IndexSchema [control_collection_shard1_replica_n1] Schema name=test
[junit4] 2> 205724 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
[junit4] 2> 206075 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.CoreContainer Creating SolrCore 'control_collection_shard1_replica_n1' using configuration from collection control_collection, trusted=true
[junit4] 2> 206109 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.control_collection.shard1.replica_n1' (registry 'solr.core.control_collection.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7be9d315
[junit4] 2> 206168 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://lucene2-us-west.apache.org:32919/solr_hdfs_home
[junit4] 2> 206168 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
[junit4] 2> 206168 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.SolrCore [[control_collection_shard1_replica_n1] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001/control-001/cores/control_collection_shard1_replica_n1], dataDir=[hdfs://lucene2-us-west.apache.org:32919/solr_hdfs_home/control_collection/core_node2/data/]
[junit4] 2> 206236 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:32919/solr_hdfs_home/control_collection/core_node2/data/snapshot_metadata
[junit4] 2> 206510 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 206511 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
[junit4] 2> 206511 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
[junit4] 2> 207579 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 207653 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:32919/solr_hdfs_home/control_collection/core_node2/data
[junit4] 2> 208020 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:32919/solr_hdfs_home/control_collection/core_node2/data/index
[junit4] 2> 208126 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 208126 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
[junit4] 2> 208126 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
[junit4] 2> 208275 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 208275 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=8, maxMergeAtOnceExplicit=9, maxMergedSegmentMB=21.4580078125, floorSegmentMB=0.90625, forceMergeDeletesPctAllowed=11.961965357568095, segmentsPerTier=38.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.76044293700138, deletesPctAllowed=45.87390477859195
[junit4] 2> 210937 WARN (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A, b=B}}}
[junit4] 2> 211840 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
[junit4] 2> 211840 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 211840 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=2
[junit4] 2> 212080 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
[junit4] 2> 212080 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
[junit4] 2> 212116 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.MockRandomMergePolicy: org.apache.lucene.index.MockRandomMergePolicy@3223c54a
[junit4] 2> 212766 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@4c04b476[control_collection_shard1_replica_n1] main]
[junit4] 2> 212781 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
[junit4] 2> 212782 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 212817 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
[junit4] 2> 212838 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1649769766727974912
[junit4] 2> 212947 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/control_collection/terms/shard1 to Terms{values={core_node2=0}, version=0}
[junit4] 2> 212947 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/control_collection/leaders/shard1
[junit4] 2> 212956 INFO (searcherExecutor-333-thread-1-processing-n:127.0.0.1:36109_ x:control_collection_shard1_replica_n1 c:control_collection s:shard1) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.SolrCore [control_collection_shard1_replica_n1] Registered new searcher Searcher@4c04b476[control_collection_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 212966 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
[junit4] 2> 212966 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
[junit4] 2> 212966 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:36109/control_collection_shard1_replica_n1/
[junit4] 2> 212966 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
[junit4] 2> 212966 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.SyncStrategy http://127.0.0.1:36109/control_collection_shard1_replica_n1/ has no replicas
[junit4] 2> 212983 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContextBase Creating leader registration node /collections/control_collection/leaders/shard1/leader after winning as /collections/control_collection/leader_elect/shard1/election/75706667183177732-core_node2-n_0000000000
[junit4] 2> 213022 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:36109/control_collection_shard1_replica_n1/ shard1
[junit4] 2> 213037 INFO (zkCallback-234-thread-1) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
[junit4] 2> 213039 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.c.ZkController I am the leader, no recovery necessary
[junit4] 2> 213058 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ c:control_collection s:shard1 x:control_collection_shard1_replica_n1 ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT} status=0 QTime=10399
[junit4] 2> 213184 INFO (qtp2144897355-776) [n:127.0.0.1:36109_ ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at most 45 seconds. Check all shard replicas
[junit4] 2> 213219 INFO (zkCallback-234-thread-1) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
[junit4] 2> 213220 INFO (zkCallback-234-thread-2) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
[junit4] 2> 213235 INFO (qtp2144897355-776) [n:127.0.0.1:36109_ ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:36109_&wt=javabin&version=2} status=0 QTime=11426
[junit4] 2> 213248 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.AbstractFullDistribZkTestBase Waiting to see 1 active replicas in collection: control_collection
[junit4] 2> 213475 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 213556 INFO (zkConnectionManagerCallback-250-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 213568 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 213570 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 213572 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:44669/solr ready
[junit4] 2> 213587 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection loss:false
[junit4] 2> 213614 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&name=collection1&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=&stateFormat=2&wt=javabin&version=2 and sendToOCPQueue=true
[junit4] 2> 213885 INFO (OverseerThreadFactory-328-thread-2-processing-n:127.0.0.1:36109_) [n:127.0.0.1:36109_ ] o.a.s.c.a.c.CreateCollectionCmd Create collection collection1
[junit4] 2> 213901 INFO (OverseerCollectionConfigSetProcessor-75706667183177732-127.0.0.1:36109_-n_0000000000) [n:127.0.0.1:36109_ ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000000 doesn't exist. Requestor may have disconnected from ZooKeeper
[junit4] 2> 214194 WARN (OverseerThreadFactory-328-thread-2-processing-n:127.0.0.1:36109_) [n:127.0.0.1:36109_ ] o.a.s.c.a.c.CreateCollectionCmd It is unusual to create a collection (collection1) without cores.
[junit4] 2> 214208 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at most 45 seconds. Check all shard replicas
[junit4] 2> 214209 INFO (qtp2144897355-778) [n:127.0.0.1:36109_ ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={collection.configName=conf1&name=collection1&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=&stateFormat=2&wt=javabin&version=2} status=0 QTime=595
[junit4] 2> 214243 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.SolrCloudTestCase active slice count: 1 expected:1
[junit4] 2> 214243 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.SolrCloudTestCase active replica count: 0 expected replica count: 0
[junit4] 2> 214243 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.SolrCloudTestCase active slice count: 1 expected:1
[junit4] 2> 214243 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.SolrCloudTestCase active replica count: 0 expected replica count: 0
[junit4] 2> 214244 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.SolrCloudTestCase active slice count: 1 expected:1
[junit4] 2> 214244 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.SolrCloudTestCase active replica count: 0 expected replica count: 0
[junit4] 2> 214244 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.AbstractFullDistribZkTestBase Creating jetty instances pullReplicaCount=0 numOtherReplicas=7
[junit4] 2> 215941 INFO (OverseerCollectionConfigSetProcessor-75706667183177732-127.0.0.1:36109_-n_0000000000) [n:127.0.0.1:36109_ ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000002 doesn't exist. Requestor may have disconnected from ZooKeeper
[junit4] 2> 216016 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001/shard-1-001 of type NRT
[junit4] 2> 216087 WARN (closeThreadPool-251-thread-1) [ ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 216088 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 216088 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
[junit4] 2> 216088 INFO (closeThreadPool-251-thread-1) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 216224 INFO (closeThreadPool-251-thread-1) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 216224 INFO (closeThreadPool-251-thread-1) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 216224 INFO (closeThreadPool-251-thread-1) [ ] o.e.j.s.session node0 Scavenging every 660000ms
[junit4] 2> 216241 INFO (closeThreadPool-251-thread-1) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@71e83c6d{/,null,AVAILABLE}
[junit4] 2> 216254 INFO (closeThreadPool-251-thread-1) [ ] o.e.j.s.AbstractConnector Started ServerConnector@25260cdd{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:46017}
[junit4] 2> 216254 INFO (closeThreadPool-251-thread-1) [ ] o.e.j.s.Server Started @216314ms
[junit4] 2> 216254 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/, solrconfig=solrconfig.xml, solr.data.dir=hdfs://lucene2-us-west.apache.org:32919/hdfs__lucene2-us-west.apache.org_32919__home_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-master_checkout_solr_build_solr-core_test_J1_temp_solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001_tempDir-002_jetty1, hostPort=46017, coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001/shard-1-001/cores, replicaType=NRT}
[junit4] 2> 216255 ERROR (closeThreadPool-251-thread-1) [ ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
[junit4] 2> 216255 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 216261 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr? version 9.0.0
[junit4] 2> 216261 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 216261 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 216261 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time: 2019-11-09T23:44:17.935394Z
[junit4] 2> 216314 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 216423 INFO (zkConnectionManagerCallback-253-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 216423 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 216562 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
[junit4] 2> 216562 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001/shard-1-001/solr.xml
[junit4] 2> 216637 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
[junit4] 2> 216637 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 216638 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@7be9d315, but no JMX reporters were configured - adding default JMX reporter.
[junit4] 2> 217845 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
[junit4] 2> 217865 WARN (closeThreadPool-251-thread-1) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@7e949885[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 217865 WARN (closeThreadPool-251-thread-1) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@7e949885[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 217902 WARN (closeThreadPool-251-thread-1) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@7b3793b2[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 217902 WARN (closeThreadPool-251-thread-1) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@7b3793b2[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 217903 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:44669/solr
[junit4] 2> 217916 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 218067 INFO (zkConnectionManagerCallback-260-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 218067 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 218233 INFO (closeThreadPool-251-thread-1) [n:127.0.0.1:46017_ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 218287 INFO (zkConnectionManagerCallback-262-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 218288 INFO (closeThreadPool-251-thread-1) [n:127.0.0.1:46017_ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 218399 INFO (closeThreadPool-251-thread-1) [n:127.0.0.1:46017_ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 218433 INFO (closeThreadPool-251-thread-1) [n:127.0.0.1:46017_ ] o.a.s.c.ZkController Publish node=127.0.0.1:46017_ as DOWN
[junit4] 2> 218435 INFO (closeThreadPool-251-thread-1) [n:127.0.0.1:46017_ ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
[junit4] 2> 218435 INFO (closeThreadPool-251-thread-1) [n:127.0.0.1:46017_ ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:46017_
[junit4] 2> 218470 INFO (zkCallback-234-thread-3) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 218486 INFO (zkCallback-249-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 218506 INFO (zkCallback-261-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 218708 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001/shard-2-001 of type NRT
[junit4] 2> 218717 INFO (closeThreadPool-251-thread-1) [n:127.0.0.1:46017_ ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
[junit4] 2> 218798 WARN (closeThreadPool-251-thread-2) [ ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 218799 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 218799 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
[junit4] 2> 218799 INFO (closeThreadPool-251-thread-2) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 218903 INFO (closeThreadPool-251-thread-1) [n:127.0.0.1:46017_ ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7be9d315
[junit4] 2> 219050 INFO (closeThreadPool-251-thread-1) [n:127.0.0.1:46017_ ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7be9d315
[junit4] 2> 219070 INFO (closeThreadPool-251-thread-1) [n:127.0.0.1:46017_ ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7be9d315
[junit4] 2> 219152 INFO (closeThreadPool-251-thread-1) [n:127.0.0.1:46017_ ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001/shard-1-001/cores
[junit4] 2> 219169 INFO (closeThreadPool-251-thread-2) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 219169 INFO (closeThreadPool-251-thread-2) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 219169 INFO (closeThreadPool-251-thread-2) [ ] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 219170 INFO (closeThreadPool-251-thread-2) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@2914fa09{/,null,AVAILABLE}
[junit4] 2> 219277 INFO (closeThreadPool-251-thread-2) [ ] o.e.j.s.AbstractConnector Started ServerConnector@19418992{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:41799}
[junit4] 2> 219277 INFO (closeThreadPool-251-thread-2) [ ] o.e.j.s.Server Started @219337ms
[junit4] 2> 219277 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/, solrconfig=solrconfig.xml, solr.data.dir=hdfs://lucene2-us-west.apache.org:32919/hdfs__lucene2-us-west.apache.org_32919__home_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-master_checkout_solr_build_solr-core_test_J1_temp_solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001_tempDir-002_jetty2, hostPort=41799, coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001/shard-2-001/cores, replicaType=NRT}
[junit4] 2> 219278 ERROR (closeThreadPool-251-thread-2) [ ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
[junit4] 2> 219278 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 219278 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr? version 9.0.0
[junit4] 2> 219278 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 219278 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 219278 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time: 2019-11-09T23:44:20.952539Z
[junit4] 2> 219306 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 219769 INFO (zkConnectionManagerCallback-268-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 219777 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 219952 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
[junit4] 2> 219952 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001/shard-2-001/solr.xml
[junit4] 2> 220076 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
[junit4] 2> 220076 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 220111 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@7be9d315, but no JMX reporters were configured - adding default JMX reporter.
[junit4] 2> 220150 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.AbstractFullDistribZkTestBase waitForLiveNode: 127.0.0.1:46017_
[junit4] 2> 222135 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001/shard-3-001 of type NRT
[junit4] 2> 222169 WARN (closeThreadPool-251-thread-1) [ ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 222170 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 222170 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
[junit4] 2> 222170 INFO (closeThreadPool-251-thread-1) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 222315 INFO (closeThreadPool-251-thread-1) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 222315 INFO (closeThreadPool-251-thread-1) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 222315 INFO (closeThreadPool-251-thread-1) [ ] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 222316 INFO (closeThreadPool-251-thread-1) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@637273bd{/,null,AVAILABLE}
[junit4] 2> 222405 INFO (closeThreadPool-251-thread-1) [ ] o.e.j.s.AbstractConnector Started ServerConnector@185ee432{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:45419}
[junit4] 2> 222405 INFO (closeThreadPool-251-thread-1) [ ] o.e.j.s.Server Started @222465ms
[junit4] 2> 222405 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/, solrconfig=solrconfig.xml, solr.data.dir=hdfs://lucene2-us-west.apache.org:32919/hdfs__lucene2-us-west.apache.org_32919__home_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-master_checkout_solr_build_solr-core_test_J1_temp_solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001_tempDir-002_jetty3, hostPort=45419, coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/../../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001/shard-3-001/cores, replicaType=NRT}
[junit4] 2> 222441 ERROR (closeThreadPool-251-thread-1) [ ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
[junit4] 2> 222441 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 222441 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version 9.0.0
[junit4] 2> 222441 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 222441 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 222441 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time: 2019-11-09T23:44:24.115598Z
[junit4] 2> 222477 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 222608 INFO (zkConnectionManagerCallback-271-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 222608 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 222734 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
[junit4] 2> 222734 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001/shard-3-001/solr.xml
[junit4] 2> 222820 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
[junit4] 2> 222820 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 222822 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@7be9d315, but no JMX reporters were configured - adding default JMX reporter.
[junit4] 2> 224210 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
[junit4] 2> 224228 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
[junit4] 2> 224231 WARN (closeThreadPool-251-thread-1) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@4d6b0491[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 224231 WARN (closeThreadPool-251-thread-1) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@4d6b0491[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 224389 WARN (closeThreadPool-251-thread-1) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@7dba970a[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 224389 WARN (closeThreadPool-251-thread-1) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@7dba970a[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 224390 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:44669/solr
[junit4] 2> 224412 WARN (closeThreadPool-251-thread-2) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@9aa32ba[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 224412 WARN (closeThreadPool-251-thread-2) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@9aa32ba[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 224424 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 224693 WARN (closeThreadPool-251-thread-2) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@57ca31d0[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 224693 WARN (closeThreadPool-251-thread-2) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@57ca31d0[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 224695 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:44669/solr
[junit4] 2> 224695 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 224704 INFO (zkConnectionManagerCallback-279-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 224716 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 224802 INFO (zkConnectionManagerCallback-284-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 224802 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 224915 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 225236 INFO (zkConnectionManagerCallback-286-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 225236 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 225374 INFO (closeThreadPool-251-thread-2) [n:127.0.0.1:41799_ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 225484 INFO (zkConnectionManagerCallback-288-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 225484 INFO (closeThreadPool-251-thread-2) [n:127.0.0.1:41799_ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 225560 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
[junit4] 2> 225631 INFO (closeThreadPool-251-thread-2) [n:127.0.0.1:41799_ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
[junit4] 2> 225649 INFO (closeThreadPool-251-thread-2) [n:127.0.0.1:41799_ ] o.a.s.c.ZkController Publish node=127.0.0.1:41799_ as DOWN
[junit4] 2> 225922 INFO (closeThreadPool-251-thread-2) [n:127.0.0.1:41799_ ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
[junit4] 2> 225922 INFO (closeThreadPool-251-thread-2) [n:127.0.0.1:41799_ ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:41799_
[junit4] 2> 225941 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.ZkController Publish node=127.0.0.1:45419_ as DOWN
[junit4] 2> 225959 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
[junit4] 2> 225959 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:45419_
[junit4] 2> 225977 INFO (zkCallback-249-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 225983 INFO (zkCallback-261-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 225983 INFO (zkCallback-234-thread-2) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 225996 INFO (zkCallback-234-thread-3) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 226020 INFO (zkCallback-249-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 226020 INFO (zkCallback-261-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 226048 INFO (zkCallback-285-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (4)
[junit4] 2> 226049 INFO (zkCallback-287-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (4)
[junit4] 2> 226093 INFO (closeThreadPool-251-thread-2) [n:127.0.0.1:41799_ ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
[junit4] 2> 226279 INFO (closeThreadPool-251-thread-2) [n:127.0.0.1:41799_ ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7be9d315
[junit4] 2> 226351 INFO (closeThreadPool-251-thread-2) [n:127.0.0.1:41799_ ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7be9d315
[junit4] 2> 226351 INFO (closeThreadPool-251-thread-2) [n:127.0.0.1:41799_ ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7be9d315
[junit4] 2> 226353 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
[junit4] 2> 226367 INFO (closeThreadPool-251-thread-2) [n:127.0.0.1:41799_ ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001/shard-2-001/cores
[junit4] 2> 226542 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7be9d315
[junit4] 2> 226707 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7be9d315
[junit4] 2> 226708 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7be9d315
[junit4] 2> 226745 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/../../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001/shard-3-001/cores
[junit4] 2> 227665 INFO (closeThreadPool-251-thread-1) [ ] o.a.s.c.AbstractFullDistribZkTestBase waitForLiveNode: 127.0.0.1:45419_
[junit4] 2> 227760 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.AbstractFullDistribZkTestBase waitForLiveNode: 127.0.0.1:41799_
[junit4] 2> 227960 INFO (TEST-StressHdfsTest.test-seed#[5BAD2857752BE768]) [ ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 4 in directory /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001/shard-4-001 of type NRT
[junit4] 2> 227960 WARN (closeThreadPool-251-thread-2) [ ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 227961 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 227961 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
[junit4] 2> 227961 INFO (closeThreadPool-251-thread-2) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 228091 INFO (closeThreadPool-251-thread-2) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 228091 INFO (closeThreadPool-251-thread-2) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 228091 INFO (closeThreadPool-251-thread-2) [ ] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 228105 INFO (closeThreadPool-251-thread-2) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@3fc80a59{/,null,AVAILABLE}
[junit4] 2> 228288 INFO (closeThreadPool-251-thread-2) [ ] o.e.j.s.AbstractConnector Started ServerConnector@33e19b8d{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:46493}
[junit4] 2> 228288 INFO (closeThreadPool-251-thread-2) [ ] o.e.j.s.Server Started @228348ms
[junit4] 2> 228288 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/, solrconfig=solrconfig.xml, solr.data.dir=hdfs://lucene2-us-west.apache.org:32919/hdfs__lucene2-us-west.apache.org_32919__home_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-master_checkout_solr_build_solr-core_test_J1_temp_solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001_tempDir-002_jetty4, hostPort=46493, coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/../../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001/shard-4-001/cores, replicaType=NRT}
[junit4] 2> 228288 ERROR (closeThreadPool-251-thread-2) [ ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
[junit4] 2> 228288 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 228289 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version 9.0.0
[junit4] 2> 228289 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 228289 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 228289 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time: 2019-11-09T23:44:29.963114Z
[junit4] 2> 228348 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 228508 INFO (zkConnectionManagerCallback-298-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 228508 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 228653 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
[junit4] 2> 228653 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.StressHdfsTest_5BAD2857752BE768-001/shard-4-001/solr.xml
[junit4] 2> 228657 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
[junit4] 2> 228657 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 228727 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@7be9d315, but no JMX reporters were configured - adding default JMX reporter.
[junit4] 2> 230208 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
[junit4] 2> 230262 WARN (closeThreadPool-251-thread-2) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@24d0ce54[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 230262 WARN (closeThreadPool-251-thread-2) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@24d0ce54[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 230437 WARN (closeThreadPool-251-thread-2) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@d8f5b57[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 230437 WARN (closeThreadPool-251-thread-2) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@d8f5b57[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 230439 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:44669/solr
[junit4] 2> 230485 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 230652 INFO (zkConnectionManagerCallback-305-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 230652 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 230782 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 230879 INFO (zkConnectionManagerCallback-307-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 230879 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 231023 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (4)
[junit4] 2> 231041 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.ZkController Publish node=127.0.0.1:46493_ as DOWN
[junit4] 2> 231079 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
[junit4] 2> 231079 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:46493_
[junit4] 2> 231127 INFO (zkCallback-285-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
[junit4] 2> 231127 INFO (zkCallback-261-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
[junit4] 2> 231127 INFO (zkCallback-249-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
[junit4] 2> 231127 INFO (zkCallback-287-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
[junit4] 2> 231127 INFO (zkCallback-234-thread-3) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
[junit4] 2> 231155 INFO (zkCallback-306-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
[junit4] 2> 231470 INFO (closeThreadPool-251-thread-2) [ ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
[junit4] 2> 231551
[...truncated too long message...]
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[junit4] > at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[junit4] > at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[junit4] > at java.base/java.lang.reflect.Method.invoke(Method.java:566)
[junit4] > at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:1082)
[junit4] > at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:1054)
[junit4] > at java.base/java.lang.Thread.run(Thread.java:834)
[junit4] > Caused by: java.net.SocketTimeoutException: Read timed out
[junit4] > at java.base/java.net.SocketInputStream.socketRead0(Native Method)
[junit4] > at java.base/java.net.SocketInputStream.socketRead(SocketInputStream.java:115)
[junit4] > at java.base/java.net.SocketInputStream.read(SocketInputStream.java:168)
[junit4] > at java.base/java.net.SocketInputStream.read(SocketInputStream.java:140)
[junit4] > at org.apache.http.impl.io.SessionInputBufferImpl.streamRead(SessionInputBufferImpl.java:137)
[junit4] > at org.apache.http.impl.io.SessionInputBufferImpl.fillBuffer(SessionInputBufferImpl.java:153)
[junit4] > at org.apache.http.impl.io.SessionInputBufferImpl.readLine(SessionInputBufferImpl.java:282)
[junit4] > at org.apache.http.impl.conn.DefaultHttpResponseParser.parseHead(DefaultHttpResponseParser.java:138)
[junit4] > at org.apache.http.impl.conn.DefaultHttpResponseParser.parseHead(DefaultHttpResponseParser.java:56)
[junit4] > at org.apache.http.impl.io.AbstractMessageParser.parse(AbstractMessageParser.java:259)
[junit4] > at org.apache.http.impl.DefaultBHttpClientConnection.receiveResponseHeader(DefaultBHttpClientConnection.java:163)
[junit4] > at org.apache.http.impl.conn.CPoolProxy.receiveResponseHeader(CPoolProxy.java:165)
[junit4] > at org.apache.http.protocol.HttpRequestExecutor.doReceiveResponse(HttpRequestExecutor.java:273)
[junit4] > at org.apache.http.protocol.HttpRequestExecutor.execute(HttpRequestExecutor.java:125)
[junit4] > at org.apache.http.impl.execchain.MainClientExec.execute(MainClientExec.java:272)
[junit4] > at org.apache.http.impl.execchain.ProtocolExec.execute(ProtocolExec.java:185)
[junit4] > at org.apache.http.impl.execchain.RetryExec.execute(RetryExec.java:89)
[junit4] > at org.apache.http.impl.execchain.RedirectExec.execute(RedirectExec.java:110)
[junit4] > at org.apache.http.impl.client.InternalHttpClient.doExecute(InternalHttpClient.java:185)
[junit4] > at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:83)
[junit4] > at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:56)
[junit4] > at org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:564)
[junit4] > ... 51 more
[junit4] 2> NOTE: leaving temporary files on disk at: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.RollingRestartTest_5BAD2857752BE768-001
[junit4] 2> Nov 10, 2019 12:02:31 AM com.carrotsearch.randomizedtesting.ThreadLeakControl checkThreadLeaks
[junit4] 2> WARNING: Will linger awaiting termination of 1 leaked thread(s).
[junit4] 2> NOTE: test params are: codec=Asserting(Lucene80): {}, docValues:{}, maxPointsInLeafNode=500, maxMBSortInHeap=5.290704132859334, sim=Asserting(org.apache.lucene.search.similarities.AssertingSimilarity@2c806c53), locale=en-MO, timezone=Japan
[junit4] 2> NOTE: Linux 4.4.0-112-generic amd64/Oracle Corporation 11.0.4 (64-bit)/cpus=4,threads=1,free=141151672,total=525336576
[junit4] 2> NOTE: All tests run in this JVM: [DistributedFacetPivotSmallAdvancedTest, MultiSolrCloudTestCaseTest, SchemaVersionSpecificBehaviorTest, ZkNodePropsTest, TestFieldCollectionResource, TestConfigSets, StressHdfsTest, TestCSVResponseWriter, TestMaxScoreQueryParser, CloudExitableDirectoryReaderTest, DimensionalRoutedAliasUpdateProcessorTest, TestChildDocTransformerHierarchy, TestRandomFaceting, LoggingHandlerTest, TestPhraseSuggestions, RankQueryTest, RollingRestartTest]
[junit4] Completed [103/888 (2!)] on J1 in 216.86s, 1 test, 1 error <<< FAILURES!
[...truncated 52211 lines...]
-ecj-javadoc-lint-tests:
[mkdir] Created dir: /tmp/ecj2084710748
[ecj-lint] Compiling 48 source files to /tmp/ecj2084710748
[ecj-lint] invalid Class-Path header in manifest of jar file: /home/jenkins/.ivy2/cache/org.restlet.jee/org.restlet/jars/org.restlet-2.3.0.jar
[ecj-lint] invalid Class-Path header in manifest of jar file: /home/jenkins/.ivy2/cache/org.restlet.jee/org.restlet.ext.servlet/jars/org.restlet.ext.servlet-2.3.0.jar
[ecj-lint] ----------
[ecj-lint] 1. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 23)
[ecj-lint] import javax.naming.NamingException;
[ecj-lint] ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
[ecj-lint] The type javax.naming.NamingException is not accessible
[ecj-lint] ----------
[ecj-lint] 2. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 28)
[ecj-lint] public class MockInitialContextFactory implements InitialContextFactory {
[ecj-lint] ^^^^^^^^^^^^^^^^^^^^^^^^^
[ecj-lint] The type MockInitialContextFactory must implement the inherited abstract method InitialContextFactory.getInitialContext(Hashtable<?,?>)
[ecj-lint] ----------
[ecj-lint] 3. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 30)
[ecj-lint] private final javax.naming.Context context;
[ecj-lint] ^^^^^^^^^^^^^^^^^^^^
[ecj-lint] The type javax.naming.Context is not accessible
[ecj-lint] ----------
[ecj-lint] 4. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 33)
[ecj-lint] context = mock(javax.naming.Context.class);
[ecj-lint] ^^^^^^^
[ecj-lint] context cannot be resolved to a variable
[ecj-lint] ----------
[ecj-lint] 5. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 33)
[ecj-lint] context = mock(javax.naming.Context.class);
[ecj-lint] ^^^^^^^^^^^^^^^^^^^^
[ecj-lint] The type javax.naming.Context is not accessible
[ecj-lint] ----------
[ecj-lint] 6. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 36)
[ecj-lint] when(context.lookup(anyString())).thenAnswer(invocation -> objects.get(invocation.getArgument(0)));
[ecj-lint] ^^^^^^^
[ecj-lint] context cannot be resolved
[ecj-lint] ----------
[ecj-lint] 7. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 38)
[ecj-lint] } catch (NamingException e) {
[ecj-lint] ^^^^^^^^^^^^^^^
[ecj-lint] NamingException cannot be resolved to a type
[ecj-lint] ----------
[ecj-lint] 8. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 45)
[ecj-lint] public javax.naming.Context getInitialContext(Hashtable env) {
[ecj-lint] ^^^^^^^^^^^^^^^^^^^^
[ecj-lint] The type javax.naming.Context is not accessible
[ecj-lint] ----------
[ecj-lint] 9. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 46)
[ecj-lint] return context;
[ecj-lint] ^^^^^^^
[ecj-lint] context cannot be resolved to a variable
[ecj-lint] ----------
[ecj-lint] 9 problems (9 errors)
BUILD FAILED
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/build.xml:652: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/build.xml:101: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build.xml:651: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/common-build.xml:479: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/common-build.xml:2033: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/common-build.xml:2066: Compile failed; see the compiler error output for details.
Total time: 587 minutes 53 seconds
Build step 'Invoke Ant' marked build as failure
Archiving artifacts
Recording test results
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any
[JENKINS] Lucene-Solr-NightlyTests-master - Build # 2016 - Still unstable
Posted by Apache Jenkins Server <je...@builds.apache.org>.
Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-master/2016/
1 tests failed.
FAILED: org.apache.solr.cloud.MoveReplicaHDFSTest.test
Error Message:
expected not same
Stack Trace:
java.lang.AssertionError: expected not same
at __randomizedtesting.SeedInfo.seed([C201F022DD32A68D:4A55CFF873CECB75]:0)
at org.junit.Assert.fail(Assert.java:88)
at org.junit.Assert.failSame(Assert.java:819)
at org.junit.Assert.assertNotSame(Assert.java:798)
at org.junit.Assert.assertNotSame(Assert.java:811)
at org.apache.solr.cloud.MoveReplicaTest.test(MoveReplicaTest.java:147)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
at java.base/java.lang.Thread.run(Thread.java:834)
Build Log:
[...truncated 15324 lines...]
[junit4] Suite: org.apache.solr.cloud.MoveReplicaHDFSTest
[junit4] 2> 4813397 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.a.s.SolrTestCaseJ4 Created dataDir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/data-dir-182-001
[junit4] 2> 4813397 WARN (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=14 numCloses=14
[junit4] 2> 4813397 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP=true
[junit4] 2> 4813398 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.util.RandomizeSSL(reason="", ssl=0.0/0.0, value=0.0/0.0, clientAuth=0.0/0.0)
[junit4] 2> 4813398 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
[junit4] 1> Formatting using clusterid: testClusterID
[junit4] 2> 4813436 WARN (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.a.h.m.i.MetricsConfig Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
[junit4] 2> 4813444 WARN (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
[junit4] 2> 4813446 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 4813447 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 4813447 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 4813447 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.session node0 Scavenging every 660000ms
[junit4] 2> 4813448 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@139bab63{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
[junit4] 2> 4813583 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@4fdf947d{hdfs,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/jetty-lucene2-us-west.apache.org-33844-hdfs-_-any-12167834953344529272.dir/webapp/,AVAILABLE}{/hdfs}
[junit4] 2> 4813583 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.AbstractConnector Started ServerConnector@217ee445{HTTP/1.1,[http/1.1]}{lucene2-us-west.apache.org:33844}
[junit4] 2> 4813583 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.Server Started @4813670ms
[junit4] 2> 4813640 WARN (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
[junit4] 2> 4813641 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 4813642 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 4813642 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 4813642 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 4813642 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@4a4d0606{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
[junit4] 2> 4813775 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@647efeec{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/jetty-localhost-44000-datanode-_-any-9755091353983186414.dir/webapp/,AVAILABLE}{/datanode}
[junit4] 2> 4813775 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.AbstractConnector Started ServerConnector@1220cbd4{HTTP/1.1,[http/1.1]}{localhost:44000}
[junit4] 2> 4813775 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.Server Started @4813862ms
[junit4] 2> 4813841 WARN (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
[junit4] 2> 4813842 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 4813842 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 4813842 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 4813842 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 4813843 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@c8e4c58{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
[junit4] 2> 4813958 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0x8c4201de01aab82d: Processing first storage report for DS-dcd28d8c-54df-4a90-b005-aea85723a1e6 from datanode 60b3af80-d51c-46e6-b27a-65908421715b
[junit4] 2> 4813958 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0x8c4201de01aab82d: from storage DS-dcd28d8c-54df-4a90-b005-aea85723a1e6 node DatanodeRegistration(127.0.0.1:43383, datanodeUuid=60b3af80-d51c-46e6-b27a-65908421715b, infoPort=33555, infoSecurePort=0, ipcPort=46198, storageInfo=lv=-57;cid=testClusterID;nsid=963592837;c=1573584395591), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
[junit4] 2> 4813958 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0x8c4201de01aab82d: Processing first storage report for DS-bec91b77-a036-4823-b45e-b060a0078b13 from datanode 60b3af80-d51c-46e6-b27a-65908421715b
[junit4] 2> 4813958 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0x8c4201de01aab82d: from storage DS-bec91b77-a036-4823-b45e-b060a0078b13 node DatanodeRegistration(127.0.0.1:43383, datanodeUuid=60b3af80-d51c-46e6-b27a-65908421715b, infoPort=33555, infoSecurePort=0, ipcPort=46198, storageInfo=lv=-57;cid=testClusterID;nsid=963592837;c=1573584395591), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
[junit4] 2> 4814024 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@75bb30c{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/jetty-localhost-35274-datanode-_-any-9107664281253266924.dir/webapp/,AVAILABLE}{/datanode}
[junit4] 2> 4814024 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.AbstractConnector Started ServerConnector@22cdb802{HTTP/1.1,[http/1.1]}{localhost:35274}
[junit4] 2> 4814024 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.Server Started @4814111ms
[junit4] 2> 4814108 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0xe4901a0b5868e152: Processing first storage report for DS-f2c3fc80-8e2c-4db3-87ca-9f83dc44c466 from datanode e9ca55f1-537e-47d5-90f3-74ae94ab9c16
[junit4] 2> 4814108 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0xe4901a0b5868e152: from storage DS-f2c3fc80-8e2c-4db3-87ca-9f83dc44c466 node DatanodeRegistration(127.0.0.1:34925, datanodeUuid=e9ca55f1-537e-47d5-90f3-74ae94ab9c16, infoPort=35127, infoSecurePort=0, ipcPort=35978, storageInfo=lv=-57;cid=testClusterID;nsid=963592837;c=1573584395591), blocks: 0, hasStaleStorage: true, processing time: 1 msecs, invalidatedBlocks: 0
[junit4] 2> 4814109 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0xe4901a0b5868e152: Processing first storage report for DS-a0c33f60-a9e4-458c-983d-26ce4ebb7ace from datanode e9ca55f1-537e-47d5-90f3-74ae94ab9c16
[junit4] 2> 4814109 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0xe4901a0b5868e152: from storage DS-a0c33f60-a9e4-458c-983d-26ce4ebb7ace node DatanodeRegistration(127.0.0.1:34925, datanodeUuid=e9ca55f1-537e-47d5-90f3-74ae94ab9c16, infoPort=35127, infoSecurePort=0, ipcPort=35978, storageInfo=lv=-57;cid=testClusterID;nsid=963592837;c=1573584395591), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
[junit4] IGNOR/A 0.00s J2 | MoveReplicaHDFSTest.testFailedMove
[junit4] > Assumption #1: 'awaitsfix' test group is disabled (@AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-12080"))
[junit4] IGNOR/A 0.00s J2 | MoveReplicaHDFSTest.testNormalFailedMove
[junit4] > Assumption #1: 'badapple' test group is disabled (@BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028"))
[junit4] 2> 4814497 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ] o.a.s.SolrTestCaseJ4 ###Starting testNormalMove
[junit4] 2> 4814498 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ] o.a.s.c.MiniSolrCloudCluster Starting cluster of 4 servers in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-002
[junit4] 2> 4814498 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
[junit4] 2> 4814498 INFO (ZkTestServer Run Thread) [ ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
[junit4] 2> 4814499 INFO (ZkTestServer Run Thread) [ ] o.a.s.c.ZkTestServer Starting server
[junit4] 2> 4814599 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ] o.a.s.c.ZkTestServer start zk server on port:38431
[junit4] 2> 4814599 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ] o.a.s.c.ZkTestServer waitForServerUp: 127.0.0.1:38431
[junit4] 2> 4814599 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:38431
[junit4] 2> 4814599 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 38431
[junit4] 2> 4814601 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4814603 INFO (zkConnectionManagerCallback-10101-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4814603 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4814605 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4814606 INFO (zkConnectionManagerCallback-10103-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4814606 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4814607 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4814608 INFO (zkConnectionManagerCallback-10105-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4814608 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4814710 WARN (jetty-launcher-10106-thread-1) [ ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 4814711 INFO (jetty-launcher-10106-thread-1) [ ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 4814711 INFO (jetty-launcher-10106-thread-1) [ ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
[junit4] 2> 4814711 INFO (jetty-launcher-10106-thread-1) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 4814711 WARN (jetty-launcher-10106-thread-2) [ ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 4814712 INFO (jetty-launcher-10106-thread-2) [ ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 4814712 INFO (jetty-launcher-10106-thread-2) [ ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
[junit4] 2> 4814712 INFO (jetty-launcher-10106-thread-2) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 4814712 WARN (jetty-launcher-10106-thread-3) [ ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 4814713 INFO (jetty-launcher-10106-thread-3) [ ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 4814713 INFO (jetty-launcher-10106-thread-3) [ ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
[junit4] 2> 4814713 INFO (jetty-launcher-10106-thread-3) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 4814713 WARN (jetty-launcher-10106-thread-4) [ ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 4814713 INFO (jetty-launcher-10106-thread-4) [ ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 4814713 INFO (jetty-launcher-10106-thread-4) [ ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
[junit4] 2> 4814713 INFO (jetty-launcher-10106-thread-4) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 4814715 INFO (jetty-launcher-10106-thread-2) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 4814715 INFO (jetty-launcher-10106-thread-2) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 4814715 INFO (jetty-launcher-10106-thread-2) [ ] o.e.j.s.session node0 Scavenging every 660000ms
[junit4] 2> 4814716 INFO (jetty-launcher-10106-thread-2) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@69ac8dea{/solr,null,AVAILABLE}
[junit4] 2> 4814716 INFO (jetty-launcher-10106-thread-4) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 4814716 INFO (jetty-launcher-10106-thread-4) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 4814716 INFO (jetty-launcher-10106-thread-4) [ ] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 4814716 INFO (jetty-launcher-10106-thread-1) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 4814716 INFO (jetty-launcher-10106-thread-1) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 4814716 INFO (jetty-launcher-10106-thread-1) [ ] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 4814716 INFO (jetty-launcher-10106-thread-4) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@6dcb93ff{/solr,null,AVAILABLE}
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-2) [ ] o.e.j.s.AbstractConnector Started ServerConnector@1ee635ba{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:45595}
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-2) [ ] o.e.j.s.Server Started @4814804ms
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-2) [ ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=45595}
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-4) [ ] o.e.j.s.AbstractConnector Started ServerConnector@34b7af99{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:43582}
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-4) [ ] o.e.j.s.Server Started @4814804ms
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-4) [ ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=43582}
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-1) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@62d5ad83{/solr,null,AVAILABLE}
[junit4] 2> 4814717 ERROR (jetty-launcher-10106-thread-2) [ ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-2) [ ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 4814717 ERROR (jetty-launcher-10106-thread-4) [ ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-2) [ ] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr? version 9.0.0
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-4) [ ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-2) [ ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-2) [ ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-2) [ ] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time: 2019-11-12T18:46:36.894496Z
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-4) [ ] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr? version 9.0.0
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-4) [ ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-1) [ ] o.e.j.s.AbstractConnector Started ServerConnector@1d10bc8e{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:39856}
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-4) [ ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-4) [ ] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time: 2019-11-12T18:46:36.894556Z
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-1) [ ] o.e.j.s.Server Started @4814804ms
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-1) [ ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=39856}
[junit4] 2> 4814717 ERROR (jetty-launcher-10106-thread-1) [ ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-1) [ ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-1) [ ] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr? version 9.0.0
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-1) [ ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-1) [ ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 4814717 INFO (jetty-launcher-10106-thread-1) [ ] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time: 2019-11-12T18:46:36.894871Z
[junit4] 2> 4814718 INFO (jetty-launcher-10106-thread-3) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 4814718 INFO (jetty-launcher-10106-thread-3) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 4814718 INFO (jetty-launcher-10106-thread-3) [ ] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 4814718 INFO (jetty-launcher-10106-thread-1) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4814719 INFO (jetty-launcher-10106-thread-4) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4814719 INFO (jetty-launcher-10106-thread-2) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4814719 INFO (jetty-launcher-10106-thread-3) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@10e11c86{/solr,null,AVAILABLE}
[junit4] 2> 4814720 INFO (jetty-launcher-10106-thread-3) [ ] o.e.j.s.AbstractConnector Started ServerConnector@4ea841d5{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:40378}
[junit4] 2> 4814720 INFO (jetty-launcher-10106-thread-3) [ ] o.e.j.s.Server Started @4814807ms
[junit4] 2> 4814720 INFO (jetty-launcher-10106-thread-3) [ ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=40378}
[junit4] 2> 4814720 ERROR (jetty-launcher-10106-thread-3) [ ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
[junit4] 2> 4814720 INFO (jetty-launcher-10106-thread-3) [ ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 4814720 INFO (zkConnectionManagerCallback-10110-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4814720 INFO (jetty-launcher-10106-thread-4) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4814720 INFO (jetty-launcher-10106-thread-3) [ ] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr? version 9.0.0
[junit4] 2> 4814720 INFO (jetty-launcher-10106-thread-3) [ ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 4814720 INFO (jetty-launcher-10106-thread-3) [ ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 4814720 INFO (jetty-launcher-10106-thread-3) [ ] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time: 2019-11-12T18:46:36.897513Z
[junit4] 2> 4814721 INFO (jetty-launcher-10106-thread-3) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4814721 INFO (jetty-launcher-10106-thread-4) [ ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
[junit4] 2> 4814721 INFO (zkConnectionManagerCallback-10112-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4814721 INFO (jetty-launcher-10106-thread-1) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4814723 INFO (jetty-launcher-10106-thread-4) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
[junit4] 2> 4814723 INFO (jetty-launcher-10106-thread-4) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 4814725 INFO (jetty-launcher-10106-thread-4) [ ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d, but no JMX reporters were configured - adding default JMX reporter.
[junit4] 2> 4814725 INFO (jetty-launcher-10106-thread-1) [ ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
[junit4] 2> 4814725 INFO (zkConnectionManagerCallback-10108-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4814725 INFO (jetty-launcher-10106-thread-2) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4814726 INFO (zkConnectionManagerCallback-10114-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4814726 INFO (jetty-launcher-10106-thread-3) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4814726 INFO (jetty-launcher-10106-thread-2) [ ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
[junit4] 2> 4814727 INFO (jetty-launcher-10106-thread-1) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
[junit4] 2> 4814727 INFO (jetty-launcher-10106-thread-1) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 4814728 INFO (jetty-launcher-10106-thread-2) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
[junit4] 2> 4814728 INFO (jetty-launcher-10106-thread-2) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 4814729 INFO (jetty-launcher-10106-thread-1) [ ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d, but no JMX reporters were configured - adding default JMX reporter.
[junit4] 2> 4814729 INFO (jetty-launcher-10106-thread-3) [ ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
[junit4] 2> 4814729 INFO (jetty-launcher-10106-thread-2) [ ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d, but no JMX reporters were configured - adding default JMX reporter.
[junit4] 2> 4814732 INFO (jetty-launcher-10106-thread-3) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
[junit4] 2> 4814732 INFO (jetty-launcher-10106-thread-3) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 4814733 INFO (jetty-launcher-10106-thread-3) [ ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d, but no JMX reporters were configured - adding default JMX reporter.
[junit4] 2> 4814996 INFO (jetty-launcher-10106-thread-3) [ ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
[junit4] 2> 4814997 WARN (jetty-launcher-10106-thread-3) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@48d9f27e[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4814997 WARN (jetty-launcher-10106-thread-3) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@48d9f27e[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4814999 WARN (jetty-launcher-10106-thread-3) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@5819beb9[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4814999 WARN (jetty-launcher-10106-thread-3) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@5819beb9[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815001 INFO (jetty-launcher-10106-thread-3) [ ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:38431/solr
[junit4] 2> 4815002 INFO (jetty-launcher-10106-thread-3) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4815002 INFO (zkConnectionManagerCallback-10122-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4815002 INFO (jetty-launcher-10106-thread-3) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4815016 INFO (jetty-launcher-10106-thread-4) [ ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
[junit4] 2> 4815016 WARN (jetty-launcher-10106-thread-4) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@5305c1f6[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815016 WARN (jetty-launcher-10106-thread-4) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@5305c1f6[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815018 WARN (jetty-launcher-10106-thread-4) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@1e376863[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815018 WARN (jetty-launcher-10106-thread-4) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@1e376863[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815019 INFO (jetty-launcher-10106-thread-4) [ ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:38431/solr
[junit4] 2> 4815020 INFO (jetty-launcher-10106-thread-4) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4815021 INFO (zkConnectionManagerCallback-10128-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4815021 INFO (jetty-launcher-10106-thread-4) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4815210 INFO (jetty-launcher-10106-thread-2) [ ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
[junit4] 2> 4815210 WARN (jetty-launcher-10106-thread-2) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@5ad9de7[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815211 WARN (jetty-launcher-10106-thread-2) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@5ad9de7[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815213 WARN (jetty-launcher-10106-thread-2) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@3088a09d[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815213 WARN (jetty-launcher-10106-thread-2) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@3088a09d[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815214 INFO (jetty-launcher-10106-thread-2) [ ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:38431/solr
[junit4] 2> 4815215 INFO (jetty-launcher-10106-thread-2) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4815216 INFO (zkConnectionManagerCallback-10136-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4815216 INFO (jetty-launcher-10106-thread-2) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4815227 INFO (jetty-launcher-10106-thread-4) [n:127.0.0.1:43582_solr ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4815228 INFO (zkConnectionManagerCallback-10138-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4815228 INFO (jetty-launcher-10106-thread-4) [n:127.0.0.1:43582_solr ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4815300 INFO (jetty-launcher-10106-thread-4) [n:127.0.0.1:43582_solr ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:43582_solr
[junit4] 2> 4815301 INFO (jetty-launcher-10106-thread-4) [n:127.0.0.1:43582_solr ] o.a.s.c.Overseer Overseer (id=75722484966359050-127.0.0.1:43582_solr-n_0000000000) starting
[junit4] 2> 4815306 INFO (OverseerStateUpdate-75722484966359050-127.0.0.1:43582_solr-n_0000000000) [n:127.0.0.1:43582_solr ] o.a.s.c.Overseer Starting to work on the main queue : 127.0.0.1:43582_solr
[junit4] 2> 4815306 INFO (jetty-launcher-10106-thread-4) [n:127.0.0.1:43582_solr ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:43582_solr
[junit4] 2> 4815307 INFO (OverseerStateUpdate-75722484966359050-127.0.0.1:43582_solr-n_0000000000) [n:127.0.0.1:43582_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 4815321 INFO (jetty-launcher-10106-thread-4) [n:127.0.0.1:43582_solr ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
[junit4] 2> 4815328 INFO (jetty-launcher-10106-thread-3) [n:127.0.0.1:40378_solr ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4815329 INFO (zkConnectionManagerCallback-10144-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4815329 INFO (jetty-launcher-10106-thread-3) [n:127.0.0.1:40378_solr ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4815333 INFO (jetty-launcher-10106-thread-3) [n:127.0.0.1:40378_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 4815337 INFO (jetty-launcher-10106-thread-3) [n:127.0.0.1:40378_solr ] o.a.s.c.ZkController Publish node=127.0.0.1:40378_solr as DOWN
[junit4] 2> 4815338 INFO (jetty-launcher-10106-thread-4) [n:127.0.0.1:43582_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4815338 INFO (jetty-launcher-10106-thread-3) [n:127.0.0.1:40378_solr ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
[junit4] 2> 4815338 INFO (jetty-launcher-10106-thread-3) [n:127.0.0.1:40378_solr ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:40378_solr
[junit4] 2> 4815339 INFO (zkCallback-10137-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 4815339 INFO (zkCallback-10143-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 4815348 INFO (jetty-launcher-10106-thread-4) [n:127.0.0.1:43582_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4815348 INFO (jetty-launcher-10106-thread-4) [n:127.0.0.1:43582_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4815350 INFO (jetty-launcher-10106-thread-4) [n:127.0.0.1:43582_solr ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-002/node4/.
[junit4] 2> 4815363 INFO (jetty-launcher-10106-thread-3) [n:127.0.0.1:40378_solr ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
[junit4] 2> 4815380 INFO (jetty-launcher-10106-thread-3) [n:127.0.0.1:40378_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4815391 INFO (jetty-launcher-10106-thread-3) [n:127.0.0.1:40378_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4815391 INFO (jetty-launcher-10106-thread-3) [n:127.0.0.1:40378_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4815393 INFO (jetty-launcher-10106-thread-3) [n:127.0.0.1:40378_solr ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-002/node3/.
[junit4] 2> 4815419 INFO (jetty-launcher-10106-thread-1) [ ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
[junit4] 2> 4815420 WARN (jetty-launcher-10106-thread-1) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@191fa1e6[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815420 WARN (jetty-launcher-10106-thread-1) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@191fa1e6[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815422 WARN (jetty-launcher-10106-thread-1) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@6d087d73[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815422 WARN (jetty-launcher-10106-thread-1) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@6d087d73[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 4815424 INFO (jetty-launcher-10106-thread-1) [ ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:38431/solr
[junit4] 2> 4815424 INFO (jetty-launcher-10106-thread-1) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4815425 INFO (zkConnectionManagerCallback-10154-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4815425 INFO (jetty-launcher-10106-thread-1) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4815428 INFO (jetty-launcher-10106-thread-2) [n:127.0.0.1:45595_solr ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4815429 INFO (zkConnectionManagerCallback-10156-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4815429 INFO (jetty-launcher-10106-thread-2) [n:127.0.0.1:45595_solr ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4815433 INFO (jetty-launcher-10106-thread-2) [n:127.0.0.1:45595_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
[junit4] 2> 4815437 INFO (jetty-launcher-10106-thread-2) [n:127.0.0.1:45595_solr ] o.a.s.c.ZkController Publish node=127.0.0.1:45595_solr as DOWN
[junit4] 2> 4815438 INFO (jetty-launcher-10106-thread-2) [n:127.0.0.1:45595_solr ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
[junit4] 2> 4815438 INFO (jetty-launcher-10106-thread-2) [n:127.0.0.1:45595_solr ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:45595_solr
[junit4] 2> 4815439 INFO (zkCallback-10143-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 4815439 INFO (zkCallback-10137-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 4815439 INFO (zkCallback-10155-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 4815452 INFO (jetty-launcher-10106-thread-2) [n:127.0.0.1:45595_solr ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
[junit4] 2> 4815468 INFO (jetty-launcher-10106-thread-2) [n:127.0.0.1:45595_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4815479 INFO (jetty-launcher-10106-thread-2) [n:127.0.0.1:45595_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4815479 INFO (jetty-launcher-10106-thread-2) [n:127.0.0.1:45595_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4815481 INFO (jetty-launcher-10106-thread-2) [n:127.0.0.1:45595_solr ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-002/node2/.
[junit4] 2> 4815528 INFO (jetty-launcher-10106-thread-1) [n:127.0.0.1:39856_solr ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4815529 INFO (zkConnectionManagerCallback-10162-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4815529 INFO (jetty-launcher-10106-thread-1) [n:127.0.0.1:39856_solr ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4815534 INFO (jetty-launcher-10106-thread-1) [n:127.0.0.1:39856_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
[junit4] 2> 4815537 INFO (jetty-launcher-10106-thread-1) [n:127.0.0.1:39856_solr ] o.a.s.c.ZkController Publish node=127.0.0.1:39856_solr as DOWN
[junit4] 2> 4815538 INFO (jetty-launcher-10106-thread-1) [n:127.0.0.1:39856_solr ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
[junit4] 2> 4815538 INFO (jetty-launcher-10106-thread-1) [n:127.0.0.1:39856_solr ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:39856_solr
[junit4] 2> 4815539 INFO (zkCallback-10143-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 4815539 INFO (zkCallback-10137-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 4815540 INFO (zkCallback-10161-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 4815540 INFO (zkCallback-10155-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 4815553 INFO (jetty-launcher-10106-thread-1) [n:127.0.0.1:39856_solr ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
[junit4] 2> 4815569 INFO (jetty-launcher-10106-thread-1) [n:127.0.0.1:39856_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4815580 INFO (jetty-launcher-10106-thread-1) [n:127.0.0.1:39856_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4815580 INFO (jetty-launcher-10106-thread-1) [n:127.0.0.1:39856_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4815582 INFO (jetty-launcher-10106-thread-1) [n:127.0.0.1:39856_solr ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-002/node1/.
[junit4] 2> 4815635 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ] o.a.s.c.MiniSolrCloudCluster waitForAllNodes: numServers=4
[junit4] 2> 4815636 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 4815637 INFO (zkConnectionManagerCallback-10171-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 4815637 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 4815639 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (4)
[junit4] 2> 4815640 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:38431/solr ready
[junit4] 2> 4815646 INFO (qtp332877956-36821) [n:127.0.0.1:39856_solr ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :overseerstatus with params action=OVERSEERSTATUS&wt=javabin&version=2 and sendToOCPQueue=true
[junit4] 2> 4815658 INFO (qtp332877956-36821) [n:127.0.0.1:39856_solr ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={action=OVERSEERSTATUS&wt=javabin&version=2} status=0 QTime=12
[junit4] 2> 4815659 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[C201F022DD32A68D]) [ ] o.a.s.c.MoveReplicaTest total_jettys: 4
[junit4] 2> 4815660 INFO (qtp332877956-36826) [n:127.0.0.1:39856_solr ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params pullReplicas=1&collection.configName=conf1&maxShardsPerNode=2&autoAddReplicas=false&name=MoveReplicaHDFSTest_coll_false&nrtReplicas=1&action=CREATE&numShards=2&tlogReplicas=0&wt=javabin&version=2 and sendToOCPQueue=true
[junit4] 2> 4815663 INFO (OverseerThreadFactory-9837-thread-2-processing-n:127.0.0.1:43582_solr) [n:127.0.0.1:43582_solr ] o.a.s.c.a.c.CreateCollectionCmd Create collection MoveReplicaHDFSTest_coll_false
[junit4] 2> 4815663 INFO (OverseerCollectionConfigSetProcessor-75722484966359050-127.0.0.1:43582_solr-n_0000000000) [n:127.0.0.1:43582_solr ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000000 doesn't exist. Requestor may have disconnected from ZooKeeper
[junit4] 2> 4815798 INFO (OverseerStateUpdate-75722484966359050-127.0.0.1:43582_solr-n_0000000000) [n:127.0.0.1:43582_solr ] o.a.s.c.o.SliceMutator createReplica() {
[junit4] 2> "operation":"ADDREPLICA",
[junit4] 2> "collection":"MoveReplicaHDFSTest_coll_false",
[junit4] 2> "shard":"shard1",
[junit4] 2> "core":"MoveReplicaHDFSTest_coll_false_shard1_replica_n1",
[junit4] 2> "state":"down",
[junit4] 2> "base_url":"http://127.0.0.1:43582/solr",
[junit4] 2> "type":"NRT",
[junit4] 2> "waitForFinalState":"false"}
[junit4] 2> 4815800 INFO (OverseerStateUpdate-75722484966359050-127.0.0.1:43582_solr-n_0000000000) [n:127.0.0.1:43582_solr ] o.a.s.c.o.SliceMutator createReplica() {
[junit4] 2> "operation":"ADDREPLICA",
[junit4] 2> "collection":"MoveReplicaHDFSTest_coll_false",
[junit4] 2> "shard":"shard1",
[junit4] 2> "core":"MoveReplicaHDFSTest_coll_false_shard1_replica_p2",
[junit4] 2> "state":"down",
[junit4] 2> "base_url":"http://127.0.0.1:40378/solr",
[junit4] 2> "type":"PULL",
[junit4] 2> "waitForFinalState":"false"}
[junit4] 2> 4815802 INFO (OverseerStateUpdate-75722484966359050-127.0.0.1:43582_solr-n_0000000000) [n:127.0.0.1:43582_solr ] o.a.s.c.o.SliceMutator createReplica() {
[junit4] 2> "operation":"ADDREPLICA",
[junit4] 2> "collection":"MoveReplicaHDFSTest_coll_false",
[junit4] 2> "shard":"shard2",
[junit4] 2> "core":"MoveReplicaHDFSTest_coll_false_shard2_replica_n4",
[junit4] 2> "state":"down",
[junit4] 2> "base_url":"http://127.0.0.1:39856/solr",
[junit4] 2> "type":"NRT",
[junit4] 2> "waitForFinalState":"false"}
[junit4] 2> 4815804 INFO (OverseerStateUpdate-75722484966359050-127.0.0.1:43582_solr-n_0000000000) [n:127.0.0.1:43582_solr ] o.a.s.c.o.SliceMutator createReplica() {
[junit4] 2> "operation":"ADDREPLICA",
[junit4] 2> "collection":"MoveReplicaHDFSTest_coll_false",
[junit4] 2> "shard":"shard2",
[junit4] 2> "core":"MoveReplicaHDFSTest_coll_false_shard2_replica_p6",
[junit4] 2> "state":"down",
[junit4] 2> "base_url":"http://127.0.0.1:45595/solr",
[junit4] 2> "type":"PULL",
[junit4] 2> "waitForFinalState":"false"}
[junit4] 2> 4816008 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node3&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard1_replica_n1&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard1&wt=javabin&version=2&replicaType=NRT
[junit4] 2> 4816008 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node7&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard2_replica_n4&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard2&wt=javabin&version=2&replicaType=NRT
[junit4] 2> 4816009 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
[junit4] 2> 4816009 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node5&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard1_replica_p2&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard1&wt=javabin&version=2&replicaType=PULL
[junit4] 2> 4816016 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node8&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard2_replica_p6&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard2&wt=javabin&version=2&replicaType=PULL
[junit4] 2> 4817022 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
[junit4] 2> 4817022 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
[junit4] 2> 4817026 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
[junit4] 2> 4817031 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
[junit4] 2> 4817032 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.s.IndexSchema [MoveReplicaHDFSTest_coll_false_shard1_replica_p2] Schema name=minimal
[junit4] 2> 4817034 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
[junit4] 2> 4817034 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.CoreContainer Creating SolrCore 'MoveReplicaHDFSTest_coll_false_shard1_replica_p2' using configuration from collection MoveReplicaHDFSTest_coll_false, trusted=true
[junit4] 2> 4817034 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.MoveReplicaHDFSTest_coll_false.shard1.replica_p2' (registry 'solr.core.MoveReplicaHDFSTest_coll_false.shard1.replica_p2') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4817035 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.s.IndexSchema [MoveReplicaHDFSTest_coll_false_shard2_replica_n4] Schema name=minimal
[junit4] 2> 4817037 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home
[junit4] 2> 4817037 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
[junit4] 2> 4817037 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.SolrCore [[MoveReplicaHDFSTest_coll_false_shard1_replica_p2] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-002/node3/MoveReplicaHDFSTest_coll_false_shard1_replica_p2], dataDir=[hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node5/data/]
[junit4] 2> 4817037 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.s.IndexSchema [MoveReplicaHDFSTest_coll_false_shard1_replica_n1] Schema name=minimal
[junit4] 2> 4817038 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
[junit4] 2> 4817038 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.CoreContainer Creating SolrCore 'MoveReplicaHDFSTest_coll_false_shard2_replica_n4' using configuration from collection MoveReplicaHDFSTest_coll_false, trusted=true
[junit4] 2> 4817038 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.MoveReplicaHDFSTest_coll_false.shard2.replica_n4' (registry 'solr.core.MoveReplicaHDFSTest_coll_false.shard2.replica_n4') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4817039 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node5/data/snapshot_metadata
[junit4] 2> 4817045 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home
[junit4] 2> 4817046 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
[junit4] 2> 4817046 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.SolrCore [[MoveReplicaHDFSTest_coll_false_shard2_replica_n4] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-002/node1/MoveReplicaHDFSTest_coll_false_shard2_replica_n4], dataDir=[hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node7/data/]
[junit4] 2> 4817046 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
[junit4] 2> 4817046 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.CoreContainer Creating SolrCore 'MoveReplicaHDFSTest_coll_false_shard1_replica_n1' using configuration from collection MoveReplicaHDFSTest_coll_false, trusted=true
[junit4] 2> 4817046 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.s.IndexSchema [MoveReplicaHDFSTest_coll_false_shard2_replica_p6] Schema name=minimal
[junit4] 2> 4817046 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.MoveReplicaHDFSTest_coll_false.shard1.replica_n1' (registry 'solr.core.MoveReplicaHDFSTest_coll_false.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4817047 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home
[junit4] 2> 4817047 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
[junit4] 2> 4817047 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.SolrCore [[MoveReplicaHDFSTest_coll_false_shard1_replica_n1] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-002/node4/MoveReplicaHDFSTest_coll_false_shard1_replica_n1], dataDir=[hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node3/data/]
[junit4] 2> 4817047 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node7/data/snapshot_metadata
[junit4] 2> 4817048 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node3/data/snapshot_metadata
[junit4] 2> 4817048 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
[junit4] 2> 4817048 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.CoreContainer Creating SolrCore 'MoveReplicaHDFSTest_coll_false_shard2_replica_p6' using configuration from collection MoveReplicaHDFSTest_coll_false, trusted=true
[junit4] 2> 4817048 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.MoveReplicaHDFSTest_coll_false.shard2.replica_p6' (registry 'solr.core.MoveReplicaHDFSTest_coll_false.shard2.replica_p6') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@52cbc70d
[junit4] 2> 4817048 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home
[junit4] 2> 4817048 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
[junit4] 2> 4817048 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.SolrCore [[MoveReplicaHDFSTest_coll_false_shard2_replica_p6] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-002/node2/MoveReplicaHDFSTest_coll_false_shard2_replica_p6], dataDir=[hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node8/data/]
[junit4] 2> 4817049 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node8/data/snapshot_metadata
[junit4] 2> 4817052 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 4817052 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 4817052 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.HdfsDirectoryFactory Creating new global HDFS BlockCache
[junit4] 2> 4817054 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 4817054 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 4817054 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 4817054 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 4817055 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 4817055 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 4817073 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 4817073 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 4817074 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 4817074 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node5/data
[junit4] 2> 4817075 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node3/data
[junit4] 2> 4817075 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node7/data
[junit4] 2> 4817083 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 4817084 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node8/data
[junit4] 2> 4817092 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node5/data/index
[junit4] 2> 4817098 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 4817098 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 4817099 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node3/data/index
[junit4] 2> 4817100 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node7/data/index
[junit4] 2> 4817101 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 4817118 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 4817119 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 4817119 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 4817120 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:43389/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node8/data/index
[junit4] 2> 4817121 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 4817123 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 4817125 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 4817127 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 4817127 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 4817132 INFO (qtp270649806-36814) [n:127.0.0.1:45595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 4817238 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
[junit4] 2> 4817238 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
[junit4] 2> 4817245 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
[junit4] 2> 4817245 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 4817245 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
[junit4] 2> 4817246 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
[junit4] 2> 4817246 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 4817246 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
[junit4] 2> 4817252 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@4fb2eb70[MoveReplicaHDFSTest_coll_false_shard1_replica_p2] main]
[junit4] 2> 4817254 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
[junit4] 2> 4817254 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 4817255 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
[junit4] 2> 4817256 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
[junit4] 2> 4817256 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
[junit4] 2> 4817274 INFO (searcherExecutor-9857-thread-1-processing-n:127.0.0.1:40378_solr x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.SolrCore [MoveReplicaHDFSTest_coll_false_shard1_replica_p2] Registered new searcher Searcher@4fb2eb70[MoveReplicaHDFSTest_coll_false_shard1_replica_p2] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 4817276 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
[junit4] 2> 4817276 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
[junit4] 2> 4817277 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.ZkController MoveReplicaHDFSTest_coll_false_shard1_replica_p2 starting background replication from leader
[junit4] 2> 4817277 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.ReplicateFromLeader Will start replication from leader with poll interval: 00:00:01
[junit4] 2> 4817279 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.h.ReplicationHandler Poll scheduled at an interval of 1000ms
[junit4] 2> 4817279 INFO (qtp1081059240-36833) [n:127.0.0.1:40378_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
[junit4] 2> 4817285 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@76d745ac[MoveReplicaHDFSTest_coll_false_shard2_replica_n4] main]
[junit4] 2> 4817286 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
[junit4] 2> 4817287 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 4817287 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@58ad5b4c[MoveReplicaHDFSTest_coll_false_shard1_replica_n1] main]
[junit4] 2> 4817287 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
[junit4] 2> 4817287 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1650022835252363264
[junit4] 2> 4817288 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
[junit4] 2> 4817288 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 4817289 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
[junit4] 2> 4817289 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1650022835254460416
[junit4] 2> 4817290 INFO (searcherExecutor-9858-thread-1-processing-n:127.0.0.1:39856_solr x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.SolrCore [MoveReplicaHDFSTest_coll_false_shard2_replica_n4] Registered new searcher Searcher@76d745ac[MoveReplicaHDFSTest_coll_false_shard2_replica_n4] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 4817292 INFO (searcherExecutor-9859-thread-1-processing-n:127.0.0.1:43582_solr x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.SolrCore [MoveReplicaHDFSTest_coll_false_shard1_replica_n1] Registered new searcher Searcher@58ad5b4c[MoveReplicaHDFSTest_coll_false_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 4817294 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/MoveReplicaHDFSTest_coll_false/terms/shard2 to Terms{values={core_node7=0}, version=0}
[junit4] 2> 4817294 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/MoveReplicaHDFSTest_coll_false/leaders/shard2
[junit4] 2> 4817295 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/MoveReplicaHDFSTest_coll_false/terms/shard1 to Terms{values={core_node3=0}, version=0}
[junit4] 2> 4817295 INFO (qtp980436647-36825) [n:127.0.0.1:43582_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/MoveReplicaHDFSTest_coll_false/leaders/shard1
[junit4] 2> 4817299 INFO (qtp332877956-36834) [n:127.0.0.1:39856_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.ShardLeaderElectionContext Enough replicas found to con
[...truncated too long message...]
ng
[junit4] 2> 4840875 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@c8e4c58{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,UNAVAILABLE}
[junit4] 2> 4840877 WARN (BP-1876136977-127.0.0.1-1573584395591 heartbeating to lucene2-us-west.apache.org/127.0.0.1:43389) [ ] o.a.h.h.s.d.IncrementalBlockReportManager IncrementalBlockReportManager interrupted
[junit4] 2> 4840877 WARN (BP-1876136977-127.0.0.1-1573584395591 heartbeating to lucene2-us-west.apache.org/127.0.0.1:43389) [ ] o.a.h.h.s.d.DataNode Ending block pool service for: Block pool BP-1876136977-127.0.0.1-1573584395591 (Datanode Uuid e9ca55f1-537e-47d5-90f3-74ae94ab9c16) service to lucene2-us-west.apache.org/127.0.0.1:43389
[junit4] 2> 4840885 WARN (refreshUsed-/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-001/hdfsBaseDir/data/data3/current/BP-1876136977-127.0.0.1-1573584395591) [ ] o.a.h.f.CachingGetSpaceUsed Thread Interrupted waiting to refresh disk information: sleep interrupted
[junit4] 2> 4840893 WARN (refreshUsed-/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-001/hdfsBaseDir/data/data4/current/BP-1876136977-127.0.0.1-1573584395591) [ ] o.a.h.f.CachingGetSpaceUsed Thread Interrupted waiting to refresh disk information: sleep interrupted
[junit4] 2> 4840894 WARN (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.a.h.h.s.d.DirectoryScanner DirectoryScanner: shutdown has been called
[junit4] 2> 4840903 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.h.ContextHandler Stopped o.e.j.w.WebAppContext@647efeec{datanode,/,null,UNAVAILABLE}{/datanode}
[junit4] 2> 4840903 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.AbstractConnector Stopped ServerConnector@1220cbd4{HTTP/1.1,[http/1.1]}{localhost:0}
[junit4] 2> 4840903 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.session node0 Stopped scavenging
[junit4] 2> 4840903 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@4a4d0606{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,UNAVAILABLE}
[junit4] 2> 4840906 WARN (BP-1876136977-127.0.0.1-1573584395591 heartbeating to lucene2-us-west.apache.org/127.0.0.1:43389) [ ] o.a.h.h.s.d.IncrementalBlockReportManager IncrementalBlockReportManager interrupted
[junit4] 2> 4840906 WARN (BP-1876136977-127.0.0.1-1573584395591 heartbeating to lucene2-us-west.apache.org/127.0.0.1:43389) [ ] o.a.h.h.s.d.DataNode Ending block pool service for: Block pool BP-1876136977-127.0.0.1-1573584395591 (Datanode Uuid 60b3af80-d51c-46e6-b27a-65908421715b) service to lucene2-us-west.apache.org/127.0.0.1:43389
[junit4] 2> 4840913 WARN (refreshUsed-/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-001/hdfsBaseDir/data/data1/current/BP-1876136977-127.0.0.1-1573584395591) [ ] o.a.h.f.CachingGetSpaceUsed Thread Interrupted waiting to refresh disk information: sleep interrupted
[junit4] 2> 4840920 WARN (refreshUsed-/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001/tempDir-001/hdfsBaseDir/data/data2/current/BP-1876136977-127.0.0.1-1573584395591) [ ] o.a.h.f.CachingGetSpaceUsed Thread Interrupted waiting to refresh disk information: sleep interrupted
[junit4] 2> 4840928 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.h.ContextHandler Stopped o.e.j.w.WebAppContext@4fdf947d{hdfs,/,null,UNAVAILABLE}{/hdfs}
[junit4] 2> 4840929 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.AbstractConnector Stopped ServerConnector@217ee445{HTTP/1.1,[http/1.1]}{lucene2-us-west.apache.org:0}
[junit4] 2> 4840929 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.session node0 Stopped scavenging
[junit4] 2> 4840929 INFO (SUITE-MoveReplicaHDFSTest-seed#[C201F022DD32A68D]-worker) [ ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@139bab63{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,UNAVAILABLE}
[junit4] 2> NOTE: leaving temporary files on disk at: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_C201F022DD32A68D-001
[junit4] 2> Nov 12, 2019 6:47:03 PM com.carrotsearch.randomizedtesting.ThreadLeakControl checkThreadLeaks
[junit4] 2> WARNING: Will linger awaiting termination of 32 leaked thread(s).
[junit4] 2> NOTE: test params are: codec=Asserting(Lucene80): {_root_=PostingsFormat(name=LuceneVarGapDocFreqInterval), id=TestBloomFilteredLucenePostings(BloomFilteringPostingsFormat(Lucene50(blocksize=128)))}, docValues:{_version_=DocValuesFormat(name=Asserting)}, maxPointsInLeafNode=1871, maxMBSortInHeap=6.2570705227755194, sim=Asserting(org.apache.lucene.search.similarities.AssertingSimilarity@4e364a29), locale=seh-MZ, timezone=SystemV/YST9
[junit4] 2> NOTE: Linux 4.4.0-112-generic amd64/Oracle Corporation 11.0.4 (64-bit)/cpus=4,threads=5,free=59608232,total=536870912
[junit4] 2> NOTE: All tests run in this JVM: [SolrRrdBackendFactoryTest, TestJoin, UpdateRequestProcessorFactoryTest, TestJavabinTupleStreamParser, AnalysisErrorHandlingTest, OverseerModifyCollectionTest, TestBinaryField, TestSimExecutePlanAction, NodeAddedTriggerIntegrationTest, TestBM25SimilarityFactory, AsyncCallRequestStatusResponseTest, TestPolicyCloud, LukeRequestHandlerTest, TestElisionMultitermQuery, TestXmlQParser, TestLegacyField, ReturnFieldsTest, SolrXmlInZkTest, HdfsUnloadDistributedZkTest, SimpleFacetsTest, ManagedSchemaRoundRobinCloudTest, CdcrRequestHandlerTest, ConfigureRecoveryStrategyTest, TestFaceting, CursorMarkTest, SolrTestCaseJ4Test, ZkControllerTest, BasicAuthOnSingleNodeTest, IndexSizeEstimatorTest, TokenizerChainTest, TestSystemIdResolver, TestPostingsSolrHighlighter, DeleteStatusTest, DeleteNodeTest, TestStressReorder, BadCopyFieldTest, TestClusterProperties, TestReplicationHandlerBackup, DistributedIntervalFacetingTest, TestSortByMinMaxFunction, URLClassifyProcessorTest, TestHashPartitioner, SpellCheckComponentTest, DistributedSpellCheckComponentTest, SolrIndexMetricsTest, TestMiniSolrCloudClusterSSL, DocValuesMultiTest, HLLSerializationTest, AnalyticsMergeStrategyTest, MetricsHistoryHandlerTest, TestSolrConfigHandlerCloud, RemoteQueryErrorTest, TestHalfAndHalfDocValues, SolrCoreTest, PhrasesIdentificationComponentTest, ClusterStateTest, BasicDistributedZkTest, SpellCheckCollatorTest, OrderedExecutorTest, HighlighterConfigTest, TestNoOpRegenerator, OverseerTaskQueueTest, TestCoreContainer, JWTAuthPluginTest, TestHttpServletCarrier, TestSnapshotCloudManager, QueryResultKeyTest, HttpPartitionTest, TestHttpShardHandlerFactory, BlockJoinFacetDistribTest, TestReplicaProperties, HdfsDirectoryTest, TestPullReplica, ChangedSchemaMergeTest, SoftAutoCommitTest, TestShardHandlerFactory, TestScoreJoinQPNoScore, TestDistribDocBasedVersion, TestSolr4Spatial2, TestLMJelinekMercerSimilarityFactory, ClassificationUpdateProcessorFactoryTest, 
TestDynamicLoadingUrl, PingRequestHandlerTest, ConnectionReuseTest, CustomHighlightComponentTest, TestCloudRecovery, ScheduledMaintenanceTriggerTest, TestExactSharedStatsCache, LegacyCloudClusterPropTest, TestSolrJ, TestFieldCacheSort, ExternalFileFieldSortTest, TestRandomFlRTGCloud, UnloadDistributedZkTest, TestManagedSchemaThreadSafety, DistributedQueryComponentCustomSortTest, TestSolrCloudWithHadoopAuthPlugin, SmileWriterTest, OutputWriterTest, TestStandardQParsers, TriggerIntegrationTest, TestMacroExpander, TestSchemaVersionResource, CoreAdminRequestStatusTest, TestIndexSearcher, CircularListTest, TestDocTermOrdsUninvertLimit, TestSubQueryTransformerDistrib, FieldAnalysisRequestHandlerTest, TestCloudPhrasesIdentificationComponent, TestCollectionsAPIViaSolrCloudCluster, IgnoreCommitOptimizeUpdateProcessorFactoryTest, TestExportWriter, TestTolerantSearch, CollectionsAPIAsyncDistributedZkTest, SliceStateTest, TestNumericRangeQuery64, TaggingAttributeTest, TestPerFieldSimilarity, TestSimComputePlanAction, TestFieldResource, CdcrBootstrapTest, TestStressCloudBlindAtomicUpdates, TestRangeQuery, TimeZoneUtilsTest, TestSimPolicyCloud, UninvertDocValuesMergePolicyTest, ZkShardTermsTest, TestComponentsName, DistributedFacetPivotLargeTest, ResponseLogComponentTest, TestFunctionQuery, TestZkChroot, TestDFRSimilarityFactory, GraphQueryTest, TestCoreBackup, NodeLostTriggerTest, CustomCollectionTest, TestRecovery, TestSolrCloudSnapshots, TestSolrQueryResponse, DistributedDebugComponentTest, NestedAtomicUpdateTest, QueryEqualityTest, ChaosMonkeySafeLeaderWithPullReplicasTest, ConnectionManagerTest, CreateRoutedAliasTest, DistribDocExpirationUpdateProcessorTest, DistribJoinFromCollectionTest, DistributedQueueTest, DocValuesNotIndexedTest, ForceLeaderTest, LeaderElectionContextKeyTest, LeaderElectionIntegrationTest, LeaderFailoverAfterPartitionTest, MetricsHistoryWithAuthIntegrationTest, MigrateRouteKeyTest, MoveReplicaHDFSTest]
[junit4] Completed [502/888 (1!)] on J2 in 30.62s, 4 tests, 1 failure, 2 skipped <<< FAILURES!
[...truncated 54210 lines...]
[JENKINS] Lucene-Solr-NightlyTests-master - Build # 2015 - Failure
Posted by Apache Jenkins Server <je...@builds.apache.org>.
Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-master/2015/
1 tests failed.
FAILED: org.apache.solr.cloud.MoveReplicaHDFSTest.test
Error Message:
expected not same
Stack Trace:
java.lang.AssertionError: expected not same
at __randomizedtesting.SeedInfo.seed([B654420E9A68B436:3E007DD43494D9CE]:0)
at org.junit.Assert.fail(Assert.java:88)
at org.junit.Assert.failSame(Assert.java:819)
at org.junit.Assert.assertNotSame(Assert.java:798)
at org.junit.Assert.assertNotSame(Assert.java:811)
at org.apache.solr.cloud.MoveReplicaTest.test(MoveReplicaTest.java:147)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
at java.base/java.lang.Thread.run(Thread.java:834)
Build Log:
[...truncated 13989 lines...]
[junit4] Suite: org.apache.solr.cloud.MoveReplicaHDFSTest
[junit4] 2> 927744 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.a.s.SolrTestCaseJ4 Created dataDir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_B654420E9A68B436-001/data-dir-35-001
[junit4] 2> 927744 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.a.s.SolrTestCaseJ4 Using TrieFields (NUMERIC_POINTS_SYSPROP=false) w/NUMERIC_DOCVALUES_SYSPROP=true
[junit4] 2> 927745 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.a.s.SolrTestCaseJ4 Randomized ssl (true) and clientAuth (true) via: @org.apache.solr.util.RandomizeSSL(reason="", ssl=0.0/0.0, value=0.0/0.0, clientAuth=0.0/0.0)
[junit4] 2> 927745 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
[junit4] 1> Formatting using clusterid: testClusterID
[junit4] 2> 927813 WARN (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.a.h.m.i.MetricsConfig Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
[junit4] 2> 927826 WARN (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
[junit4] 2> 927828 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 927836 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 927837 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 927837 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.e.j.s.session node0 Scavenging every 660000ms
[junit4] 2> 927837 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@4d7517dc{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
[junit4] 2> 928033 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@615a69aa{hdfs,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/jetty-lucene2-us-west.apache.org-44614-hdfs-_-any-9083643514930335542.dir/webapp/,AVAILABLE}{/hdfs}
[junit4] 2> 928033 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.e.j.s.AbstractConnector Started ServerConnector@2a4b3702{HTTP/1.1,[http/1.1]}{lucene2-us-west.apache.org:44614}
[junit4] 2> 928033 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.e.j.s.Server Started @928091ms
[junit4] 2> 928141 WARN (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
[junit4] 2> 928142 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 928148 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 928148 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 928148 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 928149 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@41437e03{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
[junit4] 2> 928284 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@33d85234{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/jetty-localhost-45893-datanode-_-any-9192429820572968021.dir/webapp/,AVAILABLE}{/datanode}
[junit4] 2> 928285 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.e.j.s.AbstractConnector Started ServerConnector@20beaa55{HTTP/1.1,[http/1.1]}{localhost:45893}
[junit4] 2> 928285 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.e.j.s.Server Started @928342ms
[junit4] 2> 928417 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0xc508465f81912249: Processing first storage report for DS-30bf5c18-9548-40e5-bf82-364e110e17e9 from datanode 50567c2a-3b64-44b8-9852-22d976b1c0d2
[junit4] 2> 928417 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0xc508465f81912249: from storage DS-30bf5c18-9548-40e5-bf82-364e110e17e9 node DatanodeRegistration(127.0.0.1:42962, datanodeUuid=50567c2a-3b64-44b8-9852-22d976b1c0d2, infoPort=33233, infoSecurePort=0, ipcPort=33413, storageInfo=lv=-57;cid=testClusterID;nsid=1664912620;c=1573502744279), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
[junit4] 2> 928418 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0xc508465f81912249: Processing first storage report for DS-5ae76fe0-0395-45b2-9583-14f40e922dbb from datanode 50567c2a-3b64-44b8-9852-22d976b1c0d2
[junit4] 2> 928418 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0xc508465f81912249: from storage DS-5ae76fe0-0395-45b2-9583-14f40e922dbb node DatanodeRegistration(127.0.0.1:42962, datanodeUuid=50567c2a-3b64-44b8-9852-22d976b1c0d2, infoPort=33233, infoSecurePort=0, ipcPort=33413, storageInfo=lv=-57;cid=testClusterID;nsid=1664912620;c=1573502744279), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
[junit4] 2> 928443 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[B654420E9A68B436]) [ ] o.a.s.SolrTestCaseJ4 ###Starting testNormalMove
[junit4] 2> 928444 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[B654420E9A68B436]) [ ] o.a.s.c.MiniSolrCloudCluster Starting cluster of 4 servers in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_B654420E9A68B436-001/tempDir-002
[junit4] 2> 928444 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[B654420E9A68B436]) [ ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
[junit4] 2> 928445 INFO (ZkTestServer Run Thread) [ ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
[junit4] 2> 928445 INFO (ZkTestServer Run Thread) [ ] o.a.s.c.ZkTestServer Starting server
[junit4] 2> 928545 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[B654420E9A68B436]) [ ] o.a.s.c.ZkTestServer start zk server on port:46350
[junit4] 2> 928545 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[B654420E9A68B436]) [ ] o.a.s.c.ZkTestServer waitForServerUp: 127.0.0.1:46350
[junit4] 2> 928545 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[B654420E9A68B436]) [ ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:46350
[junit4] 2> 928545 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[B654420E9A68B436]) [ ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 46350
[junit4] 2> 928547 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[B654420E9A68B436]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 928570 INFO (zkConnectionManagerCallback-1790-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 928570 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[B654420E9A68B436]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 928574 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[B654420E9A68B436]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 928575 INFO (zkConnectionManagerCallback-1792-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 928575 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[B654420E9A68B436]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 928577 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[B654420E9A68B436]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 928578 INFO (zkConnectionManagerCallback-1794-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 928578 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[B654420E9A68B436]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 928700 WARN (jetty-launcher-1795-thread-2) [ ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 928700 INFO (jetty-launcher-1795-thread-2) [ ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 928700 INFO (jetty-launcher-1795-thread-2) [ ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
[junit4] 2> 928700 INFO (jetty-launcher-1795-thread-2) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 928702 WARN (jetty-launcher-1795-thread-3) [ ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 928703 INFO (jetty-launcher-1795-thread-3) [ ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 928703 INFO (jetty-launcher-1795-thread-3) [ ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
[junit4] 2> 928703 INFO (jetty-launcher-1795-thread-3) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 928720 WARN (jetty-launcher-1795-thread-1) [ ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 928721 INFO (jetty-launcher-1795-thread-1) [ ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 928721 INFO (jetty-launcher-1795-thread-1) [ ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
[junit4] 2> 928721 INFO (jetty-launcher-1795-thread-1) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 928728 WARN (jetty-launcher-1795-thread-4) [ ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 928729 INFO (jetty-launcher-1795-thread-4) [ ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 928729 INFO (jetty-launcher-1795-thread-4) [ ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
[junit4] 2> 928729 INFO (jetty-launcher-1795-thread-4) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 928741 INFO (jetty-launcher-1795-thread-1) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 928741 INFO (jetty-launcher-1795-thread-1) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 928741 INFO (jetty-launcher-1795-thread-1) [ ] o.e.j.s.session node0 Scavenging every 660000ms
[junit4] 2> 928742 INFO (jetty-launcher-1795-thread-1) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@21e1baea{/solr,null,AVAILABLE}
[junit4] 2> 928743 INFO (jetty-launcher-1795-thread-1) [ ] o.e.j.s.AbstractConnector Started ServerConnector@205430db{ssl,[ssl, alpn, http/1.1, h2]}{127.0.0.1:34574}
[junit4] 2> 928743 INFO (jetty-launcher-1795-thread-1) [ ] o.e.j.s.Server Started @928800ms
[junit4] 2> 928743 INFO (jetty-launcher-1795-thread-1) [ ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=34574}
[junit4] 2> 928744 ERROR (jetty-launcher-1795-thread-1) [ ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
[junit4] 2> 928744 INFO (jetty-launcher-1795-thread-1) [ ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 928744 INFO (jetty-launcher-1795-thread-1) [ ] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version 9.0.0
[junit4] 2> 928744 INFO (jetty-launcher-1795-thread-1) [ ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 928744 INFO (jetty-launcher-1795-thread-1) [ ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 928744 INFO (jetty-launcher-1795-thread-1) [ ] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time: 2019-11-11T20:05:45.247243Z
[junit4] 2> 928770 INFO (jetty-launcher-1795-thread-4) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 928770 INFO (jetty-launcher-1795-thread-4) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 928770 INFO (jetty-launcher-1795-thread-4) [ ] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 928771 INFO (jetty-launcher-1795-thread-4) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@7fbe21e4{/solr,null,AVAILABLE}
[junit4] 2> 928772 INFO (jetty-launcher-1795-thread-4) [ ] o.e.j.s.AbstractConnector Started ServerConnector@4948e70{ssl,[ssl, alpn, http/1.1, h2]}{127.0.0.1:40116}
[junit4] 2> 928772 INFO (jetty-launcher-1795-thread-4) [ ] o.e.j.s.Server Started @928829ms
[junit4] 2> 928772 INFO (jetty-launcher-1795-thread-4) [ ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=40116}
[junit4] 2> 928772 ERROR (jetty-launcher-1795-thread-4) [ ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
[junit4] 2> 928772 INFO (jetty-launcher-1795-thread-4) [ ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 928772 INFO (jetty-launcher-1795-thread-4) [ ] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version 9.0.0
[junit4] 2> 928773 INFO (jetty-launcher-1795-thread-4) [ ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 928773 INFO (jetty-launcher-1795-thread-4) [ ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 928773 INFO (jetty-launcher-1795-thread-4) [ ] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time: 2019-11-11T20:05:45.276047Z
[junit4] 2> 928784 INFO (jetty-launcher-1795-thread-4) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 928800 INFO (jetty-launcher-1795-thread-1) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 928801 INFO (jetty-launcher-1795-thread-2) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 928801 INFO (jetty-launcher-1795-thread-2) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 928801 INFO (jetty-launcher-1795-thread-2) [ ] o.e.j.s.session node0 Scavenging every 660000ms
[junit4] 2> 928801 INFO (jetty-launcher-1795-thread-2) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@1c4f8f01{/solr,null,AVAILABLE}
[junit4] 2> 928803 INFO (jetty-launcher-1795-thread-2) [ ] o.e.j.s.AbstractConnector Started ServerConnector@152aca95{ssl,[ssl, alpn, http/1.1, h2]}{127.0.0.1:36024}
[junit4] 2> 928803 INFO (jetty-launcher-1795-thread-2) [ ] o.e.j.s.Server Started @928860ms
[junit4] 2> 928803 INFO (jetty-launcher-1795-thread-2) [ ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=36024}
[junit4] 2> 928803 ERROR (jetty-launcher-1795-thread-2) [ ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
[junit4] 2> 928803 INFO (jetty-launcher-1795-thread-2) [ ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 928803 INFO (jetty-launcher-1795-thread-2) [ ] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version 9.0.0
[junit4] 2> 928803 INFO (jetty-launcher-1795-thread-2) [ ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 928803 INFO (jetty-launcher-1795-thread-2) [ ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 928803 INFO (jetty-launcher-1795-thread-2) [ ] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time: 2019-11-11T20:05:45.306737Z
[junit4] 2> 928812 INFO (zkConnectionManagerCallback-1799-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 928812 INFO (jetty-launcher-1795-thread-4) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 928813 INFO (zkConnectionManagerCallback-1797-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 928813 INFO (jetty-launcher-1795-thread-4) [ ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
[junit4] 2> 928813 INFO (jetty-launcher-1795-thread-1) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 928816 INFO (jetty-launcher-1795-thread-4) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
[junit4] 2> 928816 INFO (jetty-launcher-1795-thread-4) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 928817 INFO (jetty-launcher-1795-thread-4) [ ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@86fa7ba, but no JMX reporters were configured - adding default JMX reporter.
[junit4] 2> 928818 INFO (jetty-launcher-1795-thread-1) [ ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
[junit4] 2> 928821 INFO (jetty-launcher-1795-thread-1) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
[junit4] 2> 928821 INFO (jetty-launcher-1795-thread-1) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 928822 INFO (jetty-launcher-1795-thread-1) [ ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@86fa7ba, but no JMX reporters were configured - adding default JMX reporter.
[junit4] 2> 928829 INFO (jetty-launcher-1795-thread-2) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 928836 INFO (zkConnectionManagerCallback-1801-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 928836 INFO (jetty-launcher-1795-thread-2) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 928836 INFO (jetty-launcher-1795-thread-3) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 928836 INFO (jetty-launcher-1795-thread-3) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 928837 INFO (jetty-launcher-1795-thread-3) [ ] o.e.j.s.session node0 Scavenging every 660000ms
[junit4] 2> 928837 INFO (jetty-launcher-1795-thread-3) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@7f18263a{/solr,null,AVAILABLE}
[junit4] 2> 928839 INFO (jetty-launcher-1795-thread-3) [ ] o.e.j.s.AbstractConnector Started ServerConnector@15ead0e9{ssl,[ssl, alpn, http/1.1, h2]}{127.0.0.1:42389}
[junit4] 2> 928839 INFO (jetty-launcher-1795-thread-3) [ ] o.e.j.s.Server Started @928896ms
[junit4] 2> 928839 INFO (jetty-launcher-1795-thread-3) [ ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=42389}
[junit4] 2> 928839 ERROR (jetty-launcher-1795-thread-3) [ ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
[junit4] 2> 928839 INFO (jetty-launcher-1795-thread-3) [ ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 928839 INFO (jetty-launcher-1795-thread-3) [ ] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version 9.0.0
[junit4] 2> 928839 INFO (jetty-launcher-1795-thread-3) [ ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 928839 INFO (jetty-launcher-1795-thread-3) [ ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 928839 INFO (jetty-launcher-1795-thread-3) [ ] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time: 2019-11-11T20:05:45.342920Z
[junit4] 2> 928843 INFO (jetty-launcher-1795-thread-3) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 928848 INFO (zkConnectionManagerCallback-1803-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 928848 INFO (jetty-launcher-1795-thread-3) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 928921 INFO (jetty-launcher-1795-thread-2) [ ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
[junit4] 2> 928922 INFO (jetty-launcher-1795-thread-3) [ ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
[junit4] 2> 928939 INFO (jetty-launcher-1795-thread-3) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
[junit4] 2> 928939 INFO (jetty-launcher-1795-thread-3) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 928949 INFO (jetty-launcher-1795-thread-2) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
[junit4] 2> 928949 INFO (jetty-launcher-1795-thread-2) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 928950 INFO (jetty-launcher-1795-thread-2) [ ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@86fa7ba, but no JMX reporters were configured - adding default JMX reporter.
[junit4] 2> 928962 INFO (jetty-launcher-1795-thread-3) [ ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@86fa7ba, but no JMX reporters were configured - adding default JMX reporter.
[junit4] 2> 929042 INFO (jetty-launcher-1795-thread-4) [ ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
[junit4] 2> 929048 WARN (jetty-launcher-1795-thread-4) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@794b5105[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 929048 WARN (jetty-launcher-1795-thread-4) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@794b5105[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 929063 WARN (jetty-launcher-1795-thread-4) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@ad15977[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 929063 WARN (jetty-launcher-1795-thread-4) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@ad15977[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 929065 INFO (jetty-launcher-1795-thread-4) [ ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:46350/solr
[junit4] 2> 929066 INFO (jetty-launcher-1795-thread-4) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 929067 INFO (zkConnectionManagerCallback-1811-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 929067 INFO (jetty-launcher-1795-thread-4) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 929150 INFO (jetty-launcher-1795-thread-1) [ ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
[junit4] 2> 929164 WARN (jetty-launcher-1795-thread-1) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@262cfe38[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 929164 WARN (jetty-launcher-1795-thread-1) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@262cfe38[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 929196 WARN (jetty-launcher-1795-thread-1) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@21eece8d[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 929196 WARN (jetty-launcher-1795-thread-1) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@21eece8d[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 929198 INFO (jetty-launcher-1795-thread-1) [ ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:46350/solr
[junit4] 2> 929203 INFO (jetty-launcher-1795-thread-4) [n:127.0.0.1:40116_solr ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 929245 INFO (zkConnectionManagerCallback-1816-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 929246 INFO (jetty-launcher-1795-thread-4) [n:127.0.0.1:40116_solr ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 929256 INFO (jetty-launcher-1795-thread-1) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 929264 INFO (zkConnectionManagerCallback-1820-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 929264 INFO (jetty-launcher-1795-thread-1) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 929375 INFO (jetty-launcher-1795-thread-1) [n:127.0.0.1:34574_solr ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 929377 INFO (jetty-launcher-1795-thread-4) [n:127.0.0.1:40116_solr ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:40116_solr
[junit4] 2> 929377 INFO (jetty-launcher-1795-thread-4) [n:127.0.0.1:40116_solr ] o.a.s.c.Overseer Overseer (id=75717133839630344-127.0.0.1:40116_solr-n_0000000000) starting
[junit4] 2> 929380 INFO (zkConnectionManagerCallback-1825-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 929380 INFO (jetty-launcher-1795-thread-1) [n:127.0.0.1:34574_solr ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 929386 INFO (jetty-launcher-1795-thread-1) [n:127.0.0.1:34574_solr ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:34574_solr
[junit4] 2> 929392 INFO (zkCallback-1815-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 929397 INFO (jetty-launcher-1795-thread-2) [ ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
[junit4] 2> 929399 WARN (jetty-launcher-1795-thread-2) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@4fc44e04[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 929399 WARN (jetty-launcher-1795-thread-2) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@4fc44e04[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 929406 INFO (OverseerStateUpdate-75717133839630344-127.0.0.1:40116_solr-n_0000000000) [n:127.0.0.1:40116_solr ] o.a.s.c.Overseer Starting to work on the main queue : 127.0.0.1:40116_solr
[junit4] 2> 929408 INFO (zkCallback-1824-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 929408 INFO (jetty-launcher-1795-thread-4) [n:127.0.0.1:40116_solr ] o.a.s.c.ZkController Publish node=127.0.0.1:40116_solr as DOWN
[junit4] 2> 929410 INFO (jetty-launcher-1795-thread-4) [n:127.0.0.1:40116_solr ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
[junit4] 2> 929410 INFO (jetty-launcher-1795-thread-4) [n:127.0.0.1:40116_solr ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:40116_solr
[junit4] 2> 929412 INFO (zkCallback-1815-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 929413 INFO (zkCallback-1824-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 929416 INFO (jetty-launcher-1795-thread-1) [n:127.0.0.1:34574_solr ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
[junit4] 2> 929440 INFO (jetty-launcher-1795-thread-1) [n:127.0.0.1:34574_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@86fa7ba
[junit4] 2> 929448 INFO (jetty-launcher-1795-thread-4) [n:127.0.0.1:40116_solr ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
[junit4] 2> 929472 INFO (jetty-launcher-1795-thread-1) [n:127.0.0.1:34574_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@86fa7ba
[junit4] 2> 929472 INFO (jetty-launcher-1795-thread-1) [n:127.0.0.1:34574_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@86fa7ba
[junit4] 2> 929475 INFO (jetty-launcher-1795-thread-1) [n:127.0.0.1:34574_solr ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_B654420E9A68B436-001/tempDir-002/node1/.
[junit4] 2> 929483 WARN (jetty-launcher-1795-thread-2) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@20dba287[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 929483 WARN (jetty-launcher-1795-thread-2) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@20dba287[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 929485 INFO (jetty-launcher-1795-thread-2) [ ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:46350/solr
[junit4] 2> 929486 INFO (jetty-launcher-1795-thread-2) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 929500 INFO (jetty-launcher-1795-thread-4) [n:127.0.0.1:40116_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@86fa7ba
[junit4] 2> 929504 INFO (zkConnectionManagerCallback-1837-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 929504 INFO (jetty-launcher-1795-thread-2) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 929513 INFO (jetty-launcher-1795-thread-4) [n:127.0.0.1:40116_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@86fa7ba
[junit4] 2> 929513 INFO (jetty-launcher-1795-thread-4) [n:127.0.0.1:40116_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@86fa7ba
[junit4] 2> 929528 INFO (jetty-launcher-1795-thread-4) [n:127.0.0.1:40116_solr ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_B654420E9A68B436-001/tempDir-002/node4/.
[junit4] 2> 929600 INFO (jetty-launcher-1795-thread-3) [ ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
[junit4] 2> 929604 WARN (jetty-launcher-1795-thread-3) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@2fbe3ab0[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 929604 WARN (jetty-launcher-1795-thread-3) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@2fbe3ab0[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 929608 WARN (jetty-launcher-1795-thread-3) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@370a919c[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 929608 WARN (jetty-launcher-1795-thread-3) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@370a919c[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 929609 INFO (jetty-launcher-1795-thread-3) [ ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:46350/solr
[junit4] 2> 929621 INFO (jetty-launcher-1795-thread-3) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 929621 INFO (jetty-launcher-1795-thread-2) [n:127.0.0.1:36024_solr ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 929628 INFO (zkConnectionManagerCallback-1842-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 929628 INFO (jetty-launcher-1795-thread-2) [n:127.0.0.1:36024_solr ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 929632 INFO (jetty-launcher-1795-thread-2) [n:127.0.0.1:36024_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
[junit4] 2> 929635 INFO (jetty-launcher-1795-thread-2) [n:127.0.0.1:36024_solr ] o.a.s.c.ZkController Publish node=127.0.0.1:36024_solr as DOWN
[junit4] 2> 929636 INFO (zkConnectionManagerCallback-1845-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 929636 INFO (jetty-launcher-1795-thread-3) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 929637 INFO (jetty-launcher-1795-thread-2) [n:127.0.0.1:36024_solr ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
[junit4] 2> 929637 INFO (jetty-launcher-1795-thread-2) [n:127.0.0.1:36024_solr ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:36024_solr
[junit4] 2> 929639 INFO (zkCallback-1815-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 929640 INFO (zkCallback-1824-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 929660 INFO (zkCallback-1841-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 929664 INFO (jetty-launcher-1795-thread-2) [n:127.0.0.1:36024_solr ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
[junit4] 2> 929707 INFO (jetty-launcher-1795-thread-2) [n:127.0.0.1:36024_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@86fa7ba
[junit4] 2> 929719 INFO (jetty-launcher-1795-thread-2) [n:127.0.0.1:36024_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@86fa7ba
[junit4] 2> 929719 INFO (jetty-launcher-1795-thread-2) [n:127.0.0.1:36024_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@86fa7ba
[junit4] 2> 929722 INFO (jetty-launcher-1795-thread-2) [n:127.0.0.1:36024_solr ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_B654420E9A68B436-001/tempDir-002/node2/.
[junit4] 2> 929757 INFO (jetty-launcher-1795-thread-3) [n:127.0.0.1:42389_solr ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 929757 INFO (zkConnectionManagerCallback-1851-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 929757 INFO (jetty-launcher-1795-thread-3) [n:127.0.0.1:42389_solr ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 929762 INFO (jetty-launcher-1795-thread-3) [n:127.0.0.1:42389_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
[junit4] 2> 929766 INFO (jetty-launcher-1795-thread-3) [n:127.0.0.1:42389_solr ] o.a.s.c.ZkController Publish node=127.0.0.1:42389_solr as DOWN
[junit4] 2> 929767 INFO (jetty-launcher-1795-thread-3) [n:127.0.0.1:42389_solr ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
[junit4] 2> 929767 INFO (jetty-launcher-1795-thread-3) [n:127.0.0.1:42389_solr ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:42389_solr
[junit4] 2> 929769 INFO (zkCallback-1841-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 929769 INFO (zkCallback-1824-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 929769 INFO (zkCallback-1815-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 929780 INFO (zkCallback-1850-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 929802 INFO (jetty-launcher-1795-thread-3) [n:127.0.0.1:42389_solr ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
[junit4] 2> 929820 INFO (jetty-launcher-1795-thread-3) [n:127.0.0.1:42389_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@86fa7ba
[junit4] 2> 929853 INFO (jetty-launcher-1795-thread-3) [n:127.0.0.1:42389_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@86fa7ba
[junit4] 2> 929853 INFO (jetty-launcher-1795-thread-3) [n:127.0.0.1:42389_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@86fa7ba
[junit4] 2> 929856 INFO (jetty-launcher-1795-thread-3) [n:127.0.0.1:42389_solr ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_B654420E9A68B436-001/tempDir-002/node3/.
[junit4] 2> 929931 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[B654420E9A68B436]) [ ] o.a.s.c.MiniSolrCloudCluster waitForAllNodes: numServers=4
[junit4] 2> 929936 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[B654420E9A68B436]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 929940 INFO (zkConnectionManagerCallback-1860-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 929940 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[B654420E9A68B436]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 929941 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[B654420E9A68B436]) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (4)
[junit4] 2> 929942 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[B654420E9A68B436]) [ ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:46350/solr ready
[junit4] 2> 929980 INFO (qtp1523594628-5210) [n:127.0.0.1:36024_solr ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :overseerstatus with params action=OVERSEERSTATUS&wt=javabin&version=2 and sendToOCPQueue=true
[junit4] 2> 930002 INFO (qtp1523594628-5210) [n:127.0.0.1:36024_solr ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={action=OVERSEERSTATUS&wt=javabin&version=2} status=0 QTime=22
[junit4] 2> 930004 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[B654420E9A68B436]) [ ] o.a.s.c.MoveReplicaTest total_jettys: 4
[junit4] 2> 930005 INFO (qtp1523594628-5212) [n:127.0.0.1:36024_solr ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params pullReplicas=1&collection.configName=conf1&maxShardsPerNode=2&autoAddReplicas=false&name=MoveReplicaHDFSTest_coll_false&nrtReplicas=1&action=CREATE&numShards=2&tlogReplicas=0&wt=javabin&version=2 and sendToOCPQueue=true
[junit4] 2> 930021 INFO (OverseerCollectionConfigSetProcessor-75717133839630344-127.0.0.1:40116_solr-n_0000000000) [n:127.0.0.1:40116_solr ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000000 doesn't exist. Requestor may have disconnected from ZooKeeper
[junit4] 2> 930024 INFO (OverseerThreadFactory-1634-thread-2-processing-n:127.0.0.1:40116_solr) [n:127.0.0.1:40116_solr ] o.a.s.c.a.c.CreateCollectionCmd Create collection MoveReplicaHDFSTest_coll_false
[junit4] 2> 930132 INFO (OverseerStateUpdate-75717133839630344-127.0.0.1:40116_solr-n_0000000000) [n:127.0.0.1:40116_solr ] o.a.s.c.o.SliceMutator createReplica() {
[junit4] 2> "operation":"ADDREPLICA",
[junit4] 2> "collection":"MoveReplicaHDFSTest_coll_false",
[junit4] 2> "shard":"shard1",
[junit4] 2> "core":"MoveReplicaHDFSTest_coll_false_shard1_replica_n1",
[junit4] 2> "state":"down",
[junit4] 2> "base_url":"https://127.0.0.1:34574/solr",
[junit4] 2> "type":"NRT",
[junit4] 2> "waitForFinalState":"false"}
[junit4] 2> 930136 INFO (OverseerStateUpdate-75717133839630344-127.0.0.1:40116_solr-n_0000000000) [n:127.0.0.1:40116_solr ] o.a.s.c.o.SliceMutator createReplica() {
[junit4] 2> "operation":"ADDREPLICA",
[junit4] 2> "collection":"MoveReplicaHDFSTest_coll_false",
[junit4] 2> "shard":"shard1",
[junit4] 2> "core":"MoveReplicaHDFSTest_coll_false_shard1_replica_p3",
[junit4] 2> "state":"down",
[junit4] 2> "base_url":"https://127.0.0.1:40116/solr",
[junit4] 2> "type":"PULL",
[junit4] 2> "waitForFinalState":"false"}
[junit4] 2> 930139 INFO (OverseerStateUpdate-75717133839630344-127.0.0.1:40116_solr-n_0000000000) [n:127.0.0.1:40116_solr ] o.a.s.c.o.SliceMutator createReplica() {
[junit4] 2> "operation":"ADDREPLICA",
[junit4] 2> "collection":"MoveReplicaHDFSTest_coll_false",
[junit4] 2> "shard":"shard2",
[junit4] 2> "core":"MoveReplicaHDFSTest_coll_false_shard2_replica_n4",
[junit4] 2> "state":"down",
[junit4] 2> "base_url":"https://127.0.0.1:42389/solr",
[junit4] 2> "type":"NRT",
[junit4] 2> "waitForFinalState":"false"}
[junit4] 2> 930142 INFO (OverseerStateUpdate-75717133839630344-127.0.0.1:40116_solr-n_0000000000) [n:127.0.0.1:40116_solr ] o.a.s.c.o.SliceMutator createReplica() {
[junit4] 2> "operation":"ADDREPLICA",
[junit4] 2> "collection":"MoveReplicaHDFSTest_coll_false",
[junit4] 2> "shard":"shard2",
[junit4] 2> "core":"MoveReplicaHDFSTest_coll_false_shard2_replica_p6",
[junit4] 2> "state":"down",
[junit4] 2> "base_url":"https://127.0.0.1:36024/solr",
[junit4] 2> "type":"PULL",
[junit4] 2> "waitForFinalState":"false"}
[junit4] 2> 930381 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node5&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard1_replica_p3&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard1&wt=javabin&version=2&replicaType=PULL
[junit4] 2> 930395 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node8&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard2_replica_p6&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard2&wt=javabin&version=2&replicaType=PULL
[junit4] 2> 930398 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node7&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard2_replica_n4&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard2&wt=javabin&version=2&replicaType=NRT
[junit4] 2> 930405 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node2&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard1_replica_n1&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard1&wt=javabin&version=2&replicaType=NRT
[junit4] 2> 930405 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
[junit4] 2> 931415 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
[junit4] 2> 931418 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
[junit4] 2> 931420 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
[junit4] 2> 931432 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node2 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
[junit4] 2> 931453 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.s.IndexSchema [MoveReplicaHDFSTest_coll_false_shard2_replica_p6] Schema name=minimal
[junit4] 2> 931455 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
[junit4] 2> 931455 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.CoreContainer Creating SolrCore 'MoveReplicaHDFSTest_coll_false_shard2_replica_p6' using configuration from collection MoveReplicaHDFSTest_coll_false, trusted=true
[junit4] 2> 931455 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.MoveReplicaHDFSTest_coll_false.shard2.replica_p6' (registry 'solr.core.MoveReplicaHDFSTest_coll_false.shard2.replica_p6') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@86fa7ba
[junit4] 2> 931472 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.s.IndexSchema [MoveReplicaHDFSTest_coll_false_shard2_replica_n4] Schema name=minimal
[junit4] 2> 931473 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.s.IndexSchema [MoveReplicaHDFSTest_coll_false_shard1_replica_p3] Schema name=minimal
[junit4] 2> 931474 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node2 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.s.IndexSchema [MoveReplicaHDFSTest_coll_false_shard1_replica_n1] Schema name=minimal
[junit4] 2> 931485 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://lucene2-us-west.apache.org:35312/solr_hdfs_home
[junit4] 2> 931485 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
[junit4] 2> 931488 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.SolrCore [[MoveReplicaHDFSTest_coll_false_shard2_replica_p6] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_B654420E9A68B436-001/tempDir-002/node2/MoveReplicaHDFSTest_coll_false_shard2_replica_p6], dataDir=[hdfs://lucene2-us-west.apache.org:35312/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node8/data/]
[junit4] 2> 931490 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:35312/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node8/data/snapshot_metadata
[junit4] 2> 931494 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
[junit4] 2> 931494 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.CoreContainer Creating SolrCore 'MoveReplicaHDFSTest_coll_false_shard2_replica_n4' using configuration from collection MoveReplicaHDFSTest_coll_false, trusted=true
[junit4] 2> 931494 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.MoveReplicaHDFSTest_coll_false.shard2.replica_n4' (registry 'solr.core.MoveReplicaHDFSTest_coll_false.shard2.replica_n4') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@86fa7ba
[junit4] 2> 931495 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://lucene2-us-west.apache.org:35312/solr_hdfs_home
[junit4] 2> 931495 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
[junit4] 2> 931495 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.SolrCore [[MoveReplicaHDFSTest_coll_false_shard2_replica_n4] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_B654420E9A68B436-001/tempDir-002/node3/MoveReplicaHDFSTest_coll_false_shard2_replica_n4], dataDir=[hdfs://lucene2-us-west.apache.org:35312/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node7/data/]
[junit4] 2> 931496 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:35312/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node7/data/snapshot_metadata
[junit4] 2> 931498 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node2 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
[junit4] 2> 931498 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node2 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.CoreContainer Creating SolrCore 'MoveReplicaHDFSTest_coll_false_shard1_replica_n1' using configuration from collection MoveReplicaHDFSTest_coll_false, trusted=true
[junit4] 2> 931498 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node2 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.MoveReplicaHDFSTest_coll_false.shard1.replica_n1' (registry 'solr.core.MoveReplicaHDFSTest_coll_false.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@86fa7ba
[junit4] 2> 931499 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node2 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://lucene2-us-west.apache.org:35312/solr_hdfs_home
[junit4] 2> 931499 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node2 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
[junit4] 2> 931499 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node2 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.SolrCore [[MoveReplicaHDFSTest_coll_false_shard1_replica_n1] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_B654420E9A68B436-001/tempDir-002/node1/MoveReplicaHDFSTest_coll_false_shard1_replica_n1], dataDir=[hdfs://lucene2-us-west.apache.org:35312/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node2/data/]
[junit4] 2> 931500 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node2 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:35312/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node2/data/snapshot_metadata
[junit4] 2> 931532 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
[junit4] 2> 931532 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.c.CoreContainer Creating SolrCore 'MoveReplicaHDFSTest_coll_false_shard1_replica_p3' using configuration from collection MoveReplicaHDFSTest_coll_false, trusted=true
[junit4] 2> 931532 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.MoveReplicaHDFSTest_coll_false.shard1.replica_p3' (registry 'solr.core.MoveReplicaHDFSTest_coll_false.shard1.replica_p3') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@86fa7ba
[junit4] 2> 931533 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://lucene2-us-west.apache.org:35312/solr_hdfs_home
[junit4] 2> 931533 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
[junit4] 2> 931533 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.c.SolrCore [[MoveReplicaHDFSTest_coll_false_shard1_replica_p3] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_B654420E9A68B436-001/tempDir-002/node4/MoveReplicaHDFSTest_coll_false_shard1_replica_p3], dataDir=[hdfs://lucene2-us-west.apache.org:35312/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node5/data/]
[junit4] 2> 931548 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:35312/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node5/data/snapshot_metadata
[junit4] 2> 931555 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 931556 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
[junit4] 2> 931556 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
[junit4] 2> 931564 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node2 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 931564 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node2 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
[junit4] 2> 931564 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node2 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
[junit4] 2> 931567 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 931567 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
[junit4] 2> 931567 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
[junit4] 2> 931570 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 931570 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
[junit4] 2> 931570 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
[junit4] 2> 931651 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 931653 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node2 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 931655 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:35312/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node5/data
[junit4] 2> 931656 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 931656 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node2 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:35312/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node2/data
[junit4] 2> 931660 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:35312/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node7/data
[junit4] 2> 931665 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 931689 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:35312/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node8/data
[junit4] 2> 931709 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:35312/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node7/data/index
[junit4] 2> 931715 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node2 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:35312/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node2/data/index
[junit4] 2> 931751 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:35312/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node5/data/index
[junit4] 2> 931759 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node2 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 931761 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 931766 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node2 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
[junit4] 2> 931766 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node2 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
[junit4] 2> 931768 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 931768 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
[junit4] 2> 931768 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
[junit4] 2> 931768 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
[junit4] 2> 931768 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
[junit4] 2> 931779 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:35312/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node8/data/index
[junit4] 2> 931793 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 931799 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node2 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 931817 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 931829 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 931829 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
[junit4] 2> 931829 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
[junit4] 2> 931841 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 932321 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
[junit4] 2> 932321 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 932321 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
[junit4] 2> 932325 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
[junit4] 2> 932325 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
[junit4] 2> 932355 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
[junit4] 2> 932355 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
[junit4] 2> 932600 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@669faf6f[MoveReplicaHDFSTest_coll_false_shard2_replica_n4] main]
[junit4] 2> 932605 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@79bd3481[MoveReplicaHDFSTest_coll_false_shard1_replica_p3] main]
[junit4] 2> 932606 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
[junit4] 2> 932607 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 932607 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
[junit4] 2> 932608 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 932612 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
[junit4] 2> 932614 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
[junit4] 2> 932616 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1649937218660204544
[junit4] 2> 932620 INFO (searcherExecutor-1653-thread-1-processing-n:127.0.0.1:42389_solr x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.SolrCore [MoveReplicaHDFSTest_coll_false_shard2_replica_n4] Registered new searcher Searcher@669faf6f[MoveReplicaHDFSTest_coll_false_shard2_replica_n4] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 932622 INFO (searcherExecutor-1655-thread-1-processing-n:127.0.0.1:40116_solr x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.c.SolrCore [MoveReplicaHDFSTest_coll_false_shard1_replica_p3] Registered new searcher Searcher@79bd3481[MoveReplicaHDFSTest_coll_false_shard1_replica_p3] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 932628 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/MoveReplicaHDFSTest_coll_false/terms/shard2 to Terms{values={core_node7=0}, version=0}
[junit4] 2> 932628 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/MoveReplicaHDFSTest_coll_false/leaders/shard2
[junit4] 2> 932641 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
[junit4] 2> 932641 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
[junit4] 2> 932641 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.SyncStrategy Sync replicas to https://127.0.0.1:42389/solr/MoveReplicaHDFSTest_coll_false_shard2_replica_n4/
[junit4] 2> 932642 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
[junit4] 2> 932642 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.SyncStrategy https://127.0.0.1:42389/solr/MoveReplicaHDFSTest_coll_false_shard2_replica_n4/ has no replicas
[junit4] 2> 932642 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.ShardLeaderElectionContextBase Creating leader registration node /collections/MoveReplicaHDFSTest_coll_false/leaders/shard2/leader after winning as /collections/MoveReplicaHDFSTest_coll_false/leader_elect/shard2/election/75717133839630350-core_node7-n_0000000000
[junit4] 2> 932643 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.c.ZkController MoveReplicaHDFSTest_coll_false_shard1_replica_p3 starting background replication from leader
[junit4] 2> 932643 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.c.ReplicateFromLeader Will start replication from leader with poll interval: 00:00:01
[junit4] 2> 932646 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.ShardLeaderElectionContext I am the new leader: https://127.0.0.1:42389/solr/MoveReplicaHDFSTest_coll_false_shard2_replica_n4/ shard2
[junit4] 2> 932651 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.h.ReplicationHandler Poll scheduled at an interval of 1000ms
[junit4] 2> 932651 INFO (qtp1265529686-5200) [n:127.0.0.1:40116_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p3 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
[junit4] 2> 932748 INFO (zkCallback-1850-thread-1) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/MoveReplicaHDFSTest_coll_false/state.json] for collection [MoveReplicaHDFSTest_coll_false] has occurred - updating... (live nodes size: [4])
[junit4] 2> 932748 INFO (zkCallback-1815-thread-1) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/MoveReplicaHDFSTest_coll_false/state.json] for collection [MoveReplicaHDFSTest_coll_false] has occurred - updating... (live nodes size: [4])
[junit4] 2> 932751 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.ZkController I am the leader, no recovery necessary
[junit4] 2> 932753 INFO (qtp1511014368-5220) [n:127.0.0.1:42389_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&coreNodeName=core_node7&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard2_replica_n4&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard2&wt=javabin&version=2&replicaType=NRT} status=0 QTime=2354
[junit4] 2> 932804 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
[junit4] 2> 932804 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
[junit4] 2> 932805 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node2 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
[junit4] 2> 932805 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node2 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 932805 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node2 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
[junit4] 2> 932836 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node2 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
[junit4] 2> 932836 INFO (qtp1873729893-5190) [n:127.0.0.1:34574_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node2 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
[junit4] 2> 932836 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@2d9a874d[MoveReplicaHDFSTest_coll_false_shard2_replica_p6] main]
[junit4] 2> 932838 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
[junit4] 2> 932838 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 932839 INFO (qtp1523594628-5211) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
[junit4] 2> 932848 INFO (searcherExecutor-1652-thread-1-processing-n:127.0.0.1:36024_solr x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8) [n:127.0.0.1:36024_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.SolrCore [MoveReplicaHDFSTest_coll_false_shard2_replica_p6] Registered new searcher Searcher@2d9a874d[MoveReplicaHDFSTest_coll_false_shard2_replica_p6] main{Exitable
[...truncated too long message...]
nfo.seed([B654420E9A68B436:3E007DD43494D9CE]:0)
[junit4] > at org.apache.solr.cloud.MoveReplicaTest.test(MoveReplicaTest.java:147)
[junit4] > at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[junit4] > at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[junit4] > at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[junit4] > at java.base/java.lang.reflect.Method.invoke(Method.java:566)
[junit4] > at java.base/java.lang.Thread.run(Thread.java:834)
[junit4] 2> 1005339 WARN (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.a.h.h.s.d.DirectoryScanner DirectoryScanner: shutdown has been called
[junit4] 2> 1005381 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.e.j.s.h.ContextHandler Stopped o.e.j.w.WebAppContext@33d85234{datanode,/,null,UNAVAILABLE}{/datanode}
[junit4] 2> 1005381 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.e.j.s.AbstractConnector Stopped ServerConnector@20beaa55{HTTP/1.1,[http/1.1]}{localhost:0}
[junit4] 2> 1005381 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.e.j.s.session node0 Stopped scavenging
[junit4] 2> 1005381 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@41437e03{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,UNAVAILABLE}
[junit4] 2> 1005515 WARN (BP-885977843-127.0.0.1-1573502744279 heartbeating to lucene2-us-west.apache.org/127.0.0.1:35312) [ ] o.a.h.h.s.d.IncrementalBlockReportManager IncrementalBlockReportManager interrupted
[junit4] 2> 1005515 WARN (BP-885977843-127.0.0.1-1573502744279 heartbeating to lucene2-us-west.apache.org/127.0.0.1:35312) [ ] o.a.h.h.s.d.DataNode Ending block pool service for: Block pool BP-885977843-127.0.0.1-1573502744279 (Datanode Uuid 50567c2a-3b64-44b8-9852-22d976b1c0d2) service to lucene2-us-west.apache.org/127.0.0.1:35312
[junit4] 2> 1005562 WARN (refreshUsed-/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_B654420E9A68B436-001/tempDir-001/hdfsBaseDir/data/data1/current/BP-885977843-127.0.0.1-1573502744279) [ ] o.a.h.f.CachingGetSpaceUsed Thread Interrupted waiting to refresh disk information: sleep interrupted
[junit4] 2> 1005600 WARN (refreshUsed-/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_B654420E9A68B436-001/tempDir-001/hdfsBaseDir/data/data2/current/BP-885977843-127.0.0.1-1573502744279) [ ] o.a.h.f.CachingGetSpaceUsed Thread Interrupted waiting to refresh disk information: sleep interrupted
[junit4] 2> 1005696 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.e.j.s.h.ContextHandler Stopped o.e.j.w.WebAppContext@615a69aa{hdfs,/,null,UNAVAILABLE}{/hdfs}
[junit4] 2> 1005696 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.e.j.s.AbstractConnector Stopped ServerConnector@2a4b3702{HTTP/1.1,[http/1.1]}{lucene2-us-west.apache.org:0}
[junit4] 2> 1005696 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.e.j.s.session node0 Stopped scavenging
[junit4] 2> 1005696 INFO (SUITE-MoveReplicaHDFSTest-seed#[B654420E9A68B436]-worker) [ ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@4d7517dc{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,UNAVAILABLE}
[junit4] 2> NOTE: leaving temporary files on disk at: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_B654420E9A68B436-001
[junit4] 2> Nov 11, 2019 8:07:02 PM com.carrotsearch.randomizedtesting.ThreadLeakControl checkThreadLeaks
[junit4] 2> WARNING: Will linger awaiting termination of 16 leaked thread(s).
[junit4] 2> NOTE: test params are: codec=Asserting(Lucene80): {_version_=FSTOrd50, _root_=FSTOrd50, id=PostingsFormat(name=LuceneVarGapFixedInterval)}, docValues:{_version_=DocValuesFormat(name=Lucene80)}, maxPointsInLeafNode=1084, maxMBSortInHeap=6.4329596622647465, sim=Asserting(org.apache.lucene.search.similarities.AssertingSimilarity@73702fbe), locale=ga, timezone=SystemV/AST4ADT
[junit4] 2> NOTE: Linux 4.4.0-112-generic amd64/Oracle Corporation 11.0.4 (64-bit)/cpus=4,threads=7,free=78706688,total=499122176
[junit4] 2> NOTE: All tests run in this JVM: [PingRequestHandlerTest, TermsComponentTest, AnalysisErrorHandlingTest, TestShortCircuitedRequests, TestCollectionsAPIViaSolrCloudCluster, DocValuesMultiTest, TestCustomDocTransformer, SignatureUpdateProcessorFactoryTest, BadCopyFieldTest, TestScoreJoinQPNoScore, TestSortByMinMaxFunction, SplitHandlerTest, TestMiniSolrCloudClusterSSL, TaggingAttributeTest, MigrateRouteKeyTest, SolrMetricManagerTest, RandomizedTaggerTest, TokenizerChainTest, RemoteQueryErrorTest, TestPerFieldSimilarity, TestPartialUpdateDeduplication, TestIndexSearcher, DistribJoinFromCollectionTest, BinaryUpdateRequestHandlerTest, TestNumericTerms64, DocValuesNotIndexedTest, HdfsRecoverLeaseTest, TestCollapseQParserPlugin, TestSchemaVersionResource, SuggestComponentTest, ClusterStateTest, MoveReplicaHDFSTest]
[junit4] Completed [102/888 (1!)] on J2 in 80.90s, 4 tests, 1 failure, 2 skipped <<< FAILURES!
[...truncated 52198 lines...]
-ecj-javadoc-lint-src:
[mkdir] Created dir: /tmp/ecj14923046
[ecj-lint] Compiling 69 source files to /tmp/ecj14923046
[ecj-lint] invalid Class-Path header in manifest of jar file: /home/jenkins/.ivy2/cache/org.restlet.jee/org.restlet/jars/org.restlet-2.3.0.jar
[ecj-lint] invalid Class-Path header in manifest of jar file: /home/jenkins/.ivy2/cache/org.restlet.jee/org.restlet.ext.servlet/jars/org.restlet.ext.servlet-2.3.0.jar
[ecj-lint] ----------
[ecj-lint] 1. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java (at line 28)
[ecj-lint] import javax.naming.InitialContext;
[ecj-lint] ^^^^^^^^^^^^^^^^^^^^^^^^^^^
[ecj-lint] The type javax.naming.InitialContext is not accessible
[ecj-lint] ----------
[ecj-lint] 2. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java (at line 29)
[ecj-lint] import javax.naming.NamingException;
[ecj-lint] ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
[ecj-lint] The type javax.naming.NamingException is not accessible
[ecj-lint] ----------
[ecj-lint] 3. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java (at line 182)
[ecj-lint] c = getFromJndi(initProps, jndiName);
[ecj-lint] ^^^^^^^^^^^
[ecj-lint] The method getFromJndi(Properties, String) from the type new Callable<Connection>(){} refers to the missing type NamingException
[ecj-lint] ----------
[ecj-lint] 4. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java (at line 245)
[ecj-lint] private Connection getFromJndi(final Properties initProps, final String jndiName) throws NamingException,
[ecj-lint] ^^^^^^^^^^^^^^^
[ecj-lint] NamingException cannot be resolved to a type
[ecj-lint] ----------
[ecj-lint] 5. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java (at line 249)
[ecj-lint] InitialContext ctx = new InitialContext();
[ecj-lint] ^^^^^^^^^^^^^^
[ecj-lint] InitialContext cannot be resolved to a type
[ecj-lint] ----------
[ecj-lint] 6. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java (at line 249)
[ecj-lint] InitialContext ctx = new InitialContext();
[ecj-lint] ^^^^^^^^^^^^^^
[ecj-lint] InitialContext cannot be resolved to a type
[ecj-lint] ----------
[ecj-lint] 6 problems (6 errors)
BUILD FAILED
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/build.xml:652: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/build.xml:101: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build.xml:651: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/common-build.xml:479: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/common-build.xml:2027: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/common-build.xml:2066: Compile failed; see the compiler error output for details.
Total time: 563 minutes 56 seconds
Build step 'Invoke Ant' marked build as failure
Archiving artifacts
Recording test results
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any
[JENKINS] Lucene-Solr-NightlyTests-master - Build # 2014 - Unstable
Posted by Apache Jenkins Server <je...@builds.apache.org>.
Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-master/2014/
2 tests failed.
FAILED: org.apache.solr.cloud.MoveReplicaHDFSTest.test
Error Message:
expected not same
Stack Trace:
java.lang.AssertionError: expected not same
at __randomizedtesting.SeedInfo.seed([839C476749F4BE7C:BC878BDE708D384]:0)
at org.junit.Assert.fail(Assert.java:88)
at org.junit.Assert.failSame(Assert.java:819)
at org.junit.Assert.assertNotSame(Assert.java:798)
at org.junit.Assert.assertNotSame(Assert.java:811)
at org.apache.solr.cloud.MoveReplicaTest.test(MoveReplicaTest.java:147)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
at java.base/java.lang.Thread.run(Thread.java:834)
FAILED: junit.framework.TestSuite.org.apache.solr.cloud.cdcr.CdcrBootstrapTest
Error Message:
ObjectTracker found 1 object(s) that were not released!!! [RawDirectoryWrapper] org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.lucene.store.RawDirectoryWrapper at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42) at org.apache.solr.core.CachingDirectoryFactory.get(CachingDirectoryFactory.java:348) at org.apache.solr.handler.IndexFetcher.fetchLatestIndex(IndexFetcher.java:509) at org.apache.solr.handler.IndexFetcher.fetchLatestIndex(IndexFetcher.java:351) at org.apache.solr.handler.ReplicationHandler.doFetch(ReplicationHandler.java:422) at org.apache.solr.handler.CdcrRequestHandler$BootstrapCallable.call(CdcrRequestHandler.java:772) at org.apache.solr.handler.CdcrRequestHandler$BootstrapCallable.call(CdcrRequestHandler.java:727) at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:202) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:210) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:834) expected null, but was:<ObjectTracker found 1 object(s) that were not released!!! 
[RawDirectoryWrapper] org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.lucene.store.RawDirectoryWrapper at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42) at org.apache.solr.core.CachingDirectoryFactory.get(CachingDirectoryFactory.java:348) at org.apache.solr.handler.IndexFetcher.fetchLatestIndex(IndexFetcher.java:509) at org.apache.solr.handler.IndexFetcher.fetchLatestIndex(IndexFetcher.java:351) at org.apache.solr.handler.ReplicationHandler.doFetch(ReplicationHandler.java:422) at org.apache.solr.handler.CdcrRequestHandler$BootstrapCallable.call(CdcrRequestHandler.java:772) at org.apache.solr.handler.CdcrRequestHandler$BootstrapCallable.call(CdcrRequestHandler.java:727) at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:202) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:210) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:834) >
Stack Trace:
java.lang.AssertionError: ObjectTracker found 1 object(s) that were not released!!! [RawDirectoryWrapper]
org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.lucene.store.RawDirectoryWrapper
at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)
at org.apache.solr.core.CachingDirectoryFactory.get(CachingDirectoryFactory.java:348)
at org.apache.solr.handler.IndexFetcher.fetchLatestIndex(IndexFetcher.java:509)
at org.apache.solr.handler.IndexFetcher.fetchLatestIndex(IndexFetcher.java:351)
at org.apache.solr.handler.ReplicationHandler.doFetch(ReplicationHandler.java:422)
at org.apache.solr.handler.CdcrRequestHandler$BootstrapCallable.call(CdcrRequestHandler.java:772)
at org.apache.solr.handler.CdcrRequestHandler$BootstrapCallable.call(CdcrRequestHandler.java:727)
at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:202)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:210)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
at java.base/java.lang.Thread.run(Thread.java:834)
expected null, but was:<ObjectTracker found 1 object(s) that were not released!!! [RawDirectoryWrapper]
org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.lucene.store.RawDirectoryWrapper
at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)
at org.apache.solr.core.CachingDirectoryFactory.get(CachingDirectoryFactory.java:348)
at org.apache.solr.handler.IndexFetcher.fetchLatestIndex(IndexFetcher.java:509)
at org.apache.solr.handler.IndexFetcher.fetchLatestIndex(IndexFetcher.java:351)
at org.apache.solr.handler.ReplicationHandler.doFetch(ReplicationHandler.java:422)
at org.apache.solr.handler.CdcrRequestHandler$BootstrapCallable.call(CdcrRequestHandler.java:772)
at org.apache.solr.handler.CdcrRequestHandler$BootstrapCallable.call(CdcrRequestHandler.java:727)
at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:202)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:210)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
at java.base/java.lang.Thread.run(Thread.java:834)
>
at __randomizedtesting.SeedInfo.seed([839C476749F4BE7C]:0)
at org.junit.Assert.fail(Assert.java:88)
at org.junit.Assert.failNotNull(Assert.java:755)
at org.junit.Assert.assertNull(Assert.java:737)
at org.apache.solr.SolrTestCaseJ4.teardownTestCases(SolrTestCaseJ4.java:342)
at jdk.internal.reflect.GeneratedMethodAccessor65.invoke(Unknown Source)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:901)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
at java.base/java.lang.Thread.run(Thread.java:834)
Build Log:
[...truncated 13425 lines...]
[junit4] Suite: org.apache.solr.cloud.MoveReplicaHDFSTest
[junit4] 2> 137067 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
[junit4] 2> 137067 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.a.s.SolrTestCaseJ4 Created dataDir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_839C476749F4BE7C-001/data-dir-11-001
[junit4] 2> 137067 WARN (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=14 numCloses=14
[junit4] 2> 137067 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP=true
[junit4] 2> 137068 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.util.RandomizeSSL(reason="", value=0.0/0.0, ssl=0.0/0.0, clientAuth=0.0/0.0)
[junit4] 2> 137387 WARN (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.a.h.u.NativeCodeLoader Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
[junit4] 1> Formatting using clusterid: testClusterID
[junit4] 2> 138048 WARN (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.a.h.m.i.MetricsConfig Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
[junit4] 2> 138163 WARN (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
[junit4] 2> 138179 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 138183 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 138183 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 138183 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 138184 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@3a29708e{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
[junit4] 2> 138381 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@300e5bb{hdfs,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/jetty-localhost.localdomain-35697-hdfs-_-any-13042088351302338697.dir/webapp/,AVAILABLE}{/hdfs}
[junit4] 2> 138381 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.e.j.s.AbstractConnector Started ServerConnector@226ec0e7{HTTP/1.1,[http/1.1]}{localhost.localdomain:35697}
[junit4] 2> 138383 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.e.j.s.Server Started @138442ms
[junit4] 2> 139030 WARN (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
[junit4] 2> 139033 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 139041 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 139041 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 139041 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 139041 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@4dd04ffd{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
[junit4] 2> 139155 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@7854a634{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/jetty-localhost-43743-datanode-_-any-16077562903980576423.dir/webapp/,AVAILABLE}{/datanode}
[junit4] 2> 139156 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.e.j.s.AbstractConnector Started ServerConnector@39c390cb{HTTP/1.1,[http/1.1]}{localhost:43743}
[junit4] 2> 139156 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.e.j.s.Server Started @139216ms
[junit4] 2> 139591 WARN (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
[junit4] 2> 139592 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 139599 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 139599 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 139599 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 139599 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@43dbe77d{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
[junit4] 2> 139709 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@4835a469{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/jetty-localhost-33739-datanode-_-any-17434162825628961492.dir/webapp/,AVAILABLE}{/datanode}
[junit4] 2> 139720 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.e.j.s.AbstractConnector Started ServerConnector@4ea04e11{HTTP/1.1,[http/1.1]}{localhost:33739}
[junit4] 2> 139720 INFO (SUITE-MoveReplicaHDFSTest-seed#[839C476749F4BE7C]-worker) [ ] o.e.j.s.Server Started @139781ms
[junit4] 2> 140338 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0x6219e893eee19412: Processing first storage report for DS-c8f47ee3-2a55-4a3d-9a93-66c33c6946a3 from datanode 642b576c-93fa-481b-969e-cdafdca728d6
[junit4] 2> 140339 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0x6219e893eee19412: from storage DS-c8f47ee3-2a55-4a3d-9a93-66c33c6946a3 node DatanodeRegistration(127.0.0.1:34737, datanodeUuid=642b576c-93fa-481b-969e-cdafdca728d6, infoPort=33221, infoSecurePort=0, ipcPort=36893, storageInfo=lv=-57;cid=testClusterID;nsid=1611607507;c=1573429816482), blocks: 0, hasStaleStorage: true, processing time: 1 msecs, invalidatedBlocks: 0
[junit4] 2> 140339 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0x169d61f2e32e5a78: Processing first storage report for DS-bd2de0e3-0868-49a6-adf6-a13cc45a9e47 from datanode 00b3c27f-c662-4eea-a27b-4587b39dbe31
[junit4] 2> 140339 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0x169d61f2e32e5a78: from storage DS-bd2de0e3-0868-49a6-adf6-a13cc45a9e47 node DatanodeRegistration(127.0.0.1:39171, datanodeUuid=00b3c27f-c662-4eea-a27b-4587b39dbe31, infoPort=40367, infoSecurePort=0, ipcPort=44047, storageInfo=lv=-57;cid=testClusterID;nsid=1611607507;c=1573429816482), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
[junit4] 2> 140339 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0x169d61f2e32e5a78: Processing first storage report for DS-912dd68f-5cdd-4396-b5c0-8da72e046001 from datanode 00b3c27f-c662-4eea-a27b-4587b39dbe31
[junit4] 2> 140339 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0x169d61f2e32e5a78: from storage DS-912dd68f-5cdd-4396-b5c0-8da72e046001 node DatanodeRegistration(127.0.0.1:39171, datanodeUuid=00b3c27f-c662-4eea-a27b-4587b39dbe31, infoPort=40367, infoSecurePort=0, ipcPort=44047, storageInfo=lv=-57;cid=testClusterID;nsid=1611607507;c=1573429816482), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
[junit4] 2> 140339 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0x6219e893eee19412: Processing first storage report for DS-4359a688-9530-4e5e-b1b9-062a149616c8 from datanode 642b576c-93fa-481b-969e-cdafdca728d6
[junit4] 2> 140339 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0x6219e893eee19412: from storage DS-4359a688-9530-4e5e-b1b9-062a149616c8 node DatanodeRegistration(127.0.0.1:34737, datanodeUuid=642b576c-93fa-481b-969e-cdafdca728d6, infoPort=33221, infoSecurePort=0, ipcPort=36893, storageInfo=lv=-57;cid=testClusterID;nsid=1611607507;c=1573429816482), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
[junit4] 2> 140492 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[839C476749F4BE7C]) [ ] o.a.s.SolrTestCaseJ4 ###Starting testNormalMove
[junit4] 2> 140493 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[839C476749F4BE7C]) [ ] o.a.s.c.MiniSolrCloudCluster Starting cluster of 4 servers in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_839C476749F4BE7C-001/tempDir-002
[junit4] 2> 140494 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[839C476749F4BE7C]) [ ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
[junit4] 2> 140494 INFO (ZkTestServer Run Thread) [ ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
[junit4] 2> 140494 INFO (ZkTestServer Run Thread) [ ] o.a.s.c.ZkTestServer Starting server
[junit4] 2> 140594 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[839C476749F4BE7C]) [ ] o.a.s.c.ZkTestServer start zk server on port:42051
[junit4] 2> 140594 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[839C476749F4BE7C]) [ ] o.a.s.c.ZkTestServer waitForServerUp: 127.0.0.1:42051
[junit4] 2> 140594 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[839C476749F4BE7C]) [ ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:42051
[junit4] 2> 140594 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[839C476749F4BE7C]) [ ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 42051
[junit4] 2> 140598 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[839C476749F4BE7C]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 140607 INFO (zkConnectionManagerCallback-372-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 140607 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[839C476749F4BE7C]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 140615 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[839C476749F4BE7C]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 140623 INFO (zkConnectionManagerCallback-374-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 140623 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[839C476749F4BE7C]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 140624 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[839C476749F4BE7C]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 140627 INFO (zkConnectionManagerCallback-376-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 140627 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[839C476749F4BE7C]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 140731 WARN (jetty-launcher-377-thread-1) [ ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 140731 WARN (jetty-launcher-377-thread-2) [ ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 140731 INFO (jetty-launcher-377-thread-2) [ ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 140731 INFO (jetty-launcher-377-thread-1) [ ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 140731 INFO (jetty-launcher-377-thread-2) [ ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
[junit4] 2> 140731 INFO (jetty-launcher-377-thread-1) [ ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
[junit4] 2> 140731 INFO (jetty-launcher-377-thread-2) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 140732 INFO (jetty-launcher-377-thread-1) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 140732 WARN (jetty-launcher-377-thread-3) [ ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 140732 INFO (jetty-launcher-377-thread-3) [ ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 140732 INFO (jetty-launcher-377-thread-3) [ ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
[junit4] 2> 140733 INFO (jetty-launcher-377-thread-3) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 140739 WARN (jetty-launcher-377-thread-4) [ ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
[junit4] 2> 140740 INFO (jetty-launcher-377-thread-4) [ ] o.a.s.c.s.e.JettySolrRunner Start Jetty (configured port=0, binding port=0)
[junit4] 2> 140740 INFO (jetty-launcher-377-thread-4) [ ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
[junit4] 2> 140740 INFO (jetty-launcher-377-thread-4) [ ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 11.0.4+10-LTS
[junit4] 2> 140744 INFO (jetty-launcher-377-thread-3) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 140744 INFO (jetty-launcher-377-thread-3) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 140744 INFO (jetty-launcher-377-thread-3) [ ] o.e.j.s.session node0 Scavenging every 660000ms
[junit4] 2> 140744 INFO (jetty-launcher-377-thread-3) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@5d9825ae{/solr,null,AVAILABLE}
[junit4] 2> 140753 INFO (jetty-launcher-377-thread-2) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 140753 INFO (jetty-launcher-377-thread-2) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 140753 INFO (jetty-launcher-377-thread-2) [ ] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 140753 INFO (jetty-launcher-377-thread-1) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 140753 INFO (jetty-launcher-377-thread-1) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 140753 INFO (jetty-launcher-377-thread-1) [ ] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 140754 INFO (jetty-launcher-377-thread-1) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@2d376e6a{/solr,null,AVAILABLE}
[junit4] 2> 140755 INFO (jetty-launcher-377-thread-3) [ ] o.e.j.s.AbstractConnector Started ServerConnector@3b7a9783{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:41595}
[junit4] 2> 140755 INFO (jetty-launcher-377-thread-3) [ ] o.e.j.s.Server Started @140815ms
[junit4] 2> 140755 INFO (jetty-launcher-377-thread-3) [ ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=41595}
[junit4] 2> 140756 ERROR (jetty-launcher-377-thread-3) [ ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
[junit4] 2> 140756 INFO (jetty-launcher-377-thread-3) [ ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 140756 INFO (jetty-launcher-377-thread-3) [ ] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version 9.0.0
[junit4] 2> 140756 INFO (jetty-launcher-377-thread-3) [ ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 140756 INFO (jetty-launcher-377-thread-3) [ ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 140756 INFO (jetty-launcher-377-thread-3) [ ] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time: 2019-11-10T23:50:19.488152Z
[junit4] 2> 140756 INFO (jetty-launcher-377-thread-2) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@c2e55c7{/solr,null,AVAILABLE}
[junit4] 2> 140759 INFO (jetty-launcher-377-thread-3) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 140767 INFO (jetty-launcher-377-thread-4) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0
[junit4] 2> 140767 INFO (jetty-launcher-377-thread-4) [ ] o.e.j.s.session No SessionScavenger set, using defaults
[junit4] 2> 140767 INFO (jetty-launcher-377-thread-4) [ ] o.e.j.s.session node0 Scavenging every 600000ms
[junit4] 2> 140768 INFO (jetty-launcher-377-thread-4) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@1b6762f7{/solr,null,AVAILABLE}
[junit4] 2> 140768 INFO (jetty-launcher-377-thread-1) [ ] o.e.j.s.AbstractConnector Started ServerConnector@74c30cbf{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:43275}
[junit4] 2> 140768 INFO (jetty-launcher-377-thread-1) [ ] o.e.j.s.Server Started @140828ms
[junit4] 2> 140768 INFO (jetty-launcher-377-thread-1) [ ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=43275}
[junit4] 2> 140769 ERROR (jetty-launcher-377-thread-1) [ ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
[junit4] 2> 140769 INFO (jetty-launcher-377-thread-1) [ ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 140769 INFO (jetty-launcher-377-thread-1) [ ] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version 9.0.0
[junit4] 2> 140769 INFO (jetty-launcher-377-thread-1) [ ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 140769 INFO (jetty-launcher-377-thread-1) [ ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 140769 INFO (jetty-launcher-377-thread-1) [ ] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time: 2019-11-10T23:50:19.501511Z
[junit4] 2> 140770 INFO (jetty-launcher-377-thread-2) [ ] o.e.j.s.AbstractConnector Started ServerConnector@5ce0ad1a{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:39585}
[junit4] 2> 140770 INFO (jetty-launcher-377-thread-2) [ ] o.e.j.s.Server Started @140830ms
[junit4] 2> 140770 INFO (jetty-launcher-377-thread-2) [ ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=39585}
[junit4] 2> 140770 ERROR (jetty-launcher-377-thread-2) [ ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
[junit4] 2> 140771 INFO (jetty-launcher-377-thread-2) [ ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 140771 INFO (jetty-launcher-377-thread-2) [ ] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version 9.0.0
[junit4] 2> 140771 INFO (jetty-launcher-377-thread-2) [ ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 140771 INFO (jetty-launcher-377-thread-2) [ ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 140771 INFO (jetty-launcher-377-thread-2) [ ] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time: 2019-11-10T23:50:19.503076Z
[junit4] 2> 140779 INFO (jetty-launcher-377-thread-1) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 140780 INFO (jetty-launcher-377-thread-4) [ ] o.e.j.s.AbstractConnector Started ServerConnector@629b4a9d{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:45515}
[junit4] 2> 140780 INFO (jetty-launcher-377-thread-4) [ ] o.e.j.s.Server Started @140840ms
[junit4] 2> 140780 INFO (jetty-launcher-377-thread-4) [ ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=45515}
[junit4] 2> 140780 ERROR (jetty-launcher-377-thread-4) [ ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
[junit4] 2> 140780 INFO (jetty-launcher-377-thread-4) [ ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
[junit4] 2> 140780 INFO (jetty-launcher-377-thread-4) [ ] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version 9.0.0
[junit4] 2> 140780 INFO (jetty-launcher-377-thread-4) [ ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 140780 INFO (jetty-launcher-377-thread-4) [ ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 140780 INFO (jetty-launcher-377-thread-4) [ ] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time: 2019-11-10T23:50:19.512427Z
[junit4] 2> 140781 INFO (jetty-launcher-377-thread-2) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 140787 INFO (jetty-launcher-377-thread-4) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 140787 INFO (zkConnectionManagerCallback-379-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 140787 INFO (jetty-launcher-377-thread-3) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 140792 INFO (zkConnectionManagerCallback-383-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 140792 INFO (jetty-launcher-377-thread-3) [ ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
[junit4] 2> 140792 INFO (jetty-launcher-377-thread-2) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 140795 INFO (jetty-launcher-377-thread-3) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
[junit4] 2> 140795 INFO (jetty-launcher-377-thread-3) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 140798 INFO (jetty-launcher-377-thread-3) [ ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@1c0a1aa5, but no JMX reporters were configured - adding default JMX reporter.
[junit4] 2> 140798 INFO (zkConnectionManagerCallback-381-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 140798 INFO (jetty-launcher-377-thread-1) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 140799 INFO (zkConnectionManagerCallback-385-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 140799 INFO (jetty-launcher-377-thread-4) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 140800 INFO (jetty-launcher-377-thread-2) [ ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
[junit4] 2> 140800 INFO (jetty-launcher-377-thread-1) [ ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
[junit4] 2> 140900 INFO (jetty-launcher-377-thread-4) [ ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
[junit4] 2> 140903 INFO (jetty-launcher-377-thread-1) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
[junit4] 2> 140903 INFO (jetty-launcher-377-thread-1) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 140904 INFO (jetty-launcher-377-thread-1) [ ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@1c0a1aa5, but no JMX reporters were configured - adding default JMX reporter.
[junit4] 2> 140907 INFO (jetty-launcher-377-thread-2) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
[junit4] 2> 140907 INFO (jetty-launcher-377-thread-2) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 140907 INFO (jetty-launcher-377-thread-4) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
[junit4] 2> 140907 INFO (jetty-launcher-377-thread-4) [ ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
[junit4] 2> 140908 INFO (jetty-launcher-377-thread-2) [ ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@1c0a1aa5, but no JMX reporters were configured - adding default JMX reporter.
[junit4] 2> 140909 INFO (jetty-launcher-377-thread-4) [ ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@1c0a1aa5, but no JMX reporters were configured - adding default JMX reporter.
[junit4] 2> 140950 INFO (jetty-launcher-377-thread-3) [ ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
[junit4] 2> 140952 WARN (jetty-launcher-377-thread-3) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@6d0378af[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 140952 WARN (jetty-launcher-377-thread-3) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@6d0378af[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 140955 WARN (jetty-launcher-377-thread-3) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@7d3d01f4[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 140955 WARN (jetty-launcher-377-thread-3) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@7d3d01f4[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 140957 INFO (jetty-launcher-377-thread-3) [ ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:42051/solr
[junit4] 2> 140959 INFO (jetty-launcher-377-thread-3) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 140962 INFO (zkConnectionManagerCallback-392-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 140962 INFO (jetty-launcher-377-thread-3) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 141076 INFO (jetty-launcher-377-thread-4) [ ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
[junit4] 2> 141079 WARN (jetty-launcher-377-thread-4) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@2e195640[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 141079 WARN (jetty-launcher-377-thread-4) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@2e195640[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 141081 WARN (jetty-launcher-377-thread-4) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@26503076[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 141081 WARN (jetty-launcher-377-thread-4) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@26503076[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 141082 INFO (jetty-launcher-377-thread-4) [ ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:42051/solr
[junit4] 2> 141083 INFO (jetty-launcher-377-thread-4) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 141086 INFO (zkConnectionManagerCallback-399-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 141086 INFO (jetty-launcher-377-thread-4) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 141185 INFO (jetty-launcher-377-thread-1) [ ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
[junit4] 2> 141186 WARN (jetty-launcher-377-thread-1) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@267f2893[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 141186 WARN (jetty-launcher-377-thread-1) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@267f2893[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 141191 WARN (jetty-launcher-377-thread-1) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@7aae408f[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 141191 WARN (jetty-launcher-377-thread-1) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@7aae408f[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 141192 INFO (jetty-launcher-377-thread-1) [ ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:42051/solr
[junit4] 2> 141193 INFO (jetty-launcher-377-thread-1) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 141201 INFO (zkConnectionManagerCallback-407-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 141201 INFO (jetty-launcher-377-thread-1) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 141287 INFO (jetty-launcher-377-thread-3) [n:127.0.0.1:41595_solr ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 141326 INFO (zkConnectionManagerCallback-409-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 141326 INFO (jetty-launcher-377-thread-3) [n:127.0.0.1:41595_solr ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 141423 INFO (jetty-launcher-377-thread-1) [n:127.0.0.1:43275_solr ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 141435 INFO (zkConnectionManagerCallback-411-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 141435 INFO (jetty-launcher-377-thread-1) [n:127.0.0.1:43275_solr ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 141524 INFO (jetty-launcher-377-thread-4) [n:127.0.0.1:45515_solr ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 141543 INFO (zkConnectionManagerCallback-413-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 141543 INFO (jetty-launcher-377-thread-4) [n:127.0.0.1:45515_solr ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 141562 INFO (jetty-launcher-377-thread-4) [n:127.0.0.1:45515_solr ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:45515_solr
[junit4] 2> 141567 INFO (jetty-launcher-377-thread-4) [n:127.0.0.1:45515_solr ] o.a.s.c.Overseer Overseer (id=72703216206086156-127.0.0.1:45515_solr-n_0000000000) starting
[junit4] 2> 141584 INFO (jetty-launcher-377-thread-2) [ ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
[junit4] 2> 141585 INFO (OverseerStateUpdate-72703216206086156-127.0.0.1:45515_solr-n_0000000000) [n:127.0.0.1:45515_solr ] o.a.s.c.Overseer Starting to work on the main queue : 127.0.0.1:45515_solr
[junit4] 2> 141586 INFO (jetty-launcher-377-thread-4) [n:127.0.0.1:45515_solr ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:45515_solr
[junit4] 2> 141586 WARN (jetty-launcher-377-thread-2) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@4fb22caa[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 141586 WARN (jetty-launcher-377-thread-2) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@4fb22caa[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 141589 INFO (OverseerStateUpdate-72703216206086156-127.0.0.1:45515_solr-n_0000000000) [n:127.0.0.1:45515_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 141592 WARN (jetty-launcher-377-thread-2) [ ] o.e.j.u.s.S.config Trusting all certificates configured for Client@30d26740[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 141592 WARN (jetty-launcher-377-thread-2) [ ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@30d26740[provider=null,keyStore=null,trustStore=null]
[junit4] 2> 141593 INFO (jetty-launcher-377-thread-2) [ ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:42051/solr
[junit4] 2> 141603 INFO (jetty-launcher-377-thread-4) [n:127.0.0.1:45515_solr ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
[junit4] 2> 141614 INFO (jetty-launcher-377-thread-3) [n:127.0.0.1:41595_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 141615 INFO (jetty-launcher-377-thread-4) [n:127.0.0.1:45515_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@1c0a1aa5
[junit4] 2> 141621 INFO (jetty-launcher-377-thread-4) [n:127.0.0.1:45515_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@1c0a1aa5
[junit4] 2> 141621 INFO (jetty-launcher-377-thread-3) [n:127.0.0.1:41595_solr ] o.a.s.c.ZkController Publish node=127.0.0.1:41595_solr as DOWN
[junit4] 2> 141621 INFO (jetty-launcher-377-thread-4) [n:127.0.0.1:45515_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@1c0a1aa5
[junit4] 2> 141621 INFO (jetty-launcher-377-thread-3) [n:127.0.0.1:41595_solr ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
[junit4] 2> 141621 INFO (jetty-launcher-377-thread-3) [n:127.0.0.1:41595_solr ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:41595_solr
[junit4] 2> 141623 INFO (zkCallback-412-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 141626 INFO (jetty-launcher-377-thread-2) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 141628 INFO (jetty-launcher-377-thread-1) [n:127.0.0.1:43275_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
[junit4] 2> 141630 INFO (jetty-launcher-377-thread-1) [n:127.0.0.1:43275_solr ] o.a.s.c.ZkController Publish node=127.0.0.1:43275_solr as DOWN
[junit4] 2> 141630 INFO (jetty-launcher-377-thread-1) [n:127.0.0.1:43275_solr ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
[junit4] 2> 141630 INFO (jetty-launcher-377-thread-1) [n:127.0.0.1:43275_solr ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:43275_solr
[junit4] 2> 141632 INFO (zkCallback-412-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 141638 INFO (jetty-launcher-377-thread-4) [n:127.0.0.1:45515_solr ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_839C476749F4BE7C-001/tempDir-002/node4/.
[junit4] 2> 141641 INFO (jetty-launcher-377-thread-3) [n:127.0.0.1:41595_solr ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
[junit4] 2> 141651 INFO (zkCallback-410-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 141655 INFO (zkConnectionManagerCallback-426-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 141655 INFO (jetty-launcher-377-thread-2) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 141656 INFO (jetty-launcher-377-thread-1) [n:127.0.0.1:43275_solr ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
[junit4] 2> 141656 INFO (zkCallback-408-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (3)
[junit4] 2> 141673 INFO (jetty-launcher-377-thread-3) [n:127.0.0.1:41595_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@1c0a1aa5
[junit4] 2> 141686 INFO (jetty-launcher-377-thread-3) [n:127.0.0.1:41595_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@1c0a1aa5
[junit4] 2> 141686 INFO (jetty-launcher-377-thread-3) [n:127.0.0.1:41595_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@1c0a1aa5
[junit4] 2> 141690 INFO (jetty-launcher-377-thread-1) [n:127.0.0.1:43275_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@1c0a1aa5
[junit4] 2> 141696 INFO (jetty-launcher-377-thread-1) [n:127.0.0.1:43275_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@1c0a1aa5
[junit4] 2> 141696 INFO (jetty-launcher-377-thread-1) [n:127.0.0.1:43275_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@1c0a1aa5
[junit4] 2> 141778 INFO (jetty-launcher-377-thread-2) [n:127.0.0.1:39585_solr ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 141780 INFO (jetty-launcher-377-thread-3) [n:127.0.0.1:41595_solr ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_839C476749F4BE7C-001/tempDir-002/node3/.
[junit4] 2> 141780 INFO (jetty-launcher-377-thread-1) [n:127.0.0.1:43275_solr ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_839C476749F4BE7C-001/tempDir-002/node1/.
[junit4] 2> 141783 INFO (zkConnectionManagerCallback-433-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 141783 INFO (jetty-launcher-377-thread-2) [n:127.0.0.1:39585_solr ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 141796 INFO (jetty-launcher-377-thread-2) [n:127.0.0.1:39585_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
[junit4] 2> 141799 INFO (jetty-launcher-377-thread-2) [n:127.0.0.1:39585_solr ] o.a.s.c.ZkController Publish node=127.0.0.1:39585_solr as DOWN
[junit4] 2> 141800 INFO (jetty-launcher-377-thread-2) [n:127.0.0.1:39585_solr ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
[junit4] 2> 141800 INFO (jetty-launcher-377-thread-2) [n:127.0.0.1:39585_solr ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:39585_solr
[junit4] 2> 141801 INFO (zkCallback-410-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 141801 INFO (zkCallback-412-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 141801 INFO (zkCallback-408-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 141815 INFO (zkCallback-432-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 141816 INFO (jetty-launcher-377-thread-2) [n:127.0.0.1:39585_solr ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
[junit4] 2> 141829 INFO (jetty-launcher-377-thread-2) [n:127.0.0.1:39585_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@1c0a1aa5
[junit4] 2> 141838 INFO (jetty-launcher-377-thread-2) [n:127.0.0.1:39585_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@1c0a1aa5
[junit4] 2> 141838 INFO (jetty-launcher-377-thread-2) [n:127.0.0.1:39585_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@1c0a1aa5
[junit4] 2> 141842 INFO (jetty-launcher-377-thread-2) [n:127.0.0.1:39585_solr ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_839C476749F4BE7C-001/tempDir-002/node2/.
[junit4] 2> 141872 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[839C476749F4BE7C]) [ ] o.a.s.c.MiniSolrCloudCluster waitForAllNodes: numServers=4
[junit4] 2> 141873 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[839C476749F4BE7C]) [ ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
[junit4] 2> 141875 INFO (zkConnectionManagerCallback-442-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected
[junit4] 2> 141875 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[839C476749F4BE7C]) [ ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
[junit4] 2> 141876 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[839C476749F4BE7C]) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (4)
[junit4] 2> 141877 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[839C476749F4BE7C]) [ ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:42051/solr ready
[junit4] 2> 141884 INFO (qtp1378310446-1256) [n:127.0.0.1:39585_solr ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :overseerstatus with params action=OVERSEERSTATUS&wt=javabin&version=2 and sendToOCPQueue=true
[junit4] 2> 141891 INFO (qtp1378310446-1256) [n:127.0.0.1:39585_solr ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={action=OVERSEERSTATUS&wt=javabin&version=2} status=0 QTime=6
[junit4] 2> 141892 INFO (TEST-MoveReplicaHDFSTest.testNormalMove-seed#[839C476749F4BE7C]) [ ] o.a.s.c.MoveReplicaTest total_jettys: 4
[junit4] 2> 141893 INFO (qtp1378310446-1257) [n:127.0.0.1:39585_solr ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params pullReplicas=1&collection.configName=conf1&maxShardsPerNode=2&autoAddReplicas=false&name=MoveReplicaHDFSTest_coll_false&nrtReplicas=1&action=CREATE&numShards=2&tlogReplicas=0&wt=javabin&version=2 and sendToOCPQueue=true
[junit4] 2> 141895 INFO (OverseerCollectionConfigSetProcessor-72703216206086156-127.0.0.1:45515_solr-n_0000000000) [n:127.0.0.1:45515_solr ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000000 doesn't exist. Requestor may have disconnected from ZooKeeper
[junit4] 2> 141896 INFO (OverseerThreadFactory-381-thread-2-processing-n:127.0.0.1:45515_solr) [n:127.0.0.1:45515_solr ] o.a.s.c.a.c.CreateCollectionCmd Create collection MoveReplicaHDFSTest_coll_false
[junit4] 2> 142001 INFO (OverseerStateUpdate-72703216206086156-127.0.0.1:45515_solr-n_0000000000) [n:127.0.0.1:45515_solr ] o.a.s.c.o.SliceMutator createReplica() {
[junit4] 2> "operation":"ADDREPLICA",
[junit4] 2> "collection":"MoveReplicaHDFSTest_coll_false",
[junit4] 2> "shard":"shard1",
[junit4] 2> "core":"MoveReplicaHDFSTest_coll_false_shard1_replica_n1",
[junit4] 2> "state":"down",
[junit4] 2> "base_url":"http://127.0.0.1:45515/solr",
[junit4] 2> "type":"NRT",
[junit4] 2> "waitForFinalState":"false"}
[junit4] 2> 142004 INFO (OverseerStateUpdate-72703216206086156-127.0.0.1:45515_solr-n_0000000000) [n:127.0.0.1:45515_solr ] o.a.s.c.o.SliceMutator createReplica() {
[junit4] 2> "operation":"ADDREPLICA",
[junit4] 2> "collection":"MoveReplicaHDFSTest_coll_false",
[junit4] 2> "shard":"shard1",
[junit4] 2> "core":"MoveReplicaHDFSTest_coll_false_shard1_replica_p2",
[junit4] 2> "state":"down",
[junit4] 2> "base_url":"http://127.0.0.1:43275/solr",
[junit4] 2> "type":"PULL",
[junit4] 2> "waitForFinalState":"false"}
[junit4] 2> 142006 INFO (OverseerStateUpdate-72703216206086156-127.0.0.1:45515_solr-n_0000000000) [n:127.0.0.1:45515_solr ] o.a.s.c.o.SliceMutator createReplica() {
[junit4] 2> "operation":"ADDREPLICA",
[junit4] 2> "collection":"MoveReplicaHDFSTest_coll_false",
[junit4] 2> "shard":"shard2",
[junit4] 2> "core":"MoveReplicaHDFSTest_coll_false_shard2_replica_n4",
[junit4] 2> "state":"down",
[junit4] 2> "base_url":"http://127.0.0.1:39585/solr",
[junit4] 2> "type":"NRT",
[junit4] 2> "waitForFinalState":"false"}
[junit4] 2> 142008 INFO (OverseerStateUpdate-72703216206086156-127.0.0.1:45515_solr-n_0000000000) [n:127.0.0.1:45515_solr ] o.a.s.c.o.SliceMutator createReplica() {
[junit4] 2> "operation":"ADDREPLICA",
[junit4] 2> "collection":"MoveReplicaHDFSTest_coll_false",
[junit4] 2> "shard":"shard2",
[junit4] 2> "core":"MoveReplicaHDFSTest_coll_false_shard2_replica_p6",
[junit4] 2> "state":"down",
[junit4] 2> "base_url":"http://127.0.0.1:41595/solr",
[junit4] 2> "type":"PULL",
[junit4] 2> "waitForFinalState":"false"}
[junit4] 2> 142212 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node7&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard2_replica_n4&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard2&wt=javabin&version=2&replicaType=NRT
[junit4] 2> 142219 INFO (qtp2047143311-1251) [n:127.0.0.1:43275_solr x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node5&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard1_replica_p2&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard1&wt=javabin&version=2&replicaType=PULL
[junit4] 2> 142219 INFO (qtp1818016102-1268) [n:127.0.0.1:45515_solr x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node3&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard1_replica_n1&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard1&wt=javabin&version=2&replicaType=NRT
[junit4] 2> 142220 INFO (qtp1818016102-1268) [n:127.0.0.1:45515_solr x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
[junit4] 2> 142221 INFO (qtp901607236-1245) [n:127.0.0.1:41595_solr x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node8&collection.configName=conf1&newCollection=true&name=MoveReplicaHDFSTest_coll_false_shard2_replica_p6&action=CREATE&numShards=2&collection=MoveReplicaHDFSTest_coll_false&shard=shard2&wt=javabin&version=2&replicaType=PULL
[junit4] 2> 143243 INFO (qtp901607236-1245) [n:127.0.0.1:41595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
[junit4] 2> 143243 INFO (qtp2047143311-1251) [n:127.0.0.1:43275_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
[junit4] 2> 143247 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
[junit4] 2> 143248 INFO (qtp1818016102-1268) [n:127.0.0.1:45515_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
[junit4] 2> 143266 INFO (qtp1818016102-1268) [n:127.0.0.1:45515_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.s.IndexSchema [MoveReplicaHDFSTest_coll_false_shard1_replica_n1] Schema name=minimal
[junit4] 2> 143270 INFO (qtp2047143311-1251) [n:127.0.0.1:43275_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.s.IndexSchema [MoveReplicaHDFSTest_coll_false_shard1_replica_p2] Schema name=minimal
[junit4] 2> 143271 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.s.IndexSchema [MoveReplicaHDFSTest_coll_false_shard2_replica_n4] Schema name=minimal
[junit4] 2> 143272 INFO (qtp2047143311-1251) [n:127.0.0.1:43275_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
[junit4] 2> 143272 INFO (qtp2047143311-1251) [n:127.0.0.1:43275_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.CoreContainer Creating SolrCore 'MoveReplicaHDFSTest_coll_false_shard1_replica_p2' using configuration from collection MoveReplicaHDFSTest_coll_false, trusted=true
[junit4] 2> 143272 INFO (qtp2047143311-1251) [n:127.0.0.1:43275_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.MoveReplicaHDFSTest_coll_false.shard1.replica_p2' (registry 'solr.core.MoveReplicaHDFSTest_coll_false.shard1.replica_p2') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@1c0a1aa5
[junit4] 2> 143274 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
[junit4] 2> 143275 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.CoreContainer Creating SolrCore 'MoveReplicaHDFSTest_coll_false_shard2_replica_n4' using configuration from collection MoveReplicaHDFSTest_coll_false, trusted=true
[junit4] 2> 143275 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.MoveReplicaHDFSTest_coll_false.shard2.replica_n4' (registry 'solr.core.MoveReplicaHDFSTest_coll_false.shard2.replica_n4') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@1c0a1aa5
[junit4] 2> 143278 INFO (qtp1818016102-1268) [n:127.0.0.1:45515_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
[junit4] 2> 143278 INFO (qtp1818016102-1268) [n:127.0.0.1:45515_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.CoreContainer Creating SolrCore 'MoveReplicaHDFSTest_coll_false_shard1_replica_n1' using configuration from collection MoveReplicaHDFSTest_coll_false, trusted=true
[junit4] 2> 143279 INFO (qtp901607236-1245) [n:127.0.0.1:41595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.s.IndexSchema [MoveReplicaHDFSTest_coll_false_shard2_replica_p6] Schema name=minimal
[junit4] 2> 143279 INFO (qtp1818016102-1268) [n:127.0.0.1:45515_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.MoveReplicaHDFSTest_coll_false.shard1.replica_n1' (registry 'solr.core.MoveReplicaHDFSTest_coll_false.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@1c0a1aa5
[junit4] 2> 143282 INFO (qtp1818016102-1268) [n:127.0.0.1:45515_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost.localdomain:41417/solr_hdfs_home
[junit4] 2> 143282 INFO (qtp1818016102-1268) [n:127.0.0.1:45515_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
[junit4] 2> 143282 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost.localdomain:41417/solr_hdfs_home
[junit4] 2> 143282 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
[junit4] 2> 143282 INFO (qtp1818016102-1268) [n:127.0.0.1:45515_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.SolrCore [[MoveReplicaHDFSTest_coll_false_shard1_replica_n1] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_839C476749F4BE7C-001/tempDir-002/node4/MoveReplicaHDFSTest_coll_false_shard1_replica_n1], dataDir=[hdfs://localhost.localdomain:41417/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node3/data/]
[junit4] 2> 143282 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.SolrCore [[MoveReplicaHDFSTest_coll_false_shard2_replica_n4] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_839C476749F4BE7C-001/tempDir-002/node2/MoveReplicaHDFSTest_coll_false_shard2_replica_n4], dataDir=[hdfs://localhost.localdomain:41417/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node7/data/]
[junit4] 2> 143283 INFO (qtp901607236-1245) [n:127.0.0.1:41595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
[junit4] 2> 143283 INFO (qtp901607236-1245) [n:127.0.0.1:41595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.CoreContainer Creating SolrCore 'MoveReplicaHDFSTest_coll_false_shard2_replica_p6' using configuration from collection MoveReplicaHDFSTest_coll_false, trusted=true
[junit4] 2> 143284 INFO (qtp901607236-1245) [n:127.0.0.1:41595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.MoveReplicaHDFSTest_coll_false.shard2.replica_p6' (registry 'solr.core.MoveReplicaHDFSTest_coll_false.shard2.replica_p6') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@1c0a1aa5
[junit4] 2> 143284 INFO (qtp2047143311-1251) [n:127.0.0.1:43275_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost.localdomain:41417/solr_hdfs_home
[junit4] 2> 143284 INFO (qtp2047143311-1251) [n:127.0.0.1:43275_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
[junit4] 2> 143284 INFO (qtp2047143311-1251) [n:127.0.0.1:43275_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.SolrCore [[MoveReplicaHDFSTest_coll_false_shard1_replica_p2] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_839C476749F4BE7C-001/tempDir-002/node1/MoveReplicaHDFSTest_coll_false_shard1_replica_p2], dataDir=[hdfs://localhost.localdomain:41417/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node5/data/]
[junit4] 2> 143284 INFO (qtp1818016102-1268) [n:127.0.0.1:45515_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:41417/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node3/data/snapshot_metadata
[junit4] 2> 143285 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:41417/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node7/data/snapshot_metadata
[junit4] 2> 143287 INFO (qtp2047143311-1251) [n:127.0.0.1:43275_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:41417/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node5/data/snapshot_metadata
[junit4] 2> 143288 INFO (qtp901607236-1245) [n:127.0.0.1:41595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost.localdomain:41417/solr_hdfs_home
[junit4] 2> 143288 INFO (qtp901607236-1245) [n:127.0.0.1:41595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
[junit4] 2> 143288 INFO (qtp901607236-1245) [n:127.0.0.1:41595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.SolrCore [[MoveReplicaHDFSTest_coll_false_shard2_replica_p6] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.MoveReplicaHDFSTest_839C476749F4BE7C-001/tempDir-002/node3/MoveReplicaHDFSTest_coll_false_shard2_replica_p6], dataDir=[hdfs://localhost.localdomain:41417/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node8/data/]
[junit4] 2> 143288 INFO (qtp901607236-1245) [n:127.0.0.1:41595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:41417/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node8/data/snapshot_metadata
[junit4] 2> 143303 INFO (qtp1818016102-1268) [n:127.0.0.1:45515_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 143303 INFO (qtp901607236-1245) [n:127.0.0.1:41595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 143303 INFO (qtp1818016102-1268) [n:127.0.0.1:45515_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 143303 INFO (qtp1818016102-1268) [n:127.0.0.1:45515_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Creating new global HDFS BlockCache
[junit4] 2> 143304 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 143304 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 143304 INFO (qtp901607236-1245) [n:127.0.0.1:41595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 143305 INFO (qtp2047143311-1251) [n:127.0.0.1:43275_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 143305 INFO (qtp2047143311-1251) [n:127.0.0.1:43275_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 143388 INFO (qtp2047143311-1251) [n:127.0.0.1:43275_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 143390 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 143390 INFO (qtp901607236-1245) [n:127.0.0.1:41595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 143394 INFO (qtp2047143311-1251) [n:127.0.0.1:43275_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:41417/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node5/data
[junit4] 2> 143394 INFO (qtp1818016102-1268) [n:127.0.0.1:45515_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 143395 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:41417/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node7/data
[junit4] 2> 143400 INFO (qtp901607236-1245) [n:127.0.0.1:41595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:41417/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node8/data
[junit4] 2> 143403 INFO (qtp1818016102-1268) [n:127.0.0.1:45515_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:41417/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node3/data
[junit4] 2> 143428 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:41417/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node7/data/index
[junit4] 2> 143433 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 143433 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 143439 INFO (qtp901607236-1245) [n:127.0.0.1:41595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:41417/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node8/data/index
[junit4] 2> 143441 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 143447 INFO (qtp901607236-1245) [n:127.0.0.1:41595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 143447 INFO (qtp901607236-1245) [n:127.0.0.1:41595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 143451 INFO (qtp2047143311-1251) [n:127.0.0.1:43275_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:41417/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node5/data/index
[junit4] 2> 143459 INFO (qtp2047143311-1251) [n:127.0.0.1:43275_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 143459 INFO (qtp2047143311-1251) [n:127.0.0.1:43275_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 143462 INFO (qtp1818016102-1268) [n:127.0.0.1:45515_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:41417/solr_hdfs_home/MoveReplicaHDFSTest_coll_false/core_node3/data/index
[junit4] 2> 143464 INFO (qtp901607236-1245) [n:127.0.0.1:41595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 143465 INFO (qtp2047143311-1251) [n:127.0.0.1:43275_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node5 x:MoveReplicaHDFSTest_coll_false_shard1_replica_p2 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 143472 INFO (qtp1818016102-1268) [n:127.0.0.1:45515_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
[junit4] 2> 143472 INFO (qtp1818016102-1268) [n:127.0.0.1:45515_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
[junit4] 2> 143481 INFO (qtp1818016102-1268) [n:127.0.0.1:45515_solr c:MoveReplicaHDFSTest_coll_false s:shard1 r:core_node3 x:MoveReplicaHDFSTest_coll_false_shard1_replica_n1 ] o.a.s.s.b.BlockDirectory Block cache on write is disabled
[junit4] 2> 143748 INFO (qtp901607236-1245) [n:127.0.0.1:41595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
[junit4] 2> 143748 INFO (qtp901607236-1245) [n:127.0.0.1:41595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
[junit4] 2> 143761 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
[junit4] 2> 143761 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 143761 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
[junit4] 2> 143781 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
[junit4] 2> 143781 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
[junit4] 2> 143820 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@6ee2daac[MoveReplicaHDFSTest_coll_false_shard2_replica_n4] main]
[junit4] 2> 143822 INFO (qtp901607236-1245) [n:127.0.0.1:41595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@68f3c14a[MoveReplicaHDFSTest_coll_false_shard2_replica_p6] main]
[junit4] 2> 143822 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
[junit4] 2> 143822 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 143823 INFO (qtp901607236-1245) [n:127.0.0.1:41595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
[junit4] 2> 143823 INFO (qtp901607236-1245) [n:127.0.0.1:41595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 143824 INFO (qtp901607236-1245) [n:127.0.0.1:41595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
[junit4] 2> 143824 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
[junit4] 2> 143826 INFO (qtp1378310446-1259) [n:127.0.0.1:39585_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node7 x:MoveReplicaHDFSTest_coll_false_shard2_replica_n4 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1649860749618577408
[junit4] 2> 143827 INFO (searcherExecutor-401-thread-1-processing-n:127.0.0.1:41595_solr x:MoveReplicaHDFSTest_coll_false_shard2_replica_p6 c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8) [n:127.0.0.1:41595_solr c:MoveReplicaHDFSTest_coll_false s:shard2 r:core_node8 x:MoveR
[...truncated too long message...]
a.base@11.0.4/jdk.internal.ref.CleanerImpl.run(CleanerImpl.java:148)
[junit4] 2> java.base@11.0.4/java.lang.Thread.run(Thread.java:834)
[junit4] 2> java.base@11.0.4/jdk.internal.misc.InnocuousThread.run(InnocuousThread.java:134)
[junit4] 2>
[junit4] 2> Log4j2-TF-1-AsyncLoggerConfig-1:
[junit4] 2> java.base@11.0.4/jdk.internal.misc.Unsafe.park(Native Method)
[junit4] 2> java.base@11.0.4/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
[junit4] 2> java.base@11.0.4/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
[junit4] 2> app//com.lmax.disruptor.TimeoutBlockingWaitStrategy.waitFor(TimeoutBlockingWaitStrategy.java:38)
[junit4] 2> app//com.lmax.disruptor.ProcessingSequenceBarrier.waitFor(ProcessingSequenceBarrier.java:56)
[junit4] 2> app//com.lmax.disruptor.BatchEventProcessor.processEvents(BatchEventProcessor.java:159)
[junit4] 2> app//com.lmax.disruptor.BatchEventProcessor.run(BatchEventProcessor.java:125)
[junit4] 2> java.base@11.0.4/java.lang.Thread.run(Thread.java:834)
[junit4] 2>
[junit4] 2> ForkJoinPool.commonPool-worker-3:
[junit4] 2> java.base@11.0.4/jdk.internal.misc.Unsafe.park(Native Method)
[junit4] 2> java.base@11.0.4/java.util.concurrent.locks.LockSupport.parkUntil(LockSupport.java:275)
[junit4] 2> java.base@11.0.4/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1619)
[junit4] 2> java.base@11.0.4/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:177)
[junit4] 2>
[junit4] 2> JUnit4-serializer-daemon:
[junit4] 2> java.base@11.0.4/java.lang.Thread.sleep(Native Method)
[junit4] 2> app//com.carrotsearch.ant.tasks.junit4.events.Serializer$1.run(Serializer.java:50)
[junit4] 2>
[junit4] 2> 1614970 INFO (SUITE-CdcrBootstrapTest-seed#[839C476749F4BE7C]-worker) [ ] o.a.s.SolrTestCaseJ4 ------------------------------------------------------- Done waiting for tracked resources to be released
[junit4] 2> NOTE: test params are: codec=SimpleText, sim=Asserting(org.apache.lucene.search.similarities.AssertingSimilarity@343f77f0), locale=kn-IN, timezone=ART
[junit4] 2> NOTE: Linux 4.15.0-54-generic amd64/Oracle Corporation 11.0.4 (64-bit)/cpus=4,threads=1,free=327271920,total=536870912
[junit4] 2> NOTE: All tests run in this JVM: [TestScoreJoinQPNoScore, SimpleFacetsTest, ConfigureRecoveryStrategyTest, SpellCheckCollatorTest, CircularListTest, TestFunctionQuery, TestExplainDocTransformer, ZkShardTermsTest, DeleteStatusTest, TimeZoneUtilsTest, SuggesterFSTTest, SpellingQueryConverterTest, ExecutePlanActionTest, MoveReplicaHDFSTest, TestEmbeddedSolrServerConstructors, TestRangeQuery, TestSimpleTrackingShardHandler, SolrMetricManagerTest, UninvertDocValuesMergePolicyTest, TestCustomStream, CustomCollectionTest, SystemCollectionCompatTest, DistributedUpdateProcessorTest, ManagedSchemaRoundRobinCloudTest, TestQuerySenderListener, TestSolr4Spatial2, TestCollectionsAPIViaSolrCloudCluster, OutOfBoxZkACLAndCredentialsProvidersTest, TestLazyCores, TestStreamBody, CdcrRequestHandlerTest, TestMacroExpander, TestFieldSortValues, PreAnalyzedFieldTest, DistributedFacetPivotSmallTest, LegacyCloudClusterPropTest, TestCoreBackup, CoreAdminRequestStatusTest, TestSimComputePlanAction, BasicAuthOnSingleNodeTest, UnloadDistributedZkTest, TestHalfAndHalfDocValues, AliasIntegrationTest, TransactionLogTest, TestStressReorder, LeaderElectionIntegrationTest, TestSolrJ, TestShardHandlerFactory, VersionInfoTest, PingRequestHandlerTest, JvmMetricsTest, ResponseLogComponentTest, ProtectedTermFilterFactoryTest, PeerSyncTest, QueryEqualityTest, TestSchemaManager, ConvertedLegacyTest, HighlighterConfigTest, BinaryUpdateRequestHandlerTest, SegmentsInfoRequestHandlerTest, TriggerIntegrationTest, SearchRateTriggerIntegrationTest, TestDocumentBuilder, TestBlobHandler, TestPayloadScoreQParserPlugin, SignatureUpdateProcessorFactoryTest, TestSolrCloudWithDelegationTokens, SparseHLLTest, FieldAnalysisRequestHandlerTest, TestReloadDeadlock, CreateRoutedAliasTest, HdfsSyncSliceTest, IndexSizeEstimatorTest, TestManagedStopFilterFactory, TestReplicationHandlerBackup, JWTAuthPluginTest, TestExactSharedStatsCacheCloud, TestSolrXml, LeaderFailureAfterFreshStartTest, 
DistributedFacetPivotLargeTest, ResolveAnalyzerByNameTest, RoutingToNodesWithPropertiesTest, SolrCoreTest, SolrSlf4jReporterTest, TestComponentsName, RandomizedTaggerTest, DistributedDebugComponentTest, TestCoreContainer, PhrasesIdentificationComponentTest, TemplateUpdateProcessorTest, TestSimExecutePlanAction, TestCloudConsistency, TestLuceneMatchVersion, ResourceLoaderTest, XsltUpdateRequestHandlerTest, UpdateRequestProcessorFactoryTest, CloneFieldUpdateProcessorFactoryTest, TestFuzzyAnalyzedSuggestions, DocValuesMultiTest, TestCustomDocTransformer, DistributedIntervalFacetingTest, ScheduledMaintenanceTriggerTest, ChaosMonkeyNothingIsSafeWithPullReplicasTest, TestExactSharedStatsCache, TestPerFieldSimilarity, TestFieldCacheSort, TestTolerantSearch, RangeFacetCloudTest, DirectSolrSpellCheckerTest, AsyncCallRequestStatusResponseTest, ZkControllerTest, TestLegacyTerms, NodeLostTriggerTest, TestInfoStreamLogging, MigrateRouteKeyTest, HdfsChaosMonkeyNothingIsSafeTest, TestDistribDocBasedVersion, CdcrBootstrapTest]
[junit4] 2> NOTE: download the large Jenkins line-docs file by running 'ant get-jenkins-line-docs' in the lucene directory.
[junit4] 2> NOTE: reproduce with: ant test -Dtestcase=CdcrBootstrapTest -Dtests.seed=839C476749F4BE7C -Dtests.multiplier=2 -Dtests.nightly=true -Dtests.slow=true -Dtests.linedocsfile=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/test-data/enwiki.random.lines.txt -Dtests.locale=kn-IN -Dtests.timezone=ART -Dtests.asserts=true -Dtests.file.encoding=UTF-8
[junit4] ERROR 0.00s J2 | CdcrBootstrapTest (suite) <<<
[junit4] > Throwable #1: java.lang.AssertionError: ObjectTracker found 1 object(s) that were not released!!! [RawDirectoryWrapper]
[junit4] > org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.lucene.store.RawDirectoryWrapper
[junit4] > at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)
[junit4] > at org.apache.solr.core.CachingDirectoryFactory.get(CachingDirectoryFactory.java:348)
[junit4] > at org.apache.solr.handler.IndexFetcher.fetchLatestIndex(IndexFetcher.java:509)
[junit4] > at org.apache.solr.handler.IndexFetcher.fetchLatestIndex(IndexFetcher.java:351)
[junit4] > at org.apache.solr.handler.ReplicationHandler.doFetch(ReplicationHandler.java:422)
[junit4] > at org.apache.solr.handler.CdcrRequestHandler$BootstrapCallable.call(CdcrRequestHandler.java:772)
[junit4] > at org.apache.solr.handler.CdcrRequestHandler$BootstrapCallable.call(CdcrRequestHandler.java:727)
[junit4] > at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:202)
[junit4] > at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
[junit4] > at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:210)
[junit4] > at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
[junit4] > at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
[junit4] > at java.base/java.lang.Thread.run(Thread.java:834)
[junit4] > expected null, but was:<ObjectTracker found 1 object(s) that were not released!!! [RawDirectoryWrapper]
[junit4] > org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.lucene.store.RawDirectoryWrapper
[junit4] > at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)
[junit4] > at org.apache.solr.core.CachingDirectoryFactory.get(CachingDirectoryFactory.java:348)
[junit4] > at org.apache.solr.handler.IndexFetcher.fetchLatestIndex(IndexFetcher.java:509)
[junit4] > at org.apache.solr.handler.IndexFetcher.fetchLatestIndex(IndexFetcher.java:351)
[junit4] > at org.apache.solr.handler.ReplicationHandler.doFetch(ReplicationHandler.java:422)
[junit4] > at org.apache.solr.handler.CdcrRequestHandler$BootstrapCallable.call(CdcrRequestHandler.java:772)
[junit4] > at org.apache.solr.handler.CdcrRequestHandler$BootstrapCallable.call(CdcrRequestHandler.java:727)
[junit4] > at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:202)
[junit4] > at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
[junit4] > at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:210)
[junit4] > at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
[junit4] > at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
[junit4] > at java.base/java.lang.Thread.run(Thread.java:834)
[junit4] > >
[junit4] > at __randomizedtesting.SeedInfo.seed([839C476749F4BE7C]:0)
[junit4] > at org.apache.solr.SolrTestCaseJ4.teardownTestCases(SolrTestCaseJ4.java:342)
[junit4] > at jdk.internal.reflect.GeneratedMethodAccessor65.invoke(Unknown Source)
[junit4] > at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[junit4] > at java.base/java.lang.reflect.Method.invoke(Method.java:566)
[junit4] > at java.base/java.lang.Thread.run(Thread.java:834)
[junit4] Completed [320/888 (2!)] on J2 in 160.10s, 4 tests, 1 failure, 1 skipped <<< FAILURES!
[...truncated 54791 lines...]