Posted to dev@lucene.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2019/08/29 07:23:42 UTC

[JENKINS] Lucene-Solr-Tests-8.x - Build # 491 - Unstable

Build: https://builds.apache.org/job/Lucene-Solr-Tests-8.x/491/

2 tests failed.
FAILED:  org.apache.solr.cloud.api.collections.TestHdfsCloudBackupRestore.testRestoreFailure

Error Message:
Failed collection is still in the clusterstate: DocCollection(hdfsbackuprestore_testfailure_restored//collections/hdfsbackuprestore_testfailure_restored/state.json/2)={   "pullReplicas":0,   "replicationFactor":1,   "shards":{     "shard2":{       "range":"0-7fffffff",       "state":"construction",       "replicas":{"core_node2":{           "core":"hdfsbackuprestore_testfailure_restored_shard2_replica_n1",           "base_url":"https://127.0.0.1:36659/solr",           "node_name":"127.0.0.1:36659_solr",           "state":"down",           "type":"NRT",           "force_set_state":"false"}},       "stateTimestamp":"1567059232049688251"},     "shard1":{       "range":"80000000-ffffffff",       "state":"construction",       "replicas":{},       "stateTimestamp":"1567059232049701653"}},   "router":{"name":"compositeId"},   "maxShardsPerNode":"1",   "autoAddReplicas":"false",   "nrtReplicas":1,   "tlogReplicas":0} Expected: not a collection containing "hdfsbackuprestore_testfailure_restored"      but: was <[hdfsbackuprestore_testok, hdfsbackuprestore_testfailure_restored, hdfsbackuprestore_testfailure, hdfsbackuprestore_testok_restored]>

Stack Trace:
java.lang.AssertionError: Failed collection is still in the clusterstate: DocCollection(hdfsbackuprestore_testfailure_restored//collections/hdfsbackuprestore_testfailure_restored/state.json/2)={
  "pullReplicas":0,
  "replicationFactor":1,
  "shards":{
    "shard2":{
      "range":"0-7fffffff",
      "state":"construction",
      "replicas":{"core_node2":{
          "core":"hdfsbackuprestore_testfailure_restored_shard2_replica_n1",
          "base_url":"https://127.0.0.1:36659/solr",
          "node_name":"127.0.0.1:36659_solr",
          "state":"down",
          "type":"NRT",
          "force_set_state":"false"}},
      "stateTimestamp":"1567059232049688251"},
    "shard1":{
      "range":"80000000-ffffffff",
      "state":"construction",
      "replicas":{},
      "stateTimestamp":"1567059232049701653"}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"1",
  "autoAddReplicas":"false",
  "nrtReplicas":1,
  "tlogReplicas":0}
Expected: not a collection containing "hdfsbackuprestore_testfailure_restored"
     but: was <[hdfsbackuprestore_testok, hdfsbackuprestore_testfailure_restored, hdfsbackuprestore_testfailure, hdfsbackuprestore_testok_restored]>
	at __randomizedtesting.SeedInfo.seed([E037D74065656872:C94B49654D3C6B5F]:0)
	at org.hamcrest.MatcherAssert.assertThat(MatcherAssert.java:20)
	at org.junit.Assert.assertThat(Assert.java:956)
	at org.apache.solr.cloud.api.collections.AbstractCloudBackupRestoreTestCase.testRestoreFailure(AbstractCloudBackupRestoreTestCase.java:211)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.lang.Thread.run(Thread.java:748)
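
For context, the assertion at AbstractCloudBackupRestoreTestCase.java:211 (going by the Hamcrest text "Expected: not a collection containing ...") checks that a restore which is deliberately made to fail leaves no trace of its target collection in the cluster state. A minimal sketch of that kind of check, assuming SolrJ's CollectionAdminRequest.listCollections and JUnit/Hamcrest matchers; the class and method names below are illustrative, not the test's actual code:

    import static org.hamcrest.Matchers.hasItem;
    import static org.hamcrest.Matchers.not;
    import static org.junit.Assert.assertThat;

    import java.util.List;

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.request.CollectionAdminRequest;

    /** Hedged sketch of the cleanup check implied by the assertion message above. */
    class RestoreFailureCleanupCheck {
        // restoreCollectionName is "hdfsbackuprestore_testfailure_restored" in the HDFS
        // variant and "backuprestore_testfailure_restored" in the local-FS variant.
        static void assertFailedRestoreWasCleanedUp(SolrClient solrClient,
                                                    String restoreCollectionName) throws Exception {
            // Ask the cluster which collections it currently knows about.
            List<String> collections = CollectionAdminRequest.listCollections(solrClient);
            // The target of the failed restore must not linger in the cluster state.
            assertThat("Failed collection is still in the clusterstate: " + restoreCollectionName,
                       collections, not(hasItem(restoreCollectionName)));
        }
    }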


FAILED:  org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore.testRestoreFailure

Error Message:
Failed collection is still in the clusterstate: DocCollection(backuprestore_testfailure_restored//collections/backuprestore_testfailure_restored/state.json/2)={   "pullReplicas":0,   "replicationFactor":1,   "shards":{     "shard2":{       "range":"0-7fffffff",       "state":"construction",       "replicas":{"core_node2":{           "core":"backuprestore_testfailure_restored_shard2_replica_n1",           "base_url":"http://127.0.0.1:33205/solr",           "node_name":"127.0.0.1:33205_solr",           "state":"down",           "type":"NRT",           "force_set_state":"false"}},       "stateTimestamp":"1567060879213084847"},     "shard1":{       "range":"80000000-ffffffff",       "state":"construction",       "replicas":{},       "stateTimestamp":"1567060879213099152"}},   "router":{"name":"compositeId"},   "maxShardsPerNode":"1",   "autoAddReplicas":"false",   "nrtReplicas":1,   "tlogReplicas":0} Expected: not a collection containing "backuprestore_testfailure_restored"      but: was <[backuprestore_testok, backuprestore_testfailure, backuprestore_testfailure_restored, backuprestore_testok_restored]>

Stack Trace:
java.lang.AssertionError: Failed collection is still in the clusterstate: DocCollection(backuprestore_testfailure_restored//collections/backuprestore_testfailure_restored/state.json/2)={
  "pullReplicas":0,
  "replicationFactor":1,
  "shards":{
    "shard2":{
      "range":"0-7fffffff",
      "state":"construction",
      "replicas":{"core_node2":{
          "core":"backuprestore_testfailure_restored_shard2_replica_n1",
          "base_url":"http://127.0.0.1:33205/solr",
          "node_name":"127.0.0.1:33205_solr",
          "state":"down",
          "type":"NRT",
          "force_set_state":"false"}},
      "stateTimestamp":"1567060879213084847"},
    "shard1":{
      "range":"80000000-ffffffff",
      "state":"construction",
      "replicas":{},
      "stateTimestamp":"1567060879213099152"}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"1",
  "autoAddReplicas":"false",
  "nrtReplicas":1,
  "tlogReplicas":0}
Expected: not a collection containing "backuprestore_testfailure_restored"
     but: was <[backuprestore_testok, backuprestore_testfailure, backuprestore_testfailure_restored, backuprestore_testok_restored]>
	at __randomizedtesting.SeedInfo.seed([E037D74065656872:C94B49654D3C6B5F]:0)
	at org.hamcrest.MatcherAssert.assertThat(MatcherAssert.java:20)
	at org.junit.Assert.assertThat(Assert.java:956)
	at org.apache.solr.cloud.api.collections.AbstractCloudBackupRestoreTestCase.testRestoreFailure(AbstractCloudBackupRestoreTestCase.java:211)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.lang.Thread.run(Thread.java:748)




Build Log:
[...truncated 13726 lines...]
   [junit4] Suite: org.apache.solr.cloud.api.collections.TestHdfsCloudBackupRestore
   [junit4]   1> Formatting using clusterid: testClusterID
   [junit4]   2> 439279 WARN  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.h.m.i.MetricsConfig Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
   [junit4]   2> 439296 WARN  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 439298 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
   [junit4]   2> 439300 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 439300 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 439300 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 439301 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@2774068b{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 439459 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@f007949{hdfs,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/jetty-localhost.localdomain-36239-hdfs-_-any-924387434669286531.dir/webapp/,AVAILABLE}{/hdfs}
   [junit4]   2> 439460 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.AbstractConnector Started ServerConnector@7f6b887c{HTTP/1.1,[http/1.1]}{localhost.localdomain:36239}
   [junit4]   2> 439461 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.Server Started @439527ms
   [junit4]   2> 439553 WARN  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 439556 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
   [junit4]   2> 439556 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 439556 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 439557 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 439557 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@6adf3fad{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 439714 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@1c703108{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/jetty-localhost-37543-datanode-_-any-7314119788980653551.dir/webapp/,AVAILABLE}{/datanode}
   [junit4]   2> 439715 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.AbstractConnector Started ServerConnector@7162d9c9{HTTP/1.1,[http/1.1]}{localhost:37543}
   [junit4]   2> 439715 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.Server Started @439781ms
   [junit4]   2> 439791 WARN  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 439792 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
   [junit4]   2> 439794 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 439794 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 439794 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 439795 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@11532006{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 439974 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0xcac9d599fa230d3: Processing first storage report for DS-d29fa2ae-e164-4cca-aa51-f36bddc1bd73 from datanode f9a9e1ed-6c2b-46ce-b8bb-7bae1b0f893d
   [junit4]   2> 439974 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0xcac9d599fa230d3: from storage DS-d29fa2ae-e164-4cca-aa51-f36bddc1bd73 node DatanodeRegistration(127.0.0.1:38477, datanodeUuid=f9a9e1ed-6c2b-46ce-b8bb-7bae1b0f893d, infoPort=34367, infoSecurePort=0, ipcPort=41639, storageInfo=lv=-57;cid=testClusterID;nsid=968518402;c=1567059213337), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 439974 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0xcac9d599fa230d3: Processing first storage report for DS-1c4da6b9-2544-4f1f-b527-c4142a5267fd from datanode f9a9e1ed-6c2b-46ce-b8bb-7bae1b0f893d
   [junit4]   2> 439974 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0xcac9d599fa230d3: from storage DS-1c4da6b9-2544-4f1f-b527-c4142a5267fd node DatanodeRegistration(127.0.0.1:38477, datanodeUuid=f9a9e1ed-6c2b-46ce-b8bb-7bae1b0f893d, infoPort=34367, infoSecurePort=0, ipcPort=41639, storageInfo=lv=-57;cid=testClusterID;nsid=968518402;c=1567059213337), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 440012 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@397cc67d{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/jetty-localhost-42969-datanode-_-any-8925702212772852981.dir/webapp/,AVAILABLE}{/datanode}
   [junit4]   2> 440012 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.AbstractConnector Started ServerConnector@4892d943{HTTP/1.1,[http/1.1]}{localhost:42969}
   [junit4]   2> 440012 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.Server Started @440079ms
   [junit4]   2> 440173 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0xb57ad50ac595db20: Processing first storage report for DS-ae78d8eb-dd57-4c19-ae6e-ea8f8519c130 from datanode d9c8819b-1365-4c42-ae05-ffe965768d2c
   [junit4]   2> 440173 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0xb57ad50ac595db20: from storage DS-ae78d8eb-dd57-4c19-ae6e-ea8f8519c130 node DatanodeRegistration(127.0.0.1:42413, datanodeUuid=d9c8819b-1365-4c42-ae05-ffe965768d2c, infoPort=40741, infoSecurePort=0, ipcPort=34355, storageInfo=lv=-57;cid=testClusterID;nsid=968518402;c=1567059213337), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 440173 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0xb57ad50ac595db20: Processing first storage report for DS-ef48ae90-a08b-4a82-9795-00787d190e45 from datanode d9c8819b-1365-4c42-ae05-ffe965768d2c
   [junit4]   2> 440173 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0xb57ad50ac595db20: from storage DS-ef48ae90-a08b-4a82-9795-00787d190e45 node DatanodeRegistration(127.0.0.1:42413, datanodeUuid=d9c8819b-1365-4c42-ae05-ffe965768d2c, infoPort=40741, infoSecurePort=0, ipcPort=34355, storageInfo=lv=-57;cid=testClusterID;nsid=968518402;c=1567059213337), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 440259 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.MiniSolrCloudCluster Starting cluster of 2 servers in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002
   [junit4]   2> 440260 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 440260 INFO  (ZkTestServer Run Thread) [     ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 440260 INFO  (ZkTestServer Run Thread) [     ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 440360 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.ZkTestServer start zk server on port:45147
   [junit4]   2> 440360 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.ZkTestServer waitForServerUp: 127.0.0.1:45147
   [junit4]   2> 440360 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:45147
   [junit4]   2> 440360 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 45147
   [junit4]   2> 440363 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 440367 INFO  (zkConnectionManagerCallback-2523-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 440367 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 440371 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 440372 INFO  (zkConnectionManagerCallback-2525-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 440372 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 440376 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 440378 INFO  (zkConnectionManagerCallback-2527-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 440378 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 440486 WARN  (jetty-launcher-2528-thread-1) [     ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 440486 WARN  (jetty-launcher-2528-thread-2) [     ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 440487 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 440487 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 440487 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 440487 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 440487 INFO  (jetty-launcher-2528-thread-2) [     ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
   [junit4]   2> 440487 INFO  (jetty-launcher-2528-thread-1) [     ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
   [junit4]   2> 440495 INFO  (jetty-launcher-2528-thread-1) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 440495 INFO  (jetty-launcher-2528-thread-1) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 440496 INFO  (jetty-launcher-2528-thread-1) [     ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 440496 INFO  (jetty-launcher-2528-thread-2) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 440496 INFO  (jetty-launcher-2528-thread-2) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 440496 INFO  (jetty-launcher-2528-thread-2) [     ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 440496 INFO  (jetty-launcher-2528-thread-1) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@45e0eb05{/solr,null,AVAILABLE}
   [junit4]   2> 440496 INFO  (jetty-launcher-2528-thread-2) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@2f3604af{/solr,null,AVAILABLE}
   [junit4]   2> 440497 INFO  (jetty-launcher-2528-thread-2) [     ] o.e.j.s.AbstractConnector Started ServerConnector@224c8694{SSL,[ssl, http/1.1]}{127.0.0.1:36659}
   [junit4]   2> 440497 INFO  (jetty-launcher-2528-thread-2) [     ] o.e.j.s.Server Started @440564ms
   [junit4]   2> 440497 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=36659}
   [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-1) [     ] o.e.j.s.AbstractConnector Started ServerConnector@341995c6{SSL,[ssl, http/1.1]}{127.0.0.1:46735}
   [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-1) [     ] o.e.j.s.Server Started @440564ms
   [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=46735}
   [junit4]   2> 440498 ERROR (jetty-launcher-2528-thread-2) [     ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 440498 ERROR (jetty-launcher-2528-thread-1) [     ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 8.3.0
   [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 8.3.0
   [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-08-29T06:13:34.605Z
   [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-08-29T06:13:34.605Z
   [junit4]   2> 440500 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 440503 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 440503 INFO  (zkConnectionManagerCallback-2530-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 440503 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 440504 INFO  (zkConnectionManagerCallback-2532-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 440504 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 440504 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 440505 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 440523 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 440530 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 440887 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 440888 WARN  (jetty-launcher-2528-thread-2) [     ] o.a.s.c.s.i.Http2SolrClient Create Http2SolrClient with HTTP/1.1 transport since Java 8 or lower versions does not support SSL + HTTP/2
   [junit4]   2> 440889 WARN  (jetty-launcher-2528-thread-2) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@4b3ef0b7[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 440889 WARN  (jetty-launcher-2528-thread-2) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@4b3ef0b7[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 440890 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 440891 WARN  (jetty-launcher-2528-thread-1) [     ] o.a.s.c.s.i.Http2SolrClient Create Http2SolrClient with HTTP/1.1 transport since Java 8 or lower versions does not support SSL + HTTP/2
   [junit4]   2> 440893 WARN  (jetty-launcher-2528-thread-1) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@7637eb82[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 440893 WARN  (jetty-launcher-2528-thread-1) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@7637eb82[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 440895 WARN  (jetty-launcher-2528-thread-2) [     ] o.a.s.c.s.i.Http2SolrClient Create Http2SolrClient with HTTP/1.1 transport since Java 8 or lower versions does not support SSL + HTTP/2
   [junit4]   2> 440901 WARN  (jetty-launcher-2528-thread-1) [     ] o.a.s.c.s.i.Http2SolrClient Create Http2SolrClient with HTTP/1.1 transport since Java 8 or lower versions does not support SSL + HTTP/2
   [junit4]   2> 440901 WARN  (jetty-launcher-2528-thread-2) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@74bd9149[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 440901 WARN  (jetty-launcher-2528-thread-2) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@74bd9149[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 440902 WARN  (jetty-launcher-2528-thread-1) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@18e664b8[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 440902 WARN  (jetty-launcher-2528-thread-1) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@18e664b8[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 440903 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:45147/solr
   [junit4]   2> 440903 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:45147/solr
   [junit4]   2> 440905 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 440906 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 440906 INFO  (zkConnectionManagerCallback-2546-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 440906 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 440911 INFO  (zkConnectionManagerCallback-2544-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 440911 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 441011 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 441014 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 441014 INFO  (zkConnectionManagerCallback-2548-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 441014 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 441024 INFO  (zkConnectionManagerCallback-2550-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 441024 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 441203 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:46735_solr
   [junit4]   2> 441205 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.Overseer Overseer (id=72285712308305927-127.0.0.1:46735_solr-n_0000000000) starting
   [junit4]   2> 441223 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 441231 INFO  (zkConnectionManagerCallback-2559-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 441231 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 441237 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:45147/solr ready
   [junit4]   2> 441243 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:36659_solr
   [junit4]   2> 441246 INFO  (OverseerStateUpdate-72285712308305927-127.0.0.1:46735_solr-n_0000000000) [n:127.0.0.1:46735_solr     ] o.a.s.c.Overseer Starting to work on the main queue : 127.0.0.1:46735_solr
   [junit4]   2> 441247 INFO  (OverseerStateUpdate-72285712308305927-127.0.0.1:46735_solr-n_0000000000) [n:127.0.0.1:46735_solr     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 441257 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.ZkController Publish node=127.0.0.1:46735_solr as DOWN
   [junit4]   2> 441259 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 transient cores
   [junit4]   2> 441259 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:46735_solr
   [junit4]   2> 441262 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.PackageManager clusterprops.json changed , version 0
   [junit4]   2> 441262 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Added backup repository with configuration params {type = repository,name = hdfs,class = org.apache.solr.core.backup.repository.HdfsBackupRepository,attributes = {name=hdfs, class=org.apache.solr.core.backup.repository.HdfsBackupRepository},args = {location=/backup,solr.hdfs.home=hdfs://localhost.localdomain:46481/solr,solr.hdfs.confdir=}}
   [junit4]   2> 441262 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Added backup repository with configuration params {type = repository,name = poisioned,class = org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository,attributes = {default=true, name=poisioned, class=org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository},}
   [junit4]   2> 441262 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Default configuration for backup repository is with configuration params {type = repository,name = poisioned,class = org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository,attributes = {default=true, name=poisioned, class=org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository},}
   [junit4]   2> 441267 INFO  (zkCallback-2547-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 441274 INFO  (zkCallback-2558-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 441277 INFO  (zkCallback-2549-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 441280 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 441301 INFO  (zkConnectionManagerCallback-2564-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 441301 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 441302 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 441305 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:45147/solr ready
   [junit4]   2> 441306 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.c.PackageManager clusterprops.json changed , version 0
   [junit4]   2> 441306 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Added backup repository with configuration params {type = repository,name = hdfs,class = org.apache.solr.core.backup.repository.HdfsBackupRepository,attributes = {name=hdfs, class=org.apache.solr.core.backup.repository.HdfsBackupRepository},args = {location=/backup,solr.hdfs.home=hdfs://localhost.localdomain:46481/solr,solr.hdfs.confdir=}}
   [junit4]   2> 441306 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Added backup repository with configuration params {type = repository,name = poisioned,class = org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository,attributes = {default=true, name=poisioned, class=org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository},}
   [junit4]   2> 441306 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Default configuration for backup repository is with configuration params {type = repository,name = poisioned,class = org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository,attributes = {default=true, name=poisioned, class=org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository},}
   [junit4]   2> 441332 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 441378 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 441409 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267
   [junit4]   2> 441436 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267
   [junit4]   2> 441444 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267
   [junit4]   2> 441444 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267
   [junit4]   2> 441445 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node1/.
   [junit4]   2> 441454 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267
   [junit4]   2> 441454 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267
   [junit4]   2> 441456 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node2/.
   [junit4]   2> 441578 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.MiniSolrCloudCluster waitForAllNodes: numServers=2
   [junit4]   2> 441579 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 441584 INFO  (zkConnectionManagerCallback-2571-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 441585 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 441588 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 441607 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:45147/solr ready
   [junit4]   2> 441715 INFO  (qtp1840676713-6927) [n:127.0.0.1:36659_solr     ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&router.name=implicit&version=2&pullReplicas=0&shards=shard1,shard2&property.customKey=customValue&maxShardsPerNode=3&router.field=shard_s&autoAddReplicas=true&name=hdfsbackuprestore_testok&nrtReplicas=2&action=CREATE&tlogReplicas=1&wt=javabin and sendToOCPQueue=true
   [junit4]   2> 441723 INFO  (OverseerThreadFactory-1679-thread-1-processing-n:127.0.0.1:46735_solr) [n:127.0.0.1:46735_solr     ] o.a.s.c.a.c.CreateCollectionCmd Create collection hdfsbackuprestore_testok
   [junit4]   2> 441830 WARN  (OverseerThreadFactory-1679-thread-1-processing-n:127.0.0.1:46735_solr) [n:127.0.0.1:46735_solr     ] o.a.s.c.a.c.CreateCollectionCmd Specified number of replicas of 3 on collection hdfsbackuprestore_testok is higher than the number of Solr instances currently live or live and part of your createNodeSet(2). It's unusual to run two replica of the same slice on the same Solr-instance.
   [junit4]   2> 441836 INFO  (OverseerStateUpdate-72285712308305927-127.0.0.1:46735_solr-n_0000000000) [n:127.0.0.1:46735_solr     ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"hdfsbackuprestore_testok",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"hdfsbackuprestore_testok_shard1_replica_n1",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"https://127.0.0.1:46735/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 441841 INFO  (OverseerStateUpdate-72285712308305927-127.0.0.1:46735_solr-n_0000000000) [n:127.0.0.1:46735_solr     ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"hdfsbackuprestore_testok",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"hdfsbackuprestore_testok_shard1_replica_n2",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"https://127.0.0.1:36659/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 441847 INFO  (OverseerStateUpdate-72285712308305927-127.0.0.1:46735_solr-n_0000000000) [n:127.0.0.1:46735_solr     ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"hdfsbackuprestore_testok",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"hdfsbackuprestore_testok_shard1_replica_t4",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"https://127.0.0.1:46735/solr",
   [junit4]   2>   "type":"TLOG",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 441852 INFO  (OverseerStateUpdate-72285712308305927-127.0.0.1:46735_solr-n_0000000000) [n:127.0.0.1:46735_solr     ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"hdfsbackuprestore_testok",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"hdfsbackuprestore_testok_shard2_replica_n7",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"https://127.0.0.1:36659/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 441855 INFO  (OverseerStateUpdate-72285712308305927-127.0.0.1:46735_solr-n_0000000000) [n:127.0.0.1:46735_solr     ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"hdfsbackuprestore_testok",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"hdfsbackuprestore_testok_shard2_replica_n8",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"https://127.0.0.1:46735/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 441859 INFO  (OverseerStateUpdate-72285712308305927-127.0.0.1:46735_solr-n_0000000000) [n:127.0.0.1:46735_solr     ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"hdfsbackuprestore_testok",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"hdfsbackuprestore_testok_shard2_replica_t10",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"https://127.0.0.1:36659/solr",
   [junit4]   2>   "type":"TLOG",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 442066 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr    x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&collection=hdfsbackuprestore_testok&version=2&replicaType=NRT&property.customKey=customValue&coreNodeName=core_node5&name=hdfsbackuprestore_testok_shard1_replica_n2&action=CREATE&numShards=2&shard=shard1&wt=javabin
   [junit4]   2> 442066 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr    x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 transient cores
   [junit4]   2> 442078 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr    x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&collection=hdfsbackuprestore_testok&version=2&replicaType=NRT&property.customKey=customValue&coreNodeName=core_node9&name=hdfsbackuprestore_testok_shard2_replica_n7&action=CREATE&numShards=2&shard=shard2&wt=javabin
   [junit4]   2> 442084 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr    x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&collection=hdfsbackuprestore_testok&version=2&replicaType=TLOG&property.customKey=customValue&coreNodeName=core_node12&name=hdfsbackuprestore_testok_shard2_replica_t10&action=CREATE&numShards=2&shard=shard2&wt=javabin
   [junit4]   2> 442098 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr    x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&collection=hdfsbackuprestore_testok&version=2&replicaType=TLOG&property.customKey=customValue&coreNodeName=core_node6&name=hdfsbackuprestore_testok_shard1_replica_t4&action=CREATE&numShards=2&shard=shard1&wt=javabin
   [junit4]   2> 442101 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr    x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&collection=hdfsbackuprestore_testok&version=2&replicaType=NRT&property.customKey=customValue&coreNodeName=core_node3&name=hdfsbackuprestore_testok_shard1_replica_n1&action=CREATE&numShards=2&shard=shard1&wt=javabin
   [junit4]   2> 442112 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr    x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&collection=hdfsbackuprestore_testok&version=2&replicaType=NRT&property.customKey=customValue&coreNodeName=core_node11&name=hdfsbackuprestore_testok_shard2_replica_n8&action=CREATE&numShards=2&shard=shard2&wt=javabin
   [junit4]   2> 443135 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.3.0
   [junit4]   2> 443135 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.3.0
   [junit4]   2> 443138 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.3.0
   [junit4]   2> 443144 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.3.0
   [junit4]   2> 443144 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.3.0
   [junit4]   2> 443151 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.3.0
   [junit4]   2> 443188 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.s.IndexSchema [hdfsbackuprestore_testok_shard1_replica_n1] Schema name=minimal
   [junit4]   2> 443204 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.s.IndexSchema [hdfsbackuprestore_testok_shard1_replica_t4] Schema name=minimal
   [junit4]   2> 443207 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 443208 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.c.CoreContainer Creating SolrCore 'hdfsbackuprestore_testok_shard1_replica_t4' using configuration from collection hdfsbackuprestore_testok, trusted=true
   [junit4]   2> 443208 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.hdfsbackuprestore_testok.shard1.replica_t4' (registry 'solr.core.hdfsbackuprestore_testok.shard1.replica_t4') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267
   [junit4]   2> 443215 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.s.IndexSchema [hdfsbackuprestore_testok_shard2_replica_n7] Schema name=minimal
   [junit4]   2> 443218 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.s.IndexSchema [hdfsbackuprestore_testok_shard1_replica_n2] Schema name=minimal
   [junit4]   2> 443218 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 443218 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.c.CoreContainer Creating SolrCore 'hdfsbackuprestore_testok_shard2_replica_n7' using configuration from collection hdfsbackuprestore_testok, trusted=true
   [junit4]   2> 443219 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.hdfsbackuprestore_testok.shard2.replica_n7' (registry 'solr.core.hdfsbackuprestore_testok.shard2.replica_n7') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267
   [junit4]   2> 443222 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.s.IndexSchema [hdfsbackuprestore_testok_shard2_replica_t10] Schema name=minimal
   [junit4]   2> 443226 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 443226 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.CoreContainer Creating SolrCore 'hdfsbackuprestore_testok_shard1_replica_n1' using configuration from collection hdfsbackuprestore_testok, trusted=true
   [junit4]   2> 443227 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.hdfsbackuprestore_testok.shard1.replica_n1' (registry 'solr.core.hdfsbackuprestore_testok.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267
   [junit4]   2> 443230 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 443231 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.c.CoreContainer Creating SolrCore 'hdfsbackuprestore_testok_shard1_replica_n2' using configuration from collection hdfsbackuprestore_testok, trusted=true
   [junit4]   2> 443231 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.c.SolrCore [[hdfsbackuprestore_testok_shard2_replica_n7] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node2/hdfsbackuprestore_testok_shard2_replica_n7], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node2/./hdfsbackuprestore_testok_shard2_replica_n7/data/]
   [junit4]   2> 443232 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.hdfsbackuprestore_testok.shard1.replica_n2' (registry 'solr.core.hdfsbackuprestore_testok.shard1.replica_n2') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267
   [junit4]   2> 443232 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.c.SolrCore [[hdfsbackuprestore_testok_shard1_replica_n2] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node2/hdfsbackuprestore_testok_shard1_replica_n2], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node2/./hdfsbackuprestore_testok_shard1_replica_n2/data/]
   [junit4]   2> 443233 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.c.SolrCore [[hdfsbackuprestore_testok_shard1_replica_t4] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node1/hdfsbackuprestore_testok_shard1_replica_t4], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node1/./hdfsbackuprestore_testok_shard1_replica_t4/data/]
   [junit4]   2> 443234 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 443234 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.c.CoreContainer Creating SolrCore 'hdfsbackuprestore_testok_shard2_replica_t10' using configuration from collection hdfsbackuprestore_testok, trusted=true
   [junit4]   2> 443235 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.hdfsbackuprestore_testok.shard2.replica_t10' (registry 'solr.core.hdfsbackuprestore_testok.shard2.replica_t10') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267
   [junit4]   2> 443235 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.c.SolrCore [[hdfsbackuprestore_testok_shard2_replica_t10] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node2/hdfsbackuprestore_testok_shard2_replica_t10], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node2/./hdfsbackuprestore_testok_shard2_replica_t10/data/]
   [junit4]   2> 443236 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.SolrCore [[hdfsbackuprestore_testok_shard1_replica_n1] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node1/hdfsbackuprestore_testok_shard1_replica_n1], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node1/./hdfsbackuprestore_testok_shard1_replica_n1/data/]
   [junit4]   2> 443240 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.s.IndexSchema [hdfsbackuprestore_testok_shard2_replica_n8] Schema name=minimal
   [junit4]   2> 443243 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 443243 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.c.CoreContainer Creating SolrCore 'hdfsbackuprestore_testok_shard2_replica_n8' using configuration from collection hdfsbackuprestore_testok, trusted=true
   [junit4]   2> 443244 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.hdfsbackuprestore_testok.shard2.replica_n8' (registry 'solr.core.hdfsbackuprestore_testok.shard2.replica_n8') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267
   [junit4]   2> 443244 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.c.SolrCore [[hdfsbackuprestore_testok_shard2_replica_n8] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node1/hdfsbackuprestore_testok_shard2_replica_n8], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node1/./hdfsbackuprestore_testok_shard2_replica_n8/data/]
   [junit4]   2> 443410 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 443410 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 443412 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 443412 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 443418 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@5d6f00f3[hdfsbackuprestore_testok_shard1_replica_n1] main]
   [junit4]   2> 443427 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 443428 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 443432 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 443432 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1643180686090174464
   [junit4]   2> 443440 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/hdfsbackuprestore_testok/terms/shard1 to Terms{values={core_node3=0}, version=0}
   [junit4]   2> 443441 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/hdfsbackuprestore_testok/leaders/shard1
   [junit4]   2> 443444 INFO  (searcherExecutor-1690-thread-1-processing-n:127.0.0.1:46735_solr x:hdfsbackuprestore_testok_shard1_replica_n1 c:hdfsbackuprestore_testok s:shard1 r:core_node3) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.SolrCore [hdfsbackuprestore_testok_shard1_replica_n1] Registered new searcher Searcher@5d6f00f3[hdfsbackuprestore_testok_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 443449 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext Waiting until we see more replicas up for shard shard1: total=3 found=1 timeoutin=9999ms
   [junit4]   2> 443462 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 443462 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 443462 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 443462 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 443463 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 443463 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 443463 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 443464 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 443466 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 443466 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 443467 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@7add75ac[hdfsbackuprestore_testok_shard1_replica_n2] main]
   [junit4]   2> 443467 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 443467 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 443470 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@abe7412[hdfsbackuprestore_testok_shard1_replica_t4] main]
   [junit4]   2> 443473 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 443475 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 443475 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 443476 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1643180686136311808
   [junit4]   2> 443484 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 443484 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@40bbb2b8[hdfsbackuprestore_testok_shard2_replica_n8] main]
   [junit4]   2> 443485 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 443485 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 443485 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1643180686145748992
   [junit4]   2> 443487 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 443488 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 443488 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 443489 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1643180686148894720
   [junit4]   2> 443494 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 443494 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 443494 INFO  (searcherExecutor-1691-thread-1-processing-n:127.0.0.1:36659_solr x:hdfsbackuprestore_testok_shard1_replica_n2 c:hdfsbackuprestore_testok s:shard1 r:core_node5) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.c.SolrCore [hdfsbackuprestore_testok_shard1_replica_n2] Registered new searcher Searcher@7add75ac[hdfsbackuprestore_testok_shard1_replica_n2] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 443495 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 443495 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 443496 INFO  (searcherExecutor-1693-thread-1-processing-n:127.0.0.1:46735_solr x:hdfsbackuprestore_testok_shard2_replica_n8 c:hdfsbackuprestore_testok s:shard2 r:core_node11) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.c.SolrCore [hdfsbackuprestore_testok_shard2_replica_n8] Registered new searcher Searcher@40bbb2b8[hdfsbackuprestore_testok_shard2_replica_n8] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 443497 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/hdfsbackuprestore_testok/terms/shard1 to Terms{values={core_node6=0, core_node3=0}, version=1}
   [junit4]   2> 443498 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/hdfsbackuprestore_testok/leaders/shard1
   [junit4]   2> 443500 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@69173d6[hdfsbackuprestore_testok_shard2_replica_t10] main]
   [junit4]   2> 443501 INFO  (searcherExecutor-1688-thread-1-processing-n:127.0.0.1:46735_solr x:hdfsbackuprestore_testok_shard1_replica_t4 c:hdfsbackuprestore_testok s:shard1 r:core_node6) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.c.SolrCore [hdfsbackuprestore_testok_shard1_replica_t4] Registered new searcher Searcher@abe7412[hdfsbackuprestore_testok_shard1_replica_t4] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 443503 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 443504 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 443505 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 443505 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1643180686166720512
   [junit4]   2> 443505 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/hdfsbackuprestore_testok/terms/shard1 to Terms{values={core_node6=0, core_node3=0, core_node5=0}, version=2}
   [junit4]   2> 443505 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/hdfsbackuprestore_testok/leaders/shard1
   [junit4]   2> 443514 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/hdfsbackuprestore_testok/terms/shard2 to Terms{values={core_node11=0}, version=0}
   [junit4]   2> 443514 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/hdfsbackuprestore_testok/leaders/shard2
   [junit4]   2> 443520 INFO  (searcherExecutor-1692-thread-1-processing-n:127.0.0.1:36659_solr x:hdfsbackuprestore_testok_shard2_replica_t10 c:hdfsbackuprestore_testok s:shard2 r:core_node12) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.c.SolrCore [hdfsbackuprestore_testok_shard2_replica_t10] Registered new searcher Searcher@69173d6[hdfsbackuprestore_testok_shard2_replica_t10] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 443521 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/hdfsbackuprestore_testok/terms/shard2 to Terms{values={core_node12=0, core_node11=0}, version=1}
   [junit4]   2> 443524 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.c.ShardLeaderElectionContext Waiting until we see more replicas up for shard shard2: total=3 found=1 timeoutin=9998ms
   [junit4]   2> 443530 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/hdfsbackuprestore_testok/leaders/shard2
   [junit4]   2> 443530 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 443530 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 443532 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 443532 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 443536 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@50d2c5a[hdfsbackuprestore_testok_shard2_replica_n7] main]
   [junit4]   2> 443538 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 443538 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 443539 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 443539 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1643180686202372096
   [junit4]   2> 443545 INFO  (searcherExecutor-1689-thread-1-processing-n:127.0.0.1:36659_solr x:hdfsbackuprestore_testok_shard2_replica_n7 c:hdfsbackuprestore_testok s:shard2 r:core_node9) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.c.SolrCore [hdfsbackuprestore_testok_shard2_replica_n7] Registered new searcher Searcher@50d2c5a[hdfsbackuprestore_testok_shard2_replica_n7] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 443546 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/hdfsbackuprestore_testok/terms/shard2 to Terms{values={core_node12=0, core_node11=0, core_node9=0}, version=2}
   [junit4]   2> 443546 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/hdfsbackuprestore_testok/leaders/shard2
   [junit4]   2> 443952 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 443952 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 443952 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.SyncStrategy Sync replicas to https://127.0.0.1:46735/solr/hdfsbackuprestore_testok_shard1_replica_n1/
   [junit4]   2> 443953 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.u.PeerSync PeerSync: core=hdfsbackuprestore_testok_shard1_replica_n1 url=https://127.0.0.1:46735/solr START replicas=[https://127.0.0.1:36659/solr/hdfsbackuprestore_testok_shard1_replica_n2/, https://127.0.0.1:46735/solr/hdfsbackuprestore_testok_shard1_replica_t4/] nUpdates=100
   [junit4]   2> 443954 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.u.PeerSync PeerSync: core=hdfsbackuprestore_testok_shard1_replica_n1 url=https://127.0.0.1:46735/solr DONE.  We have no versions.  sync failed.
   [junit4]   2> 443961 INFO  (qtp1840676713-6925) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.c.S.Request [hdfsbac

[...truncated too long message...]

 loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml

[...repeated resolve / ivy-availability-check / ivy-fail / ivy-configure output truncated...]

resolve:

jar-checksums:
    [mkdir] Created dir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/null699052273
     [copy] Copying 249 files to /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/null699052273
   [delete] Deleting directory /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/null699052273

check-working-copy:
[ivy:cachepath] :: resolving dependencies :: #;working@lucene1-us-west
[ivy:cachepath] 	confs: [default]
[ivy:cachepath] 	found org.eclipse.jgit#org.eclipse.jgit;5.3.0.201903130848-r in public
[ivy:cachepath] 	found com.jcraft#jsch;0.1.54 in public
[ivy:cachepath] 	found com.jcraft#jzlib;1.1.1 in public
[ivy:cachepath] 	found com.googlecode.javaewah#JavaEWAH;1.1.6 in public
[ivy:cachepath] 	found org.slf4j#slf4j-api;1.7.2 in public
[ivy:cachepath] 	found org.bouncycastle#bcpg-jdk15on;1.60 in public
[ivy:cachepath] 	found org.bouncycastle#bcprov-jdk15on;1.60 in public
[ivy:cachepath] 	found org.bouncycastle#bcpkix-jdk15on;1.60 in public
[ivy:cachepath] 	found org.slf4j#slf4j-nop;1.7.2 in public
[ivy:cachepath] :: resolution report :: resolve 30ms :: artifacts dl 2ms
	---------------------------------------------------------------------
	|                  |            modules            ||   artifacts   |
	|       conf       | number| search|dwnlded|evicted|| number|dwnlded|
	---------------------------------------------------------------------
	|      default     |   9   |   0   |   0   |   0   ||   9   |   0   |
	---------------------------------------------------------------------
[wc-checker] Initializing working copy...
[wc-checker] Checking working copy status...

-jenkins-base:

BUILD SUCCESSFUL
Total time: 118 minutes 58 seconds
Archiving artifacts
java.lang.InterruptedException: no matches found within 10000
	at hudson.FilePath$ValidateAntFileMask.hasMatch(FilePath.java:2847)
	at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2726)
	at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2707)
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3086)
Also:   hudson.remoting.Channel$CallSiteStackTrace: Remote call to lucene
		at hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1741)
		at hudson.remoting.UserRequest$ExceptionResponse.retrieve(UserRequest.java:357)
		at hudson.remoting.Channel.call(Channel.java:955)
		at hudson.FilePath.act(FilePath.java:1072)
		at hudson.FilePath.act(FilePath.java:1061)
		at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
		at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
		at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
		at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
		at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
		at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
		at hudson.model.Build$BuildExecution.post2(Build.java:186)
		at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
		at hudson.model.Run.execute(Run.java:1835)
		at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
		at hudson.model.ResourceController.execute(ResourceController.java:97)
		at hudson.model.Executor.run(Executor.java:429)
Caused: hudson.FilePath$TunneledInterruptedException
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3088)
	at hudson.remoting.UserRequest.perform(UserRequest.java:212)
	at hudson.remoting.UserRequest.perform(UserRequest.java:54)
	at hudson.remoting.Request$2.run(Request.java:369)
	at hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:744)
Caused: java.lang.InterruptedException: java.lang.InterruptedException: no matches found within 10000
	at hudson.FilePath.act(FilePath.java:1074)
	at hudson.FilePath.act(FilePath.java:1061)
	at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
	at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
	at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
	at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
	at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
	at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
	at hudson.model.Build$BuildExecution.post2(Build.java:186)
	at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
	at hudson.model.Run.execute(Run.java:1835)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:97)
	at hudson.model.Executor.run(Executor.java:429)
No artifacts found that match the file pattern "**/*.events,heapdumps/**,**/hs_err_pid*". Configuration error?
Recording test results
Build step 'Publish JUnit test result report' changed build result to UNSTABLE
Email was triggered for: Unstable (Test Failures)
Sending email for trigger: Unstable (Test Failures)
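
For anyone puzzling over the "No artifacts found that match the file pattern" warning above: the pattern is an Ant-style file mask, and it can be checked locally against a workspace with a short sketch like the one below. This is only an illustration of what that mask matches under assumed paths, not Jenkins' own code (Jenkins goes through hudson.FilePath.validateAntFileMask); the class name and the workspace argument are placeholders.

    import java.io.File;

    import org.apache.tools.ant.DirectoryScanner;   // requires ant.jar on the classpath

    public class ArtifactMaskCheck {
        public static void main(String[] args) {
            // Placeholder for the job workspace; pass a real path as the first argument.
            File workspace = new File(args.length > 0 ? args[0] : ".");
            DirectoryScanner scanner = new DirectoryScanner();
            scanner.setBasedir(workspace);
            // The job's comma-separated mask, split into its individual Ant includes.
            scanner.setIncludes(new String[] {"**/*.events", "heapdumps/**", "**/hs_err_pid*"});
            scanner.scan();
            String[] matches = scanner.getIncludedFiles();
            System.out.println("Matched " + matches.length + " file(s) under " + workspace);
            for (String path : matches) {
                System.out.println(path);
            }
        }
    }

If that prints zero matches against a finished workspace, the warning above is expected rather than a configuration error, since the mask only catches event dumps, heap dumps, and hs_err_pid crash files.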

[JENKINS] Lucene-Solr-Tests-8.x - Build # 493 - Still Unstable

Posted by Apache Jenkins Server <je...@builds.apache.org>.
Build: https://builds.apache.org/job/Lucene-Solr-Tests-8.x/493/

1 tests failed.
FAILED:  org.apache.solr.cloud.UnloadDistributedZkTest.test

Error Message:
Error from server at http://127.0.0.1:34774: ADDREPLICA failed to create replica

Stack Trace:
org.apache.solr.client.solrj.impl.HttpSolrClient$RemoteSolrException: Error from server at http://127.0.0.1:34774: ADDREPLICA failed to create replica
	at __randomizedtesting.SeedInfo.seed([49E4041409F004D7:C1B03BCEA70C692F]:0)
	at org.apache.solr.client.solrj.impl.HttpSolrClient.executeMethod(HttpSolrClient.java:656)
	at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:262)
	at org.apache.solr.client.solrj.impl.HttpSolrClient.request(HttpSolrClient.java:245)
	at org.apache.solr.client.solrj.impl.LBSolrClient.doRequest(LBSolrClient.java:368)
	at org.apache.solr.client.solrj.impl.LBSolrClient.request(LBSolrClient.java:296)
	at org.apache.solr.client.solrj.impl.BaseCloudSolrClient.sendRequest(BaseCloudSolrClient.java:1128)
	at org.apache.solr.client.solrj.impl.BaseCloudSolrClient.requestWithRetryOnStaleState(BaseCloudSolrClient.java:897)
	at org.apache.solr.client.solrj.impl.BaseCloudSolrClient.request(BaseCloudSolrClient.java:829)
	at org.apache.solr.client.solrj.SolrRequest.process(SolrRequest.java:211)
	at org.apache.solr.client.solrj.SolrRequest.process(SolrRequest.java:228)
	at org.apache.solr.cloud.UnloadDistributedZkTest.testCoreUnloadAndLeaders(UnloadDistributedZkTest.java:215)
	at org.apache.solr.cloud.UnloadDistributedZkTest.test(UnloadDistributedZkTest.java:67)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:1082)
	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:1054)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.lang.Thread.run(Thread.java:748)
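
For context on the ADDREPLICA failure above: the stack trace shows UnloadDistributedZkTest sending a collection API request through SolrJ, and a request of that kind would look roughly like the minimal sketch below. This is not the test's actual code; the collection, shard, node, and ZooKeeper addresses are placeholders, and it only illustrates the CollectionAdminRequest.AddReplica call behind an "ADDREPLICA failed to create replica" error.

    import java.io.IOException;
    import java.util.Collections;
    import java.util.Optional;

    import org.apache.solr.client.solrj.SolrServerException;
    import org.apache.solr.client.solrj.impl.CloudSolrClient;
    import org.apache.solr.client.solrj.request.CollectionAdminRequest;
    import org.apache.solr.client.solrj.response.CollectionAdminResponse;

    public class AddReplicaSketch {
        public static void main(String[] args) throws SolrServerException, IOException {
            // Placeholder ZK address; the test starts its own ZkTestServer on a random port.
            String zkHost = "127.0.0.1:2181";
            try (CloudSolrClient client =
                     new CloudSolrClient.Builder(Collections.singletonList(zkHost), Optional.empty()).build()) {
                // Ask the collections API to add one replica of shard1 on a specific node.
                CollectionAdminRequest.AddReplica addReplica =
                    CollectionAdminRequest.addReplicaToShard("some_collection", "shard1");
                addReplica.setNode("127.0.0.1:34774_solr"); // host:port_context node name, as seen in the logs
                CollectionAdminResponse rsp = addReplica.process(client);
                // If the overseer cannot create the core, the client side sees the
                // RemoteSolrException reported in the stack trace above.
                System.out.println("success=" + rsp.isSuccess() + " response=" + rsp.getResponse());
            }
        }
    }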




Build Log:
[...truncated 13834 lines...]
   [junit4] Suite: org.apache.solr.cloud.UnloadDistributedZkTest
   [junit4]   2> 1250860 INFO  (SUITE-UnloadDistributedZkTest-seed#[49E4041409F004D7]-worker) [     ] o.a.s.SolrTestCaseJ4 Created dataDir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/data-dir-56-001
   [junit4]   2> 1250860 WARN  (SUITE-UnloadDistributedZkTest-seed#[49E4041409F004D7]-worker) [     ] o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=18 numCloses=18
   [junit4]   2> 1250860 INFO  (SUITE-UnloadDistributedZkTest-seed#[49E4041409F004D7]-worker) [     ] o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP=true
   [junit4]   2> 1250861 INFO  (SUITE-UnloadDistributedZkTest-seed#[49E4041409F004D7]-worker) [     ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.SolrTestCaseJ4$SuppressSSL(bugUrl=https://issues.apache.org/jira/browse/SOLR-5776)
   [junit4]   2> 1250861 INFO  (SUITE-UnloadDistributedZkTest-seed#[49E4041409F004D7]-worker) [     ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
   [junit4]   2> 1250861 INFO  (SUITE-UnloadDistributedZkTest-seed#[49E4041409F004D7]-worker) [     ] o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /
   [junit4]   2> 1250868 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 1250868 INFO  (ZkTestServer Run Thread) [     ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 1250868 INFO  (ZkTestServer Run Thread) [     ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 1250968 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.ZkTestServer start zk server on port:37467
   [junit4]   2> 1250969 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.ZkTestServer waitForServerUp: 127.0.0.1:37467
   [junit4]   2> 1250969 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:37467
   [junit4]   2> 1250969 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 37467
   [junit4]   2> 1250971 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 1250972 INFO  (zkConnectionManagerCallback-5669-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1250972 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 1250975 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 1250996 INFO  (zkConnectionManagerCallback-5671-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1250996 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 1251003 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml to /configs/conf1/solrconfig.xml
   [junit4]   2> 1251017 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/schema.xml to /configs/conf1/schema.xml
   [junit4]   2> 1251019 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
   [junit4]   2> 1251019 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/stopwords.txt to /configs/conf1/stopwords.txt
   [junit4]   2> 1251020 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/protwords.txt to /configs/conf1/protwords.txt
   [junit4]   2> 1251021 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/currency.xml to /configs/conf1/currency.xml
   [junit4]   2> 1251022 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml to /configs/conf1/enumsConfig.xml
   [junit4]   2> 1251025 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json to /configs/conf1/open-exchange-rates.json
   [junit4]   2> 1251025 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt to /configs/conf1/mapping-ISOLatin1Accent.txt
   [junit4]   2> 1251026 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt to /configs/conf1/old_synonyms.txt
   [junit4]   2> 1251027 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/core/src/test-files/solr/collection1/conf/synonyms.txt to /configs/conf1/synonyms.txt
   [junit4]   2> 1251028 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.AbstractFullDistribZkTestBase Will use NRT replicas unless explicitly asked otherwise
   [junit4]   2> 1251271 WARN  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 1251272 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 1251272 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 1251272 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
   [junit4]   2> 1251274 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 1251274 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 1251274 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 1251274 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@65215a41{/,null,AVAILABLE}
   [junit4]   2> 1251275 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.e.j.s.AbstractConnector Started ServerConnector@64f87209{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:34774}
   [junit4]   2> 1251275 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.e.j.s.Server Started @1251386ms
   [junit4]   2> 1251275 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/tempDir-001/control/data, hostContext=/, hostPort=34774, coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/control-001/cores}
   [junit4]   2> 1251275 ERROR (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 1251275 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 1251275 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 8.3.0
   [junit4]   2> 1251275 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1251275 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1251275 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-08-29T14:36:32.546Z
   [junit4]   2> 1251276 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 1251277 INFO  (zkConnectionManagerCallback-5673-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1251277 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 1251378 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 1251378 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/control-001/solr.xml
   [junit4]   2> 1251382 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 1251382 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 1251383 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@5789c771, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 1251615 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 1251616 WARN  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@397bee98[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1251616 WARN  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@397bee98[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1251619 WARN  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@14fa8881[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1251619 WARN  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@14fa8881[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1251620 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:37467/solr
   [junit4]   2> 1251621 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 1251622 INFO  (zkConnectionManagerCallback-5680-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1251622 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 1251724 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [n:127.0.0.1:34774_     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 1251724 INFO  (zkConnectionManagerCallback-5682-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1251724 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [n:127.0.0.1:34774_     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 1251861 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [n:127.0.0.1:34774_     ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:34774_
   [junit4]   2> 1251862 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [n:127.0.0.1:34774_     ] o.a.s.c.Overseer Overseer (id=75296828349284356-127.0.0.1:34774_-n_0000000000) starting
   [junit4]   2> 1251867 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [n:127.0.0.1:34774_     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 1251868 INFO  (zkConnectionManagerCallback-5689-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1251868 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [n:127.0.0.1:34774_     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 1251870 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [n:127.0.0.1:34774_     ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:37467/solr ready
   [junit4]   2> 1251871 INFO  (OverseerStateUpdate-75296828349284356-127.0.0.1:34774_-n_0000000000) [n:127.0.0.1:34774_     ] o.a.s.c.Overseer Starting to work on the main queue : 127.0.0.1:34774_
   [junit4]   2> 1251871 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [n:127.0.0.1:34774_     ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:34774_
   [junit4]   2> 1251872 INFO  (OverseerStateUpdate-75296828349284356-127.0.0.1:34774_-n_0000000000) [n:127.0.0.1:34774_     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1251873 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [n:127.0.0.1:34774_     ] o.a.s.c.PackageManager clusterprops.json changed , version 0
   [junit4]   2> 1251888 INFO  (zkCallback-5688-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1251909 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [n:127.0.0.1:34774_     ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 1251924 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [n:127.0.0.1:34774_     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5789c771
   [junit4]   2> 1251931 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [n:127.0.0.1:34774_     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5789c771
   [junit4]   2> 1251931 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [n:127.0.0.1:34774_     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5789c771
   [junit4]   2> 1251933 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [n:127.0.0.1:34774_     ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/control-001/cores
   [junit4]   2> 1251958 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 1251959 INFO  (zkConnectionManagerCallback-5695-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1251959 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 1251960 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1251961 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:37467/solr ready
   [junit4]   2> 1251962 INFO  (qtp510562396-9807) [n:127.0.0.1:34774_     ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:34774_&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 1251965 INFO  (OverseerThreadFactory-2062-thread-1-processing-n:127.0.0.1:34774_) [n:127.0.0.1:34774_     ] o.a.s.c.a.c.CreateCollectionCmd Create collection control_collection
   [junit4]   2> 1252073 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_    x:control_collection_shard1_replica_n1 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 1252074 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_    x:control_collection_shard1_replica_n1 ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 1253087 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.3.0
   [junit4]   2> 1253118 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.s.IndexSchema [control_collection_shard1_replica_n1] Schema name=test
   [junit4]   2> 1253217 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 1253243 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.CoreContainer Creating SolrCore 'control_collection_shard1_replica_n1' using configuration from collection control_collection, trusted=true
   [junit4]   2> 1253244 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.control_collection.shard1.replica_n1' (registry 'solr.core.control_collection.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5789c771
   [junit4]   2> 1253244 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.SolrCore [[control_collection_shard1_replica_n1] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/control-001/cores/control_collection_shard1_replica_n1], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/control-001/cores/control_collection_shard1_replica_n1/data/]
   [junit4]   2> 1253246 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=14, maxMergeAtOnceExplicit=40, maxMergedSegmentMB=44.4296875, floorSegmentMB=1.7041015625, forceMergeDeletesPctAllowed=0.6647463089672323, segmentsPerTier=36.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.1906980108789277, deletesPctAllowed=21.775055026738894
   [junit4]   2> 1253261 WARN  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 1253310 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 1253310 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1253311 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 1253311 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 1253312 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: minMergeSize=1677721, mergeFactor=13, maxMergeSize=2147483648, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.5479530286378628]
   [junit4]   2> 1253313 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@13ada31[control_collection_shard1_replica_n1] main]
   [junit4]   2> 1253322 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 1253323 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 1253323 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 1253323 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1643212329025798144
   [junit4]   2> 1253328 INFO  (searcherExecutor-2067-thread-1-processing-n:127.0.0.1:34774_ x:control_collection_shard1_replica_n1 c:control_collection s:shard1) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.SolrCore [control_collection_shard1_replica_n1] Registered new searcher Searcher@13ada31[control_collection_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1253330 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/control_collection/terms/shard1 to Terms{values={core_node2=0}, version=0}
   [junit4]   2> 1253330 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/control_collection/leaders/shard1
   [junit4]   2> 1253333 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 1253333 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 1253333 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:34774/control_collection_shard1_replica_n1/
   [junit4]   2> 1253333 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 1253333 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.SyncStrategy http://127.0.0.1:34774/control_collection_shard1_replica_n1/ has no replicas
   [junit4]   2> 1253334 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContextBase Creating leader registration node /collections/control_collection/leaders/shard1/leader after winning as /collections/control_collection/leader_elect/shard1/election/75296828349284356-core_node2-n_0000000000
   [junit4]   2> 1253335 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:34774/control_collection_shard1_replica_n1/ shard1
   [junit4]   2> 1253437 INFO  (zkCallback-5681-thread-1) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 1253437 INFO  (zkCallback-5681-thread-2) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 1253439 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 1253464 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1 ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT} status=0 QTime=1391
   [junit4]   2> 1253477 INFO  (qtp510562396-9807) [n:127.0.0.1:34774_     ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at most 45 seconds. Check all shard replicas
   [junit4]   2> 1253569 INFO  (zkCallback-5681-thread-1) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 1253569 INFO  (zkCallback-5681-thread-2) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 1253569 INFO  (zkCallback-5681-thread-3) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 1253569 INFO  (qtp510562396-9807) [n:127.0.0.1:34774_     ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:34774_&wt=javabin&version=2} status=0 QTime=1607
   [junit4]   2> 1253570 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.AbstractFullDistribZkTestBase Waiting to see 1 active replicas in collection: control_collection
   [junit4]   2> 1253674 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 1253675 INFO  (zkConnectionManagerCallback-5701-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1253675 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 1253676 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1253677 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:37467/solr ready
   [junit4]   2> 1253677 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection loss:false
   [junit4]   2> 1253678 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_     ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&name=collection1&nrtReplicas=1&action=CREATE&numShards=2&createNodeSet=&stateFormat=1&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 1253681 INFO  (OverseerThreadFactory-2062-thread-2-processing-n:127.0.0.1:34774_) [n:127.0.0.1:34774_     ] o.a.s.c.a.c.CreateCollectionCmd Create collection collection1
   [junit4]   2> 1253682 INFO  (OverseerCollectionConfigSetProcessor-75296828349284356-127.0.0.1:34774_-n_0000000000) [n:127.0.0.1:34774_     ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000000 doesn't exist.  Requestor may have disconnected from ZooKeeper
   [junit4]   2> 1253884 WARN  (OverseerThreadFactory-2062-thread-2-processing-n:127.0.0.1:34774_) [n:127.0.0.1:34774_     ] o.a.s.c.a.c.CreateCollectionCmd It is unusual to create a collection (collection1) without cores.
   [junit4]   2> 1253886 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_     ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at most 45 seconds. Check all shard replicas
   [junit4]   2> 1253887 INFO  (qtp510562396-9809) [n:127.0.0.1:34774_     ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={collection.configName=conf1&name=collection1&nrtReplicas=1&action=CREATE&numShards=2&createNodeSet=&stateFormat=1&wt=javabin&version=2} status=0 QTime=208
   [junit4]   2> 1253888 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.SolrCloudTestCase active slice count: 2 expected:2
   [junit4]   2> 1253888 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.SolrCloudTestCase active replica count: 0 expected replica count: 0
   [junit4]   2> 1253888 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.SolrCloudTestCase active slice count: 2 expected:2
   [junit4]   2> 1253888 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.SolrCloudTestCase active replica count: 0 expected replica count: 0
   [junit4]   2> 1253888 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.SolrCloudTestCase active slice count: 2 expected:2
   [junit4]   2> 1253888 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.SolrCloudTestCase active replica count: 0 expected replica count: 0
   [junit4]   2> 1253888 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.AbstractFullDistribZkTestBase Creating jetty instances pullReplicaCount=0 numOtherReplicas=4
   [junit4]   2> 1253997 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/shard-1-001 of type NRT
   [junit4]   2> 1253997 WARN  (closeThreadPool-5702-thread-1) [     ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 1253998 INFO  (closeThreadPool-5702-thread-1) [     ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 1253998 INFO  (closeThreadPool-5702-thread-1) [     ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 1253998 INFO  (closeThreadPool-5702-thread-1) [     ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
   [junit4]   2> 1253999 INFO  (closeThreadPool-5702-thread-1) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 1253999 INFO  (closeThreadPool-5702-thread-1) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 1253999 INFO  (closeThreadPool-5702-thread-1) [     ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 1253999 INFO  (closeThreadPool-5702-thread-1) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@3a2915eb{/,null,AVAILABLE}
   [junit4]   2> 1253999 INFO  (closeThreadPool-5702-thread-1) [     ] o.e.j.s.AbstractConnector Started ServerConnector@45a8e9ae{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:36206}
   [junit4]   2> 1253999 INFO  (closeThreadPool-5702-thread-1) [     ] o.e.j.s.Server Started @1254111ms
   [junit4]   2> 1253999 INFO  (closeThreadPool-5702-thread-1) [     ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/tempDir-001/jetty1, replicaType=NRT, solrconfig=solrconfig.xml, hostContext=/, hostPort=36206, coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/shard-1-001/cores}
   [junit4]   2> 1254000 ERROR (closeThreadPool-5702-thread-1) [     ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 1254000 INFO  (closeThreadPool-5702-thread-1) [     ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 1254000 INFO  (closeThreadPool-5702-thread-1) [     ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 8.3.0
   [junit4]   2> 1254000 INFO  (closeThreadPool-5702-thread-1) [     ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1254000 INFO  (closeThreadPool-5702-thread-1) [     ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1254000 INFO  (closeThreadPool-5702-thread-1) [     ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-08-29T14:36:35.271Z
   [junit4]   2> 1254001 INFO  (closeThreadPool-5702-thread-1) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 1254002 INFO  (zkConnectionManagerCallback-5704-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1254002 INFO  (closeThreadPool-5702-thread-1) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 1254103 INFO  (closeThreadPool-5702-thread-1) [     ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 1254103 INFO  (closeThreadPool-5702-thread-1) [     ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/shard-1-001/solr.xml
   [junit4]   2> 1254107 INFO  (closeThreadPool-5702-thread-1) [     ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 1254107 INFO  (closeThreadPool-5702-thread-1) [     ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 1254109 INFO  (closeThreadPool-5702-thread-1) [     ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@5789c771, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 1254138 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/shard-2-001 of type NRT
   [junit4]   2> 1254138 WARN  (closeThreadPool-5702-thread-2) [     ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 1254139 INFO  (closeThreadPool-5702-thread-2) [     ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 1254139 INFO  (closeThreadPool-5702-thread-2) [     ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 1254139 INFO  (closeThreadPool-5702-thread-2) [     ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
   [junit4]   2> 1254196 INFO  (closeThreadPool-5702-thread-2) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 1254196 INFO  (closeThreadPool-5702-thread-2) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 1254196 INFO  (closeThreadPool-5702-thread-2) [     ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 1254196 INFO  (closeThreadPool-5702-thread-2) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@218c93fd{/,null,AVAILABLE}
   [junit4]   2> 1254209 INFO  (closeThreadPool-5702-thread-2) [     ] o.e.j.s.AbstractConnector Started ServerConnector@67e136ae{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:34332}
   [junit4]   2> 1254209 INFO  (closeThreadPool-5702-thread-2) [     ] o.e.j.s.Server Started @1254320ms
   [junit4]   2> 1254209 INFO  (closeThreadPool-5702-thread-2) [     ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/tempDir-001/jetty2, replicaType=NRT, solrconfig=solrconfig.xml, hostContext=/, hostPort=34332, coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/shard-2-001/cores}
   [junit4]   2> 1254209 ERROR (closeThreadPool-5702-thread-2) [     ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 1254209 INFO  (closeThreadPool-5702-thread-2) [     ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 1254209 INFO  (closeThreadPool-5702-thread-2) [     ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 8.3.0
   [junit4]   2> 1254209 INFO  (closeThreadPool-5702-thread-2) [     ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1254209 INFO  (closeThreadPool-5702-thread-2) [     ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1254209 INFO  (closeThreadPool-5702-thread-2) [     ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-08-29T14:36:35.480Z
   [junit4]   2> 1254230 INFO  (closeThreadPool-5702-thread-2) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 1254237 INFO  (zkConnectionManagerCallback-5707-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1254245 INFO  (closeThreadPool-5702-thread-2) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 1254356 INFO  (closeThreadPool-5702-thread-2) [     ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 1254356 INFO  (closeThreadPool-5702-thread-2) [     ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/shard-2-001/solr.xml
   [junit4]   2> 1254365 INFO  (closeThreadPool-5702-thread-2) [     ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 1254365 INFO  (closeThreadPool-5702-thread-2) [     ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 1254369 INFO  (closeThreadPool-5702-thread-2) [     ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@5789c771, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 1254497 INFO  (closeThreadPool-5702-thread-1) [     ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 1254498 WARN  (closeThreadPool-5702-thread-1) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@1fcda107[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1254498 WARN  (closeThreadPool-5702-thread-1) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@1fcda107[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1254513 WARN  (closeThreadPool-5702-thread-1) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@566c1a76[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1254513 WARN  (closeThreadPool-5702-thread-1) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@566c1a76[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1254514 INFO  (closeThreadPool-5702-thread-1) [     ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:37467/solr
   [junit4]   2> 1254525 INFO  (closeThreadPool-5702-thread-1) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 1254541 INFO  (zkConnectionManagerCallback-5714-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1254541 INFO  (closeThreadPool-5702-thread-1) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 1254543 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/shard-3-001 of type NRT
   [junit4]   2> 1254564 INFO  (closeThreadPool-5702-thread-2) [     ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 1254574 WARN  (closeThreadPool-5702-thread-2) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@6572b057[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1254574 WARN  (closeThreadPool-5702-thread-2) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@6572b057[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1254584 WARN  (closeThreadPool-5702-thread-2) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@7594efc1[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1254584 WARN  (closeThreadPool-5702-thread-2) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@7594efc1[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1254585 INFO  (closeThreadPool-5702-thread-2) [     ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:37467/solr
   [junit4]   2> 1254593 WARN  (closeThreadPool-5702-thread-3) [     ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 1254594 INFO  (closeThreadPool-5702-thread-3) [     ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 1254594 INFO  (closeThreadPool-5702-thread-3) [     ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 1254594 INFO  (closeThreadPool-5702-thread-3) [     ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
   [junit4]   2> 1254604 INFO  (closeThreadPool-5702-thread-3) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 1254604 INFO  (closeThreadPool-5702-thread-3) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 1254604 INFO  (closeThreadPool-5702-thread-3) [     ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 1254605 INFO  (closeThreadPool-5702-thread-3) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@a686c{/,null,AVAILABLE}
   [junit4]   2> 1254605 INFO  (closeThreadPool-5702-thread-3) [     ] o.e.j.s.AbstractConnector Started ServerConnector@6b3d9b2c{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:45618}
   [junit4]   2> 1254605 INFO  (closeThreadPool-5702-thread-3) [     ] o.e.j.s.Server Started @1254717ms
   [junit4]   2> 1254605 INFO  (closeThreadPool-5702-thread-3) [     ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/tempDir-001/jetty3, replicaType=NRT, solrconfig=solrconfig.xml, hostContext=/, hostPort=45618, coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/shard-3-001/cores}
   [junit4]   2> 1254608 INFO  (closeThreadPool-5702-thread-2) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 1254615 ERROR (closeThreadPool-5702-thread-3) [     ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 1254615 INFO  (closeThreadPool-5702-thread-3) [     ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 1254615 INFO  (closeThreadPool-5702-thread-3) [     ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 8.3.0
   [junit4]   2> 1254615 INFO  (closeThreadPool-5702-thread-3) [     ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1254615 INFO  (closeThreadPool-5702-thread-3) [     ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1254615 INFO  (closeThreadPool-5702-thread-3) [     ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-08-29T14:36:35.886Z
   [junit4]   2> 1254616 INFO  (closeThreadPool-5702-thread-3) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 1254617 INFO  (zkConnectionManagerCallback-5722-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1254617 INFO  (closeThreadPool-5702-thread-3) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 1254638 INFO  (zkConnectionManagerCallback-5720-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1254638 INFO  (closeThreadPool-5702-thread-2) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 1254691 INFO  (closeThreadPool-5702-thread-1) [n:127.0.0.1:36206_     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 1254712 INFO  (zkConnectionManagerCallback-5724-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1254719 INFO  (closeThreadPool-5702-thread-1) [n:127.0.0.1:36206_     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 1254728 INFO  (closeThreadPool-5702-thread-1) [n:127.0.0.1:36206_     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1254740 INFO  (closeThreadPool-5702-thread-1) [n:127.0.0.1:36206_     ] o.a.s.c.ZkController Publish node=127.0.0.1:36206_ as DOWN
   [junit4]   2> 1254741 INFO  (closeThreadPool-5702-thread-1) [n:127.0.0.1:36206_     ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 1254741 INFO  (closeThreadPool-5702-thread-1) [n:127.0.0.1:36206_     ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:36206_
   [junit4]   2> 1254742 INFO  (zkCallback-5688-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 1254744 INFO  (zkCallback-5681-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 1254760 INFO  (zkCallback-5723-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 1254760 INFO  (zkCallback-5700-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 1254764 INFO  (closeThreadPool-5702-thread-1) [n:127.0.0.1:36206_     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 1254820 INFO  (zkConnectionManagerCallback-5731-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1254833 INFO  (closeThreadPool-5702-thread-2) [n:127.0.0.1:34332_     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 1254845 INFO  (closeThreadPool-5702-thread-1) [n:127.0.0.1:36206_     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 1254849 INFO  (zkConnectionManagerCallback-5733-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1254857 INFO  (closeThreadPool-5702-thread-2) [n:127.0.0.1:34332_     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 1254869 INFO  (TEST-UnloadDistributedZkTest.test-seed#[49E4041409F004D7]) [     ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 4 in directory /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/shard-4-001 of type NRT
   [junit4]   2> 1254870 WARN  (closeThreadPool-5702-thread-4) [     ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 1254870 INFO  (closeThreadPool-5702-thread-4) [     ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 1254870 INFO  (closeThreadPool-5702-thread-4) [     ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 1254870 INFO  (closeThreadPool-5702-thread-4) [     ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
   [junit4]   2> 1254871 INFO  (closeThreadPool-5702-thread-4) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 1254871 INFO  (closeThreadPool-5702-thread-4) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 1254871 INFO  (closeThreadPool-5702-thread-4) [     ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 1254871 INFO  (closeThreadPool-5702-thread-4) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@5056d9e8{/,null,AVAILABLE}
   [junit4]   2> 1254872 INFO  (closeThreadPool-5702-thread-4) [     ] o.e.j.s.AbstractConnector Started ServerConnector@1243338c{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:42916}
   [junit4]   2> 1254872 INFO  (closeThreadPool-5702-thread-4) [     ] o.e.j.s.Server Started @1254983ms
   [junit4]   2> 1254872 INFO  (closeThreadPool-5702-thread-4) [     ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {solr.data.dir=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/tempDir-001/jetty4, replicaType=NRT, solrconfig=solrconfig.xml, hostContext=/, hostPort=42916, coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/shard-4-001/cores}
   [junit4]   2> 1254872 ERROR (closeThreadPool-5702-thread-4) [     ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 1254872 INFO  (closeThreadPool-5702-thread-4) [     ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 1254872 INFO  (closeThreadPool-5702-thread-4) [     ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 8.3.0
   [junit4]   2> 1254872 INFO  (closeThreadPool-5702-thread-4) [     ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1254872 INFO  (closeThreadPool-5702-thread-4) [     ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1254872 INFO  (closeThreadPool-5702-thread-4) [     ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-08-29T14:36:36.143Z
   [junit4]   2> 1254873 INFO  (closeThreadPool-5702-thread-1) [n:127.0.0.1:36206_     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 1254874 INFO  (closeThreadPool-5702-thread-1) [n:127.0.0.1:36206_     ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:37467/solr ready
   [junit4]   2> 1254875 INFO  (closeThreadPool-5702-thread-1) [n:127.0.0.1:36206_     ] o.a.s.c.PackageManager clusterprops.json changed , version 0
   [junit4]   2> 1254889 INFO  (closeThreadPool-5702-thread-3) [     ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 1254889 INFO  (closeThreadPool-5702-thread-3) [     ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/shard-3-001/solr.xml
   [junit4]   2> 1254890 INFO  (closeThreadPool-5702-thread-4) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 1254907 INFO  (closeThreadPool-5702-thread-3) [     ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 1254907 INFO  (closeThreadPool-5702-thread-3) [     ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 1254908 INFO  (closeThreadPool-5702-thread-3) [     ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@5789c771, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 1254922 INFO  (zkConnectionManagerCallback-5737-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1254922 INFO  (closeThreadPool-5702-thread-4) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 1255084 INFO  (closeThreadPool-5702-thread-1) [n:127.0.0.1:36206_     ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 1255092 INFO  (closeThreadPool-5702-thread-2) [n:127.0.0.1:34332_     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 1255107 INFO  (closeThreadPool-5702-thread-1) [n:127.0.0.1:36206_     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5789c771
   [junit4]   2> 1255127 INFO  (closeThreadPool-5702-thread-1) [n:127.0.0.1:36206_     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5789c771
   [junit4]   2> 1255127 INFO  (closeThreadPool-5702-thread-1) [n:127.0.0.1:36206_     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5789c771
   [junit4]   2> 1255128 INFO  (closeThreadPool-5702-thread-1) [n:127.0.0.1:36206_     ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/shard-1-001/cores
   [junit4]   2> 1255197 INFO  (closeThreadPool-5702-thread-4) [     ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 1255197 INFO  (closeThreadPool-5702-thread-4) [     ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/shard-4-001/solr.xml
   [junit4]   2> 1255200 INFO  (closeThreadPool-5702-thread-2) [n:127.0.0.1:34332_     ] o.a.s.c.ZkController Publish node=127.0.0.1:34332_ as DOWN
   [junit4]   2> 1255200 INFO  (closeThreadPool-5702-thread-4) [     ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 1255200 INFO  (closeThreadPool-5702-thread-4) [     ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 1255201 INFO  (closeThreadPool-5702-thread-2) [n:127.0.0.1:34332_     ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 1255201 INFO  (closeThreadPool-5702-thread-2) [n:127.0.0.1:34332_     ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:34332_
   [junit4]   2> 1255202 INFO  (zkCallback-5688-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1255202 INFO  (zkCallback-5681-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1255214 INFO  (closeThreadPool-5702-thread-4) [     ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@5789c771, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 1255217 INFO  (zkCallback-5700-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1255220 INFO  (zkCallback-5730-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1255228 INFO  (zkCallback-5723-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1255229 INFO  (closeThreadPool-5702-thread-2) [n:127.0.0.1:34332_     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 1255229 INFO  (zkConnectionManagerCallback-5744-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1255229 INFO  (closeThreadPool-5702-thread-2) [n:127.0.0.1:34332_     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 1255230 INFO  (closeThreadPool-5702-thread-2) [n:127.0.0.1:34332_     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 1255231 INFO  (closeThreadPool-5702-thread-2) [n:127.0.0.1:34332_     ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:37467/solr ready
   [junit4]   2> 1255231 INFO  (closeThreadPool-5702-thread-2) [n:127.0.0.1:34332_     ] o.a.s.c.PackageManager clusterprops.json changed , version 0
   [junit4]   2> 1255257 INFO  (closeThreadPool-5702-thread-2) [n:127.0.0.1:34332_     ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 1255258 INFO  (zkCallback-5732-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1255319 INFO  (closeThreadPool-5702-thread-2) [n:127.0.0.1:34332_     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5789c771
   [junit4]   2> 1255350 INFO  (closeThreadPool-5702-thread-2) [n:127.0.0.1:34332_     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5789c771
   [junit4]   2> 1255350 INFO  (closeThreadPool-5702-thread-2) [n:127.0.0.1:34332_     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5789c771
   [junit4]   2> 1255351 INFO  (closeThreadPool-5702-thread-2) [n:127.0.0.1:34332_     ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/shard-2-001/cores
   [junit4]   2> 1255473 INFO  (closeThreadPool-5702-thread-1) [     ] o.a.s.c.AbstractFullDistribZkTestBase waitForLiveNode: 127.0.0.1:36206_
   [junit4]   2> 1255494 INFO  (closeThreadPool-5702-thread-2) [     ] o.a.s.c.AbstractFullDistribZkTestBase waitForLiveNode: 127.0.0.1:34332_
   [junit4]   2> 1255688 INFO  (OverseerCollectionConfigSetProcessor-75296828349284356-127.0.0.1:34774_-n_0000000000) [n:127.0.0.1:34774_     ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000002 doesn't exist.  Requestor may have disconnected from ZooKeeper
   [junit4]   2> 1255774 INFO  (closeThreadPool-5702-thread-3) [     ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 1255790 WARN  (closeThreadPool-5702-thread-3) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@23623999[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1255790 WARN  (closeThreadPool-5702-thread-3) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@23623999[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1255834 WARN  (closeThreadPool-5702-thread-3) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@710d57be[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1255834 WARN  (closeThreadPool-5702-thread-3) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@710d57be[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1255835 INFO  (closeThreadPool-5702-thread-3) [     ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:37467/solr
   [junit4]   2> 1255865 INFO  (closeThreadPool-5702-thread-3) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 1255884 INFO  (zkConnectionManagerCallback-5752-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1255886 INFO  (closeThreadPool-5702-thread-3) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 1255993 INFO  (closeThreadPool-5702-thread-3) [n:127.0.0.1:45618_     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 1255993 INFO  (zkConnectionManagerCallback-5754-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1255993 INFO  (closeThreadPool-5702-thread-3) [n:127.0.0.1:45618_     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 1256009 INFO  (closeThreadPool-5702-thread-4) [     ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 1256010 WARN  (closeThreadPool-5702-thread-4) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@64a34d22[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1256010 WARN  (closeThreadPool-5702-thread-4) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@64a34d22[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1256029 WARN  (closeThreadPool-5702-thread-4) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@68beef98[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1256029 WARN  (closeThreadPool-5702-thread-4) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@68beef98[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1256030 INFO  (closeThreadPool-5702-thread-4) [     ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:37467/solr
   [junit4]   2> 1256031 INFO  (closeThreadPool-5702-thread-4) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 1256047 INFO  (closeThreadPool-5702-thread-3) [n:127.0.0.1:45618_     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 1256048 INFO  (zkConnectionManagerCallback-5762-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1256048 INFO  (closeThreadPool-5702-thread-4) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 1256067 INFO  (closeThreadPool-5702-thread-3) [n:127.0.0.1:45618_     ] o.a.s.c.ZkController Publish node=127.0.0.1:45618_ as DOWN
   [junit4]   2> 1256068 INFO  (closeThreadPool-5702-thread-3) [n:127.0.0.1:45618_     ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 1256068 INFO  (closeThreadPool-5702-thread-3) [n:127.0.0.1:45618_     ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:45618_
   [junit4]   2> 1256137 INFO  (zkCallback-5681-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1256137 INFO  (zkCallback-5688-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1256137 INFO  (zkCallback-5730-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1256137 INFO  (zkCallback-5723-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1256137 INFO  (zkCallback-5700-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1256157 INFO  (zkCallback-5753-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1256157 INFO  (zkCallback-5743-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1256157 INFO  (zkCallback-5732-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
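
The repeated "Updated live nodes from ZooKeeper... (n) -> (m)" lines above are each node's ZkStateReader reacting to a child watch on /live_nodes as the extra Jetty instances register themselves. As a rough orientation only, the sketch below shows that watch-and-re-register pattern using the plain ZooKeeper client API; the connect string and class name are hypothetical and this is not Solr's actual ZkStateReader code.

// Sketch only: a one-shot child watch on /live_nodes, re-registered after each
// event; this is the pattern behind the "(n) -> (m)" live-node updates in the log.
import java.util.List;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.ZooKeeper;

public class LiveNodesWatchSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical connect string; the test uses an embedded ZkTestServer chrooted at /solr.
        ZooKeeper zk = new ZooKeeper("127.0.0.1:2181/solr", 30000, event -> {});
        watchLiveNodes(zk);
        Thread.sleep(60_000); // stay alive long enough to observe a few changes
        zk.close();
    }

    static void watchLiveNodes(ZooKeeper zk) throws KeeperException, InterruptedException {
        List<String> liveNodes = zk.getChildren("/live_nodes", event -> {
            if (event.getType() == Watcher.Event.EventType.NodeChildrenChanged) {
                try {
                    watchLiveNodes(zk); // ZooKeeper watches fire once, so re-register
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
        System.out.println("Updated live nodes: (" + liveNodes.size() + ")");
    }
}
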
   [junit4]   2> 1256176 INFO  (closeThreadPool-5702-thread-3) [n:127.0.0.1:45618_     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 1256176 INFO  (closeThreadPool-5702-thread-4) [n:127.0.0.1:42916_     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 1256190 INFO  (zkConnectionManagerCallback-5767-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1256190 INFO  (closeThreadPool-5702-thread-3) [n:127.0.0.1:45618_     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 1256191 INFO  (zkConnectionManagerCallback-5769-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1256191 INFO  (closeThreadPool-5702-thread-4) [n:127.0.0.1:42916_     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 1256191 INFO  (closeThreadPool-5702-thread-3) [n:127.0.0.1:45618_     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (4)
   [junit4]   2> 1256192 INFO  (closeThreadPool-5702-thread-3) [n:127.0.0.1:45618_     ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:37467/solr ready
   [junit4]   2> 1256193 INFO  (closeThreadPool-5702-thread-3) [n:127.0.0.1:45618_     ] o.a.s.c.PackageManager clusterprops.json changed , version 0
   [junit4]   2> 1256227 INFO  (closeThreadPool-5702-thread-4) [n:127.0.0.1:42916_     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (4)
   [junit4]   2> 1256230 INFO  (closeThreadPool-5702-thread-4) [n:127.0.0.1:42916_     ] o.a.s.c.ZkController Publish node=127.0.0.1:42916_ as DOWN
   [junit4]   2> 1256243 INFO  (closeThreadPool-5702-thread-4) [n:127.0.0.1:42916_     ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 1256243 INFO  (closeThreadPool-5702-thread-4) [n:127.0.0.1:42916_     ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:42916_
   [junit4]   2> 1256245 INFO  (zkCallback-5730-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 1256245 INFO  (zkCallback-5732-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 1256246 INFO  (zkCallback-5688-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 1256246 INFO  (zkCallback-5700-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 1256246 INFO  (zkCallback-5743-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 1256246 INFO  (zkCallback-5723-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 1256260 INFO  (zkCallback-5681-thread-3) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 1256260 INFO  (zkCallback-5753-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 1256279 INFO  (zkCallback-5768-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 1256279 INFO  (zkCallback-5766-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (4) -> (5)
   [junit4]   2> 1256304 INFO  (closeThreadPool-5702-thread-4) [n:127.0.0.1:42916_     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 1256308 INFO  (closeThreadPool-5702-thread-3) [n:127.0.0.1:45618_     ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 1256331 INFO  (zkConnectionManagerCallback-5776-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1256332 INFO  (closeThreadPool-5702-thread-4) [n:127.0.0.1:42916_     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 1256336 INFO  (closeThreadPool-5702-thread-4) [n:127.0.0.1:42916_     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (5)
   [junit4]   2> 1256337 INFO  (closeThreadPool-5702-thread-4) [n:127.0.0.1:42916_     ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:37467/solr ready
   [junit4]   2> 1256338 INFO  (closeThreadPool-5702-thread-4) [n:127.0.0.1:42916_     ] o.a.s.c.PackageManager clusterprops.json changed , version 0
   [junit4]   2> 1256381 INFO  (closeThreadPool-5702-thread-3) [n:127.0.0.1:45618_     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5789c771
   [junit4]   2> 1256420 INFO  (closeThreadPool-5702-thread-3) [n:127.0.0.1:45618_     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5789c771
   [junit4]   2> 1256420 INFO  (closeThreadPool-5702-thread-3) [n:127.0.0.1:45618_     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5789c771
   [junit4]   2> 1256436 INFO  (closeThreadPool-5702-thread-3) [n:127.0.0.1:45618_     ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/shard-3-001/cores
   [junit4]   2> 1256478 INFO  (closeThreadPool-5702-thread-4) [n:127.0.0.1:42916_     ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 1256771 INFO  (closeThreadPool-5702-thread-4) [n:127.0.0.1:42916_     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5789c771
   [junit4]   2> 1256845 INFO  (closeThreadPool-5702-thread-4) [n:127.0.0.1:42916_     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5789c771
   [junit4]   2> 1256845 INFO  (closeThreadPool-5702-thread-4) [n:127.0.0.1:42916_     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5789c771
   [junit4]   2> 1256899 INFO  (closeThreadPool-5702-thread-4) [n:127.0.0.1:42916_     ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/shard-4-001/cores
   [junit4]   2> 1256948 INFO  (closeThreadPool-5702-thread-3) [     ] o.a.s.c.AbstractFullDistribZkTestBase waitForLiveNode: 127.0.0.1:45618_
   [junit4]   2> 1257132 INFO  (closeThreadPool-5702-thread-4) [     ] o.a.s.c.AbstractFullDistribZkTestBase waitForLiveNode: 127.0.0.1:42916_
   [junit4]   2> 1257162 INFO  (qtp1675537933-9880) [n:127.0.0.1:34332_     ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :addreplica with params node=127.0.0.1:42916_&action=ADDREPLICA&collection=collection1&shard=shard1&type=NRT&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 1257163 INFO  (qtp1675537933-9881) [n:127.0.0.1:34332_     ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :addreplica with params node=127.0.0.1:34332_&action=ADDREPLICA&collection=collection1&shard=shard1&type=NRT&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 1257164 INFO  (qtp1675537933-9879) [n:127.0.0.1:34332_     ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :addreplica with params node=127.0.0.1:45618_&action=ADDREPLICA&collection=collection1&shard=shard2&type=NRT&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 1257168 INFO  (qtp1675537933-9882) [n:127.0.0.1:34332_     ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :addreplica with params node=127.0.0.1:36206_&action=ADDREPLICA&collection=collection1&shard=shard2&type=NRT&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 1257328 INFO  (OverseerThreadFactory-2062-thread-3-processing-n:127.0.0.1:34774_) [n:127.0.0.1:34774_ c:collection1 s:shard2   ] o.a.s.c.a.c.AddReplicaCmd Node Identified 127.0.0.1:45618_ for creating new replica of shard shard2 for collection collection1
   [junit4]   2> 1257397 INFO  (OverseerThreadFactory-2062-thread-4-processing-n:127.0.0.1:34774_) [n:127.0.0.1:34774_ c:collection1 s:shard1   ] o.a.s.c.a.c.AddReplicaCmd Node Identified 127.0.0.1:42916_ for creating new replica of shard shard1 for collection collection1
   [junit4]   2> 1257405 INFO  (OverseerThreadFactory-2062-thread-3-processing-n:127.0.0.1:34774_) [n:127.0.0.1:34774_ c:collection1 s:shard2   ] o.a.s.c.a.c.AddReplicaCmd Returning CreateReplica command.
   [junit4]   2> 1257442 INFO  (OverseerThreadFactory-2062-thread-4-processing-n:127.0.0.1:34774_) [n:127.0.0.1:34774_ c:collection1 s:shard1   ] o.a.s.c.a.c.AddReplicaCmd Returning CreateReplica command.
   [junit4]   2> 1257533 INFO  (qtp377267611-9931) [n:127.0.0.1:42916_    x:collection1_shard1_replica_n2 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&name=collection1_shard1_replica_n2&action=CREATE&collection=collection1&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 1257600 INFO  (qtp1862406676-9905) [n:127.0.0.1:45618_    x:collection1_shard2_replica_n1 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&name=collection1_shard2_replica_n1&action=CREATE&collection=collection1&shard=shard2&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 1258672 INFO  (qtp377267611-9931) [n:127.0.0.1:42916_ c:collection1 s:shard1  x:collection1_shard1_replica_n2 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.3.0
   [junit4]   2> 1258785 INFO  (qtp1862406676-9905) [n:127.0.0.1:45618_ c:collection1 s:shard2  x:collection1_shard2_replica_n1 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.3.0
   [junit4]   2> 1258965 INFO  (qtp1862406676-9905) [n:127.0.0.1:45618_ c:collection1 s:shard2  x:collection1_shard2_replica_n1 ] o.a.s.s.IndexSchema [collection1_shard2_replica_n1] Schema name=test
   [junit4]   2> 1258968 INFO  (qtp377267611-9931) [n:127.0.0.1:42916_ c:collection1 s:shard1  x:collection1_shard1_replica_n2 ] o.a.s.s.IndexSchema [collection1_shard1_replica_n2] Schema name=test
   [junit4]   2> 1259890 INFO  (qtp377267611-9931) [n:127.0.0.1:42916_ c:collection1 s:shard1  x:collection1_shard1_replica_n2 ] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 1259948 INFO  (qtp1862406676-9905) [n:127.0.0.1:45618_ c:collection1 s:shard2  x:collection1_shard2_replica_n1 ] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 1260022 INFO  (qtp377267611-9931) [n:127.0.0.1:42916_ c:collection1 s:shard1  x:collection1_shard1_replica_n2 ] o.a.s.c.CoreContainer Creating SolrCore 'collection1_shard1_replica_n2' using configuration from collection collection1, trusted=true
   [junit4]   2> 1260055 INFO  (qtp377267611-9931) [n:127.0.0.1:42916_ c:collection1 s:shard1  x:collection1_shard1_replica_n2 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.collection1.shard1.replica_n2' (registry 'solr.core.collection1.shard1.replica_n2') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5789c771
   [junit4]   2> 1260055 INFO  (qtp377267611-9931) [n:127.0.0.1:42916_ c:collection1 s:shard1  x:collection1_shard1_replica_n2 ] o.a.s.c.SolrCore [[collection1_shard1_replica_n2] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/shard-4-001/cores/collection1_shard1_replica_n2], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/shard-4-001/cores/collection1_shard1_replica_n2/data/]
   [junit4]   2> 1260057 INFO  (qtp377267611-9931) [n:127.0.0.1:42916_ c:collection1 s:shard1  x:collection1_shard1_replica_n2 ] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=14, maxMergeAtOnceExplicit=40, maxMergedSegmentMB=44.4296875, floorSegmentMB=1.7041015625, forceMergeDeletesPctAllowed=0.6647463089672323, segmentsPerTier=36.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.1906980108789277, deletesPctAllowed=21.775055026738894
   [junit4]   2> 1260072 WARN  (qtp377267611-9931) [n:127.0.0.1:42916_ c:collection1 s:shard1  x:collection1_shard1_replica_n2 ] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 1260184 INFO  (qtp1862406676-9905) [n:127.0.0.1:45618_ c:collection1 s:shard2  x:collection1_shard2_replica_n1 ] o.a.s.c.CoreContainer Creating SolrCore 'collection1_shard2_replica_n1' using configuration from collection collection1, trusted=true
   [junit4]   2> 1260217 INFO  (qtp1862406676-9905) [n:127.0.0.1:45618_ c:collection1 s:shard2  x:collection1_shard2_replica_n1 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.collection1.shard2.replica_n1' (registry 'solr.core.collection1.shard2.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@5789c771
   [junit4]   2> 1260217 INFO  (qtp1862406676-9905) [n:127.0.0.1:45618_ c:collection1 s:shard2  x:collection1_shard2_replica_n1 ] o.a.s.c.SolrCore [[collection1_shard2_replica_n1] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/shard-3-001/cores/collection1_shard2_replica_n1], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J2/temp/solr.cloud.UnloadDistributedZkTest_49E4041409F004D7-001/shard-3-001/cores/collection1_shard2_replica_n1/data/]
   [junit4]   2> 1260219 INFO  (qtp1862406676-9905) [n:127.0.0.1:45618_ c:collect

[...truncated too long message...]

gs :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml

resolve:

jar-checksums:
    [mkdir] Created dir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/null1863238593
     [copy] Copying 249 files to /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/null1863238593
   [delete] Deleting directory /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/null1863238593

check-working-copy:
[ivy:cachepath] :: resolving dependencies :: #;working@lucene2-us-west.apache.org
[ivy:cachepath] 	confs: [default]
[ivy:cachepath] 	found org.eclipse.jgit#org.eclipse.jgit;5.3.0.201903130848-r in public
[ivy:cachepath] 	found com.jcraft#jsch;0.1.54 in public
[ivy:cachepath] 	found com.jcraft#jzlib;1.1.1 in public
[ivy:cachepath] 	found com.googlecode.javaewah#JavaEWAH;1.1.6 in public
[ivy:cachepath] 	found org.slf4j#slf4j-api;1.7.2 in public
[ivy:cachepath] 	found org.bouncycastle#bcpg-jdk15on;1.60 in public
[ivy:cachepath] 	found org.bouncycastle#bcprov-jdk15on;1.60 in public
[ivy:cachepath] 	found org.bouncycastle#bcpkix-jdk15on;1.60 in public
[ivy:cachepath] 	found org.slf4j#slf4j-nop;1.7.2 in public
[ivy:cachepath] :: resolution report :: resolve 36ms :: artifacts dl 1ms
	---------------------------------------------------------------------
	|                  |            modules            ||   artifacts   |
	|       conf       | number| search|dwnlded|evicted|| number|dwnlded|
	---------------------------------------------------------------------
	|      default     |   9   |   0   |   0   |   0   ||   9   |   0   |
	---------------------------------------------------------------------
[wc-checker] Initializing working copy...
[wc-checker] Checking working copy status...

-jenkins-base:

BUILD SUCCESSFUL
Total time: 201 minutes 45 seconds
Archiving artifacts
java.lang.InterruptedException: no matches found within 10000
	at hudson.FilePath$ValidateAntFileMask.hasMatch(FilePath.java:2847)
	at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2726)
	at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2707)
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3086)
Also:   hudson.remoting.Channel$CallSiteStackTrace: Remote call to lucene2
		at hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1741)
		at hudson.remoting.UserRequest$ExceptionResponse.retrieve(UserRequest.java:357)
		at hudson.remoting.Channel.call(Channel.java:955)
		at hudson.FilePath.act(FilePath.java:1072)
		at hudson.FilePath.act(FilePath.java:1061)
		at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
		at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
		at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
		at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
		at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
		at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
		at hudson.model.Build$BuildExecution.post2(Build.java:186)
		at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
		at hudson.model.Run.execute(Run.java:1835)
		at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
		at hudson.model.ResourceController.execute(ResourceController.java:97)
		at hudson.model.Executor.run(Executor.java:429)
Caused: hudson.FilePath$TunneledInterruptedException
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3088)
	at hudson.remoting.UserRequest.perform(UserRequest.java:212)
	at hudson.remoting.UserRequest.perform(UserRequest.java:54)
	at hudson.remoting.Request$2.run(Request.java:369)
	at hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:748)
Caused: java.lang.InterruptedException: java.lang.InterruptedException: no matches found within 10000
	at hudson.FilePath.act(FilePath.java:1074)
	at hudson.FilePath.act(FilePath.java:1061)
	at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
	at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
	at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
	at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
	at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
	at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
	at hudson.model.Build$BuildExecution.post2(Build.java:186)
	at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
	at hudson.model.Run.execute(Run.java:1835)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:97)
	at hudson.model.Executor.run(Executor.java:429)
No artifacts found that match the file pattern "**/*.events,heapdumps/**,**/hs_err_pid*". Configuration error?
Recording test results
Build step 'Publish JUnit test result report' changed build result to UNSTABLE
Email was triggered for: Unstable (Test Failures)
Sending email for trigger: Unstable (Test Failures)

[JENKINS] Lucene-Solr-Tests-8.x - Build # 492 - Still Unstable

Posted by Apache Jenkins Server <je...@builds.apache.org>.
Build: https://builds.apache.org/job/Lucene-Solr-Tests-8.x/492/

2 tests failed.
FAILED:  org.apache.solr.cloud.api.collections.TestHdfsCloudBackupRestore.testRestoreFailure

Error Message:
Failed collection is still in the clusterstate: DocCollection(hdfsbackuprestore_testfailure_restored//collections/hdfsbackuprestore_testfailure_restored/state.json/2)={   "pullReplicas":0,   "replicationFactor":2,   "shards":{     "shard2":{       "range":"0-7fffffff",       "state":"construction",       "replicas":{"core_node2":{           "core":"hdfsbackuprestore_testfailure_restored_shard2_replica_n1",           "base_url":"https://127.0.0.1:37581/solr",           "node_name":"127.0.0.1:37581_solr",           "state":"down",           "type":"NRT",           "force_set_state":"false"}},       "stateTimestamp":"1567075412159408232"},     "shard1":{       "range":"80000000-ffffffff",       "state":"construction",       "replicas":{},       "stateTimestamp":"1567075412159421642"}},   "router":{"name":"compositeId"},   "maxShardsPerNode":"2",   "autoAddReplicas":"false",   "nrtReplicas":2,   "tlogReplicas":0} Expected: not a collection containing "hdfsbackuprestore_testfailure_restored"      but: was <[hdfsbackuprestore_testok, hdfsbackuprestore_testfailure_restored, hdfsbackuprestore_testfailure, hdfsbackuprestore_testok_restored]>

Stack Trace:
java.lang.AssertionError: Failed collection is still in the clusterstate: DocCollection(hdfsbackuprestore_testfailure_restored//collections/hdfsbackuprestore_testfailure_restored/state.json/2)={
  "pullReplicas":0,
  "replicationFactor":2,
  "shards":{
    "shard2":{
      "range":"0-7fffffff",
      "state":"construction",
      "replicas":{"core_node2":{
          "core":"hdfsbackuprestore_testfailure_restored_shard2_replica_n1",
          "base_url":"https://127.0.0.1:37581/solr",
          "node_name":"127.0.0.1:37581_solr",
          "state":"down",
          "type":"NRT",
          "force_set_state":"false"}},
      "stateTimestamp":"1567075412159408232"},
    "shard1":{
      "range":"80000000-ffffffff",
      "state":"construction",
      "replicas":{},
      "stateTimestamp":"1567075412159421642"}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"2",
  "autoAddReplicas":"false",
  "nrtReplicas":2,
  "tlogReplicas":0}
Expected: not a collection containing "hdfsbackuprestore_testfailure_restored"
     but: was <[hdfsbackuprestore_testok, hdfsbackuprestore_testfailure_restored, hdfsbackuprestore_testfailure, hdfsbackuprestore_testok_restored]>
	at __randomizedtesting.SeedInfo.seed([4E0D9701184A2A9E:67710924301329B3]:0)
	at org.hamcrest.MatcherAssert.assertThat(MatcherAssert.java:20)
	at org.junit.Assert.assertThat(Assert.java:956)
	at org.apache.solr.cloud.api.collections.AbstractCloudBackupRestoreTestCase.testRestoreFailure(AbstractCloudBackupRestoreTestCase.java:211)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.lang.Thread.run(Thread.java:748)


FAILED:  org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore.testRestoreFailure

Error Message:
Failed collection is still in the clusterstate: DocCollection(backuprestore_testfailure_restored//collections/backuprestore_testfailure_restored/state.json/2)={   "pullReplicas":0,   "replicationFactor":2,   "shards":{     "shard2":{       "range":"0-7fffffff",       "state":"construction",       "replicas":{"core_node2":{           "core":"backuprestore_testfailure_restored_shard2_replica_n1",           "base_url":"http://127.0.0.1:33234/solr",           "node_name":"127.0.0.1:33234_solr",           "state":"down",           "type":"NRT",           "force_set_state":"false"}},       "stateTimestamp":"1567078270679883280"},     "shard1":{       "range":"80000000-ffffffff",       "state":"construction",       "replicas":{},       "stateTimestamp":"1567078270679892465"}},   "router":{"name":"compositeId"},   "maxShardsPerNode":"2",   "autoAddReplicas":"false",   "nrtReplicas":2,   "tlogReplicas":0} Expected: not a collection containing "backuprestore_testfailure_restored"      but: was <[backuprestore_testok, backuprestore_testfailure, backuprestore_testfailure_restored, backuprestore_testok_restored]>

Stack Trace:
java.lang.AssertionError: Failed collection is still in the clusterstate: DocCollection(backuprestore_testfailure_restored//collections/backuprestore_testfailure_restored/state.json/2)={
  "pullReplicas":0,
  "replicationFactor":2,
  "shards":{
    "shard2":{
      "range":"0-7fffffff",
      "state":"construction",
      "replicas":{"core_node2":{
          "core":"backuprestore_testfailure_restored_shard2_replica_n1",
          "base_url":"http://127.0.0.1:33234/solr",
          "node_name":"127.0.0.1:33234_solr",
          "state":"down",
          "type":"NRT",
          "force_set_state":"false"}},
      "stateTimestamp":"1567078270679883280"},
    "shard1":{
      "range":"80000000-ffffffff",
      "state":"construction",
      "replicas":{},
      "stateTimestamp":"1567078270679892465"}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"2",
  "autoAddReplicas":"false",
  "nrtReplicas":2,
  "tlogReplicas":0}
Expected: not a collection containing "backuprestore_testfailure_restored"
     but: was <[backuprestore_testok, backuprestore_testfailure, backuprestore_testfailure_restored, backuprestore_testok_restored]>
	at __randomizedtesting.SeedInfo.seed([4E0D9701184A2A9E:67710924301329B3]:0)
	at org.hamcrest.MatcherAssert.assertThat(MatcherAssert.java:20)
	at org.junit.Assert.assertThat(Assert.java:956)
	at org.apache.solr.cloud.api.collections.AbstractCloudBackupRestoreTestCase.testRestoreFailure(AbstractCloudBackupRestoreTestCase.java:211)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.lang.Thread.run(Thread.java:748)
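
Both failures come down to the same Hamcrest check in AbstractCloudBackupRestoreTestCase.testRestoreFailure: after a deliberately failed restore, the partially restored collection is expected to have been removed from the cluster state, but it is still listed. To make the "Expected: not a collection containing ... but: was <[...]>" output easier to read, here is a minimal, self-contained sketch of that style of assertion; the class, list contents and collection name are illustrative, not the actual Solr test code.

// Minimal sketch (hypothetical names) of the assertion style that produces the
// "Expected: not a collection containing ..." messages above.
import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.MatcherAssert.assertThat;

import java.util.Arrays;
import java.util.List;

public class RestoreFailureAssertionSketch {
    public static void main(String[] args) {
        // Collections as they might look once a failed restore has been cleaned up.
        List<String> collections = Arrays.asList(
                "backuprestore_testok",
                "backuprestore_testfailure",
                "backuprestore_testok_restored");
        String restoredName = "backuprestore_testfailure_restored";

        // Passes here; in the failing builds the cluster state still contained
        // restoredName, which is exactly the AssertionError shown above.
        assertThat("Failed collection is still in the clusterstate",
                collections, not(hasItem(restoredName)));
        System.out.println("cleanup assertion passed");
    }
}
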




Build Log:
[...truncated 13394 lines...]
   [junit4] Suite: org.apache.solr.cloud.api.collections.TestHdfsCloudBackupRestore
   [junit4]   2> 338934 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
   [junit4]   2> 338951 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.s.SolrTestCaseJ4 Created dataDir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/data-dir-9-001
   [junit4]   2> 338962 WARN  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=10 numCloses=10
   [junit4]   2> 338962 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP=true
   [junit4]   2> 338989 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.s.SolrTestCaseJ4 Randomized ssl (true) and clientAuth (true) via: @org.apache.solr.util.RandomizeSSL(reason=, value=NaN, ssl=NaN, clientAuth=NaN)
   [junit4]   2> 342594 WARN  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.h.u.NativeCodeLoader Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
   [junit4]   1> Formatting using clusterid: testClusterID
   [junit4]   2> 350226 WARN  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.h.m.i.MetricsConfig Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
   [junit4]   2> 351833 WARN  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 351978 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
   [junit4]   2> 351994 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 351994 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 351994 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 351996 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@54fb9ee{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 353850 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@5cec5af1{hdfs,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/jetty-lucene2-us-west.apache.org-43747-hdfs-_-any-4815867477447280271.dir/webapp/,AVAILABLE}{/hdfs}
   [junit4]   2> 353905 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.e.j.s.AbstractConnector Started ServerConnector@4da13787{HTTP/1.1,[http/1.1]}{lucene2-us-west.apache.org:43747}
   [junit4]   2> 353905 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.e.j.s.Server Started @353949ms
   [junit4]   2> 362077 WARN  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 362109 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
   [junit4]   2> 362122 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 362122 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 362122 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 362122 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@4b2afa75{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 362976 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@72547b5{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/jetty-localhost-38032-datanode-_-any-4527236998548102363.dir/webapp/,AVAILABLE}{/datanode}
   [junit4]   2> 362980 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.e.j.s.AbstractConnector Started ServerConnector@39082416{HTTP/1.1,[http/1.1]}{localhost:38032}
   [junit4]   2> 362980 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.e.j.s.Server Started @363023ms
   [junit4]   2> 366254 WARN  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 366255 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
   [junit4]   2> 366376 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 366376 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 366376 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 366384 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@7dab02f2{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 367841 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@4aea4952{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/jetty-localhost-40687-datanode-_-any-2656067372604467379.dir/webapp/,AVAILABLE}{/datanode}
   [junit4]   2> 367842 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.e.j.s.AbstractConnector Started ServerConnector@6ac5e7a4{HTTP/1.1,[http/1.1]}{localhost:40687}
   [junit4]   2> 367842 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.e.j.s.Server Started @367885ms
   [junit4]   2> 372676 WARN  (Thread-265) [     ] o.a.h.h.s.d.f.i.FsDatasetImpl Lock held time above threshold: lock identifier: org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl lockHeldTimeMs=312 ms. Suppressed 0 lock warnings. The stack trace is: java.lang.Thread.getStackTrace(Thread.java:1559)
   [junit4]   2> org.apache.hadoop.util.StringUtils.getStackTrace(StringUtils.java:1032)
   [junit4]   2> org.apache.hadoop.util.InstrumentedLock.logWarning(InstrumentedLock.java:148)
   [junit4]   2> org.apache.hadoop.util.InstrumentedLock.check(InstrumentedLock.java:186)
   [junit4]   2> org.apache.hadoop.util.InstrumentedLock.unlock(InstrumentedLock.java:133)
   [junit4]   2> org.apache.hadoop.util.AutoCloseableLock.release(AutoCloseableLock.java:84)
   [junit4]   2> org.apache.hadoop.util.AutoCloseableLock.close(AutoCloseableLock.java:96)
   [junit4]   2> org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.activateVolume(FsDatasetImpl.java:429)
   [junit4]   2> org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.addVolume(FsDatasetImpl.java:449)
   [junit4]   2> org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.<init>(FsDatasetImpl.java:334)
   [junit4]   2> org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetFactory.newInstance(FsDatasetFactory.java:34)
   [junit4]   2> org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetFactory.newInstance(FsDatasetFactory.java:30)
   [junit4]   2> org.apache.hadoop.hdfs.server.datanode.DataNode.initStorage(DataNode.java:1732)
   [junit4]   2> org.apache.hadoop.hdfs.server.datanode.DataNode.initBlockPool(DataNode.java:1678)
   [junit4]   2> org.apache.hadoop.hdfs.server.datanode.BPOfferService.verifyAndSetNamespaceInfo(BPOfferService.java:390)
   [junit4]   2> org.apache.hadoop.hdfs.server.datanode.BPServiceActor.connectToNNAndHandshake(BPServiceActor.java:280)
   [junit4]   2> org.apache.hadoop.hdfs.server.datanode.BPServiceActor.run(BPServiceActor.java:819)
   [junit4]   2> java.lang.Thread.run(Thread.java:748)
   [junit4]   2> 
   [junit4]   2> 375742 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0x804f02bdacb9a407: Processing first storage report for DS-58cdbaf2-df89-4ecb-98c5-7ccac28bd359 from datanode 8f4b546c-4238-4abe-a0dc-0eb8566cc48b
   [junit4]   2> 375796 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0x804f02bdacb9a407: from storage DS-58cdbaf2-df89-4ecb-98c5-7ccac28bd359 node DatanodeRegistration(127.0.0.1:41676, datanodeUuid=8f4b546c-4238-4abe-a0dc-0eb8566cc48b, infoPort=42413, infoSecurePort=0, ipcPort=41430, storageInfo=lv=-57;cid=testClusterID;nsid=714383687;c=1567075296700), blocks: 0, hasStaleStorage: true, processing time: 3 msecs, invalidatedBlocks: 0
   [junit4]   2> 375815 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0x629aaa2f76954a6d: Processing first storage report for DS-24037231-2a50-48b4-8406-05829731388c from datanode 88de6798-6e70-4069-ba3e-b40c9b9b5a74
   [junit4]   2> 375816 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0x629aaa2f76954a6d: from storage DS-24037231-2a50-48b4-8406-05829731388c node DatanodeRegistration(127.0.0.1:42762, datanodeUuid=88de6798-6e70-4069-ba3e-b40c9b9b5a74, infoPort=43879, infoSecurePort=0, ipcPort=39957, storageInfo=lv=-57;cid=testClusterID;nsid=714383687;c=1567075296700), blocks: 0, hasStaleStorage: true, processing time: 1 msecs, invalidatedBlocks: 0
   [junit4]   2> 375844 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0x629aaa2f76954a6d: Processing first storage report for DS-ad624b20-3299-49b9-8c24-ae7dc641703e from datanode 88de6798-6e70-4069-ba3e-b40c9b9b5a74
   [junit4]   2> 375844 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0x629aaa2f76954a6d: from storage DS-ad624b20-3299-49b9-8c24-ae7dc641703e node DatanodeRegistration(127.0.0.1:42762, datanodeUuid=88de6798-6e70-4069-ba3e-b40c9b9b5a74, infoPort=43879, infoSecurePort=0, ipcPort=39957, storageInfo=lv=-57;cid=testClusterID;nsid=714383687;c=1567075296700), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 375844 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0x804f02bdacb9a407: Processing first storage report for DS-70f57e38-5a07-407d-a93c-52587d38a2e8 from datanode 8f4b546c-4238-4abe-a0dc-0eb8566cc48b
   [junit4]   2> 375844 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0x804f02bdacb9a407: from storage DS-70f57e38-5a07-407d-a93c-52587d38a2e8 node DatanodeRegistration(127.0.0.1:41676, datanodeUuid=8f4b546c-4238-4abe-a0dc-0eb8566cc48b, infoPort=42413, infoSecurePort=0, ipcPort=41430, storageInfo=lv=-57;cid=testClusterID;nsid=714383687;c=1567075296700), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 376286 WARN  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.s.c.a.c.TestHdfsCloudBackupRestore The NameNode is in SafeMode - Solr will wait 5 seconds and try again.
   [junit4]   2> 381292 WARN  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.s.c.a.c.TestHdfsCloudBackupRestore The NameNode is in SafeMode - Solr will wait 5 seconds and try again.
   [junit4]   2> 386829 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.s.c.MiniSolrCloudCluster Starting cluster of 2 servers in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002
   [junit4]   2> 386829 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 386856 INFO  (ZkTestServer Run Thread) [     ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 386856 INFO  (ZkTestServer Run Thread) [     ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 386956 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.s.c.ZkTestServer start zk server on port:33641
   [junit4]   2> 386956 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.s.c.ZkTestServer waitForServerUp: 127.0.0.1:33641
   [junit4]   2> 386956 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:33641
   [junit4]   2> 386956 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 33641
   [junit4]   2> 386975 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 387013 INFO  (zkConnectionManagerCallback-578-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 387022 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 387025 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 387054 INFO  (zkConnectionManagerCallback-580-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 387055 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 387056 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 387057 INFO  (zkConnectionManagerCallback-582-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 387057 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
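Editor's note: the Waiting/connected pairs above are the suite's ZooKeeper clients attaching to the embedded ZkTestServer on port 33641. A client outside the test framework would connect to the same ensemble roughly like this (the port and the /solr chroot are taken from the log; everything else is illustrative):

    import java.util.Collections;
    import java.util.Optional;
    import org.apache.solr.client.solrj.impl.CloudSolrClient;

    // Sketch: a CloudSolrClient pointed at the test ZK ensemble with the /solr chroot.
    try (CloudSolrClient client = new CloudSolrClient.Builder(
            Collections.singletonList("127.0.0.1:33641"), Optional.of("/solr")).build()) {
      client.connect();   // blocks until cluster state can be read from ZooKeeper
      // ... issue requests against the cluster ...
    }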
   [junit4]   2> 387174 WARN  (jetty-launcher-583-thread-2) [     ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 387174 WARN  (jetty-launcher-583-thread-1) [     ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 387174 INFO  (jetty-launcher-583-thread-2) [     ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 387174 INFO  (jetty-launcher-583-thread-1) [     ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 387174 INFO  (jetty-launcher-583-thread-1) [     ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 387174 INFO  (jetty-launcher-583-thread-2) [     ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 387174 INFO  (jetty-launcher-583-thread-1) [     ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
   [junit4]   2> 387174 INFO  (jetty-launcher-583-thread-2) [     ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
   [junit4]   2> 387188 INFO  (jetty-launcher-583-thread-2) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 387188 INFO  (jetty-launcher-583-thread-2) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 387188 INFO  (jetty-launcher-583-thread-2) [     ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 387189 INFO  (jetty-launcher-583-thread-2) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@4fd41b22{/solr,null,AVAILABLE}
   [junit4]   2> 387190 INFO  (jetty-launcher-583-thread-2) [     ] o.e.j.s.AbstractConnector Started ServerConnector@7fc0d56c{SSL,[ssl, http/1.1]}{127.0.0.1:37581}
   [junit4]   2> 387190 INFO  (jetty-launcher-583-thread-2) [     ] o.e.j.s.Server Started @387233ms
   [junit4]   2> 387190 INFO  (jetty-launcher-583-thread-2) [     ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=37581}
   [junit4]   2> 387190 ERROR (jetty-launcher-583-thread-2) [     ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 387190 INFO  (jetty-launcher-583-thread-2) [     ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 387190 INFO  (jetty-launcher-583-thread-2) [     ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 8.3.0
   [junit4]   2> 387190 INFO  (jetty-launcher-583-thread-2) [     ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 387190 INFO  (jetty-launcher-583-thread-2) [     ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 387190 INFO  (jetty-launcher-583-thread-2) [     ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-08-29T10:42:16.597Z
   [junit4]   2> 387208 INFO  (jetty-launcher-583-thread-2) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 387244 INFO  (jetty-launcher-583-thread-1) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 387244 INFO  (jetty-launcher-583-thread-1) [     ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 387244 INFO  (jetty-launcher-583-thread-1) [     ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 387244 INFO  (jetty-launcher-583-thread-1) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@548f922b{/solr,null,AVAILABLE}
   [junit4]   2> 387249 INFO  (jetty-launcher-583-thread-1) [     ] o.e.j.s.AbstractConnector Started ServerConnector@7c62ff31{SSL,[ssl, http/1.1]}{127.0.0.1:41788}
   [junit4]   2> 387249 INFO  (jetty-launcher-583-thread-1) [     ] o.e.j.s.Server Started @387292ms
   [junit4]   2> 387249 INFO  (jetty-launcher-583-thread-1) [     ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=41788}
   [junit4]   2> 387249 ERROR (jetty-launcher-583-thread-1) [     ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 387249 INFO  (jetty-launcher-583-thread-1) [     ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 387249 INFO  (jetty-launcher-583-thread-1) [     ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 8.3.0
   [junit4]   2> 387249 INFO  (jetty-launcher-583-thread-1) [     ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 387249 INFO  (jetty-launcher-583-thread-1) [     ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 387249 INFO  (jetty-launcher-583-thread-1) [     ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-08-29T10:42:16.656Z
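Editor's note: the two ERROR lines above ("Missing Java Option solr.log.dir") only mean the embedded Jetty nodes were started without a log directory configured; outside of tests this is normally supplied as a JVM option. A hedged illustration (the path is a placeholder):

    // Equivalent to passing -Dsolr.log.dir=/var/solr/logs on the JVM command line;
    // it must be set before the Solr node (JettySolrRunner) is started.
    System.setProperty("solr.log.dir", "/var/solr/logs");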
   [junit4]   2> 387251 INFO  (zkConnectionManagerCallback-585-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 387252 INFO  (jetty-launcher-583-thread-2) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 387261 INFO  (jetty-launcher-583-thread-2) [     ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 387281 INFO  (jetty-launcher-583-thread-1) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 387297 INFO  (zkConnectionManagerCallback-587-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 387297 INFO  (jetty-launcher-583-thread-1) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 387298 INFO  (jetty-launcher-583-thread-1) [     ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 387318 INFO  (jetty-launcher-583-thread-2) [     ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 387320 INFO  (jetty-launcher-583-thread-1) [     ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 388528 INFO  (jetty-launcher-583-thread-1) [     ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 388529 WARN  (jetty-launcher-583-thread-1) [     ] o.a.s.c.s.i.Http2SolrClient Create Http2SolrClient with HTTP/1.1 transport since Java 8 or lower versions does not support SSL + HTTP/2
   [junit4]   2> 388635 WARN  (jetty-launcher-583-thread-1) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@4324d32b[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 388635 WARN  (jetty-launcher-583-thread-1) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@4324d32b[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 388672 WARN  (jetty-launcher-583-thread-1) [     ] o.a.s.c.s.i.Http2SolrClient Create Http2SolrClient with HTTP/1.1 transport since Java 8 or lower versions does not support SSL + HTTP/2
   [junit4]   2> 388700 WARN  (jetty-launcher-583-thread-1) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@5ad2dc8c[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 388700 WARN  (jetty-launcher-583-thread-1) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@5ad2dc8c[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 388701 INFO  (jetty-launcher-583-thread-1) [     ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:33641/solr
   [junit4]   2> 388715 INFO  (jetty-launcher-583-thread-1) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 388772 INFO  (zkConnectionManagerCallback-595-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 388772 INFO  (jetty-launcher-583-thread-1) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 388930 INFO  (jetty-launcher-583-thread-2) [     ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 388947 WARN  (jetty-launcher-583-thread-2) [     ] o.a.s.c.s.i.Http2SolrClient Create Http2SolrClient with HTTP/1.1 transport since Java 8 or lower versions does not support SSL + HTTP/2
   [junit4]   2> 388948 WARN  (jetty-launcher-583-thread-2) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@1b735181[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 388948 WARN  (jetty-launcher-583-thread-2) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@1b735181[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 388960 WARN  (jetty-launcher-583-thread-2) [     ] o.a.s.c.s.i.Http2SolrClient Create Http2SolrClient with HTTP/1.1 transport since Java 8 or lower versions does not support SSL + HTTP/2
   [junit4]   2> 388961 WARN  (jetty-launcher-583-thread-2) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@6e3c817e[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 388961 WARN  (jetty-launcher-583-thread-2) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@6e3c817e[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 388971 INFO  (jetty-launcher-583-thread-2) [     ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:33641/solr
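Editor's note: the repeated WARN lines above show each node building Http2SolrClient instances that fall back to HTTP/1.1, because SSL together with HTTP/2 is not supported on Java 8. Constructing such a client directly looks roughly like this (the base URL is one of the node URLs from the log; nothing here is specific to this test):

    import org.apache.solr.client.solrj.impl.Http2SolrClient;

    // Sketch only: on Java 8 this client transparently uses HTTP/1.1 over TLS,
    // which is exactly what the WARN lines report.
    try (Http2SolrClient client =
             new Http2SolrClient.Builder("https://127.0.0.1:41788/solr").build()) {
      // ... client.request(...) ...
    }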
   [junit4]   2> 388991 INFO  (jetty-launcher-583-thread-1) [n:127.0.0.1:41788_solr     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 389007 INFO  (jetty-launcher-583-thread-2) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 389008 INFO  (zkConnectionManagerCallback-599-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 389008 INFO  (jetty-launcher-583-thread-1) [n:127.0.0.1:41788_solr     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 389028 INFO  (zkConnectionManagerCallback-603-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 389028 INFO  (jetty-launcher-583-thread-2) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 389246 INFO  (jetty-launcher-583-thread-2) [n:127.0.0.1:37581_solr     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 389295 INFO  (zkConnectionManagerCallback-607-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 389299 INFO  (jetty-launcher-583-thread-2) [n:127.0.0.1:37581_solr     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 389464 INFO  (jetty-launcher-583-thread-2) [n:127.0.0.1:37581_solr     ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:37581_solr
   [junit4]   2> 389465 INFO  (jetty-launcher-583-thread-2) [n:127.0.0.1:37581_solr     ] o.a.s.c.Overseer Overseer (id=75295907184115720-127.0.0.1:37581_solr-n_0000000000) starting
   [junit4]   2> 389854 INFO  (jetty-launcher-583-thread-2) [n:127.0.0.1:37581_solr     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 389951 INFO  (zkConnectionManagerCallback-614-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 389963 INFO  (jetty-launcher-583-thread-2) [n:127.0.0.1:37581_solr     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 389982 INFO  (jetty-launcher-583-thread-2) [n:127.0.0.1:37581_solr     ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:33641/solr ready
   [junit4]   2> 390000 INFO  (OverseerStateUpdate-75295907184115720-127.0.0.1:37581_solr-n_0000000000) [n:127.0.0.1:37581_solr     ] o.a.s.c.Overseer Starting to work on the main queue : 127.0.0.1:37581_solr
   [junit4]   2> 390001 INFO  (jetty-launcher-583-thread-2) [n:127.0.0.1:37581_solr     ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:37581_solr
   [junit4]   2> 390016 INFO  (jetty-launcher-583-thread-2) [n:127.0.0.1:37581_solr     ] o.a.s.c.PackageManager clusterprops.json changed , version 0
   [junit4]   2> 390017 INFO  (jetty-launcher-583-thread-2) [n:127.0.0.1:37581_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Added backup repository with configuration params {type = repository,name = hdfs,class = org.apache.solr.core.backup.repository.HdfsBackupRepository,attributes = {name=hdfs, class=org.apache.solr.core.backup.repository.HdfsBackupRepository},args = {location=/backup,solr.hdfs.home=hdfs://lucene2-us-west.apache.org:37840/solr,solr.hdfs.confdir=}}
   [junit4]   2> 390017 INFO  (jetty-launcher-583-thread-2) [n:127.0.0.1:37581_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Added backup repository with configuration params {type = repository,name = poisioned,class = org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository,attributes = {default=true, name=poisioned, class=org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository},}
   [junit4]   2> 390017 INFO  (jetty-launcher-583-thread-2) [n:127.0.0.1:37581_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Default configuration for backup repository is with configuration params {type = repository,name = poisioned,class = org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository,attributes = {default=true, name=poisioned, class=org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository},}
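Editor's note: the three BackupRepositoryFactory lines above register two repositories from solr.xml: the HDFS repository under test and a deliberately broken "poisioned" repository marked as the default. A backup and restore that explicitly name the HDFS repository would be issued through SolrJ roughly as follows (the backup name is an assumption; the collection name and location mirror the log, and the SolrClient is assumed to exist):

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.request.CollectionAdminRequest;

    // Sketch: back up and restore through the named "hdfs" repository,
    // bypassing the poisoned default registered in solr.xml.
    static void backupAndRestore(SolrClient client) throws Exception {
      CollectionAdminRequest.backupCollection("hdfsbackuprestore_testok", "mybackup")
          .setRepositoryName("hdfs")
          .setLocation("/backup")      // matches the location configured in the log
          .process(client);

      CollectionAdminRequest.restoreCollection("hdfsbackuprestore_testok_restored", "mybackup")
          .setRepositoryName("hdfs")
          .setLocation("/backup")
          .process(client);
    }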
   [junit4]   2> 390040 INFO  (zkCallback-613-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 390081 INFO  (zkCallback-606-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 390227 INFO  (jetty-launcher-583-thread-2) [n:127.0.0.1:37581_solr     ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 390336 INFO  (jetty-launcher-583-thread-2) [n:127.0.0.1:37581_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50
   [junit4]   2> 390391 INFO  (jetty-launcher-583-thread-2) [n:127.0.0.1:37581_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50
   [junit4]   2> 390391 INFO  (jetty-launcher-583-thread-2) [n:127.0.0.1:37581_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50
   [junit4]   2> 390393 INFO  (jetty-launcher-583-thread-2) [n:127.0.0.1:37581_solr     ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node2/.
   [junit4]   2> 391023 INFO  (jetty-launcher-583-thread-1) [n:127.0.0.1:41788_solr     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 391451 INFO  (jetty-launcher-583-thread-1) [n:127.0.0.1:41788_solr     ] o.a.s.c.ZkController Publish node=127.0.0.1:41788_solr as DOWN
   [junit4]   2> 391452 INFO  (jetty-launcher-583-thread-1) [n:127.0.0.1:41788_solr     ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 transient cores
   [junit4]   2> 391452 INFO  (jetty-launcher-583-thread-1) [n:127.0.0.1:41788_solr     ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:41788_solr
   [junit4]   2> 391454 INFO  (zkCallback-613-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 391473 INFO  (jetty-launcher-583-thread-1) [n:127.0.0.1:41788_solr     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 391488 INFO  (zkCallback-606-thread-2) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 391489 INFO  (zkConnectionManagerCallback-620-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 391489 INFO  (jetty-launcher-583-thread-1) [n:127.0.0.1:41788_solr     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 391490 INFO  (zkCallback-598-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 391524 INFO  (jetty-launcher-583-thread-1) [n:127.0.0.1:41788_solr     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 391572 INFO  (jetty-launcher-583-thread-1) [n:127.0.0.1:41788_solr     ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:33641/solr ready
   [junit4]   2> 391605 INFO  (jetty-launcher-583-thread-1) [n:127.0.0.1:41788_solr     ] o.a.s.c.PackageManager clusterprops.json changed , version 0
   [junit4]   2> 391605 INFO  (jetty-launcher-583-thread-1) [n:127.0.0.1:41788_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Added backup repository with configuration params {type = repository,name = hdfs,class = org.apache.solr.core.backup.repository.HdfsBackupRepository,attributes = {name=hdfs, class=org.apache.solr.core.backup.repository.HdfsBackupRepository},args = {location=/backup,solr.hdfs.home=hdfs://lucene2-us-west.apache.org:37840/solr,solr.hdfs.confdir=}}
   [junit4]   2> 391605 INFO  (jetty-launcher-583-thread-1) [n:127.0.0.1:41788_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Added backup repository with configuration params {type = repository,name = poisioned,class = org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository,attributes = {default=true, name=poisioned, class=org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository},}
   [junit4]   2> 391605 INFO  (jetty-launcher-583-thread-1) [n:127.0.0.1:41788_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Default configuration for backup repository is with configuration params {type = repository,name = poisioned,class = org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository,attributes = {default=true, name=poisioned, class=org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository},}
   [junit4]   2> 391846 INFO  (jetty-launcher-583-thread-1) [n:127.0.0.1:41788_solr     ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 392024 INFO  (jetty-launcher-583-thread-1) [n:127.0.0.1:41788_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50
   [junit4]   2> 392170 INFO  (jetty-launcher-583-thread-1) [n:127.0.0.1:41788_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50
   [junit4]   2> 392170 INFO  (jetty-launcher-583-thread-1) [n:127.0.0.1:41788_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50
   [junit4]   2> 392171 INFO  (jetty-launcher-583-thread-1) [n:127.0.0.1:41788_solr     ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node1/.
   [junit4]   2> 392642 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.s.c.MiniSolrCloudCluster waitForAllNodes: numServers=2
   [junit4]   2> 392644 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 392674 INFO  (zkConnectionManagerCallback-626-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 392674 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 392675 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 392676 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:33641/solr ready
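Editor's note: everything from the ZkTestServer start through waitForAllNodes above is what MiniSolrCloudCluster provides to a SolrCloudTestCase subclass. A minimal sketch of how a test typically brings up the same kind of two-node cluster (the test class name and configset are placeholders):

    import org.apache.solr.cloud.SolrCloudTestCase;
    import org.junit.BeforeClass;

    public class MyBackupRestoreTest extends SolrCloudTestCase {

      @BeforeClass
      public static void setupCluster() throws Exception {
        // Two Jetty-backed Solr nodes plus an embedded ZkTestServer, as in the log above.
        configureCluster(2)
            .addConfig("conf1", configset("cloud-minimal"))  // configset name is illustrative
            .configure();
      }
    }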
   [junit4]   2> 394167 INFO  (TEST-TestHdfsCloudBackupRestore.test-seed#[4E0D9701184A2A9E]) [     ] o.a.s.SolrTestCaseJ4 ###Starting test
   [junit4]   2> 394365 INFO  (qtp865602825-2264) [n:127.0.0.1:41788_solr     ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&router.name=implicit&version=2&pullReplicas=1&shards=shard1,shard2&property.customKey=customValue&maxShardsPerNode=-1&router.field=shard_s&name=hdfsbackuprestore_testok&nrtReplicas=1&action=CREATE&tlogReplicas=1&wt=javabin and sendToOCPQueue=true
   [junit4]   2> 394470 INFO  (OverseerThreadFactory-530-thread-1-processing-n:127.0.0.1:37581_solr) [n:127.0.0.1:37581_solr     ] o.a.s.c.a.c.CreateCollectionCmd Create collection hdfsbackuprestore_testok
   [junit4]   2> 394632 WARN  (OverseerThreadFactory-530-thread-1-processing-n:127.0.0.1:37581_solr) [n:127.0.0.1:37581_solr     ] o.a.s.c.a.c.CreateCollectionCmd Specified number of replicas of 3 on collection hdfsbackuprestore_testok is higher than the number of Solr instances currently live or live and part of your createNodeSet(2). It's unusual to run two replicas of the same slice on the same Solr instance.
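Editor's note: the CREATE call logged above builds a two-shard collection with the implicit router and one NRT, one TLOG and one PULL replica per shard, which is why the Overseer warns about more replicas than live nodes. Roughly the same request in SolrJ (names mirror the log; the property.customKey parameter is omitted and the SolrClient is assumed):

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.request.CollectionAdminRequest;

    // Sketch of the CREATE shown in the log: implicit router, shards shard1/shard2,
    // one NRT + one TLOG + one PULL replica per shard.
    static void createTestCollection(SolrClient client) throws Exception {
      CollectionAdminRequest
          .createCollectionWithImplicitRouter(
              "hdfsbackuprestore_testok", "conf1", "shard1,shard2", 1, 1, 1)
          .setRouterField("shard_s")
          .setMaxShardsPerNode(-1)
          .process(client);
    }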
   [junit4]   2> 394669 INFO  (OverseerStateUpdate-75295907184115720-127.0.0.1:37581_solr-n_0000000000) [n:127.0.0.1:37581_solr     ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"hdfsbackuprestore_testok",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"hdfsbackuprestore_testok_shard1_replica_n1",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"https://127.0.0.1:41788/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 394708 INFO  (OverseerStateUpdate-75295907184115720-127.0.0.1:37581_solr-n_0000000000) [n:127.0.0.1:37581_solr     ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"hdfsbackuprestore_testok",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"hdfsbackuprestore_testok_shard1_replica_t2",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"https://127.0.0.1:37581/solr",
   [junit4]   2>   "type":"TLOG",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 394758 INFO  (OverseerStateUpdate-75295907184115720-127.0.0.1:37581_solr-n_0000000000) [n:127.0.0.1:37581_solr     ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"hdfsbackuprestore_testok",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"hdfsbackuprestore_testok_shard1_replica_p4",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"https://127.0.0.1:41788/solr",
   [junit4]   2>   "type":"PULL",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 394794 INFO  (OverseerStateUpdate-75295907184115720-127.0.0.1:37581_solr-n_0000000000) [n:127.0.0.1:37581_solr     ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"hdfsbackuprestore_testok",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"hdfsbackuprestore_testok_shard2_replica_n6",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"https://127.0.0.1:37581/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 394815 INFO  (OverseerStateUpdate-75295907184115720-127.0.0.1:37581_solr-n_0000000000) [n:127.0.0.1:37581_solr     ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"hdfsbackuprestore_testok",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"hdfsbackuprestore_testok_shard2_replica_t8",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"https://127.0.0.1:41788/solr",
   [junit4]   2>   "type":"TLOG",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 394863 INFO  (OverseerStateUpdate-75295907184115720-127.0.0.1:37581_solr-n_0000000000) [n:127.0.0.1:37581_solr     ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"hdfsbackuprestore_testok",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"hdfsbackuprestore_testok_shard2_replica_p10",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"https://127.0.0.1:37581/solr",
   [junit4]   2>   "type":"PULL",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 395122 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr    x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&collection=hdfsbackuprestore_testok&version=2&replicaType=NRT&property.customKey=customValue&coreNodeName=core_node3&name=hdfsbackuprestore_testok_shard1_replica_n1&action=CREATE&numShards=2&shard=shard1&wt=javabin
   [junit4]   2> 395122 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr    x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&collection=hdfsbackuprestore_testok&version=2&replicaType=PULL&property.customKey=customValue&coreNodeName=core_node7&name=hdfsbackuprestore_testok_shard1_replica_p4&action=CREATE&numShards=2&shard=shard1&wt=javabin
   [junit4]   2> 395124 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr    x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&collection=hdfsbackuprestore_testok&version=2&replicaType=TLOG&property.customKey=customValue&coreNodeName=core_node11&name=hdfsbackuprestore_testok_shard2_replica_t8&action=CREATE&numShards=2&shard=shard2&wt=javabin
   [junit4]   2> 395576 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr    x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&collection=hdfsbackuprestore_testok&version=2&replicaType=PULL&property.customKey=customValue&coreNodeName=core_node12&name=hdfsbackuprestore_testok_shard2_replica_p10&action=CREATE&numShards=2&shard=shard2&wt=javabin
   [junit4]   2> 395577 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr    x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 transient cores
   [junit4]   2> 395621 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr    x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&collection=hdfsbackuprestore_testok&version=2&replicaType=NRT&property.customKey=customValue&coreNodeName=core_node9&name=hdfsbackuprestore_testok_shard2_replica_n6&action=CREATE&numShards=2&shard=shard2&wt=javabin
   [junit4]   2> 395629 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr    x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&collection=hdfsbackuprestore_testok&version=2&replicaType=TLOG&property.customKey=customValue&coreNodeName=core_node5&name=hdfsbackuprestore_testok_shard1_replica_t2&action=CREATE&numShards=2&shard=shard1&wt=javabin
   [junit4]   2> 396182 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.3.0
   [junit4]   2> 396291 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.s.IndexSchema [hdfsbackuprestore_testok_shard1_replica_n1] Schema name=minimal
   [junit4]   2> 396293 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 396294 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.CoreContainer Creating SolrCore 'hdfsbackuprestore_testok_shard1_replica_n1' using configuration from collection hdfsbackuprestore_testok, trusted=true
   [junit4]   2> 396294 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.hdfsbackuprestore_testok.shard1.replica_n1' (registry 'solr.core.hdfsbackuprestore_testok.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50
   [junit4]   2> 396311 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.SolrCore [[hdfsbackuprestore_testok_shard1_replica_n1] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node1/hdfsbackuprestore_testok_shard1_replica_n1], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node1/./hdfsbackuprestore_testok_shard1_replica_n1/data/]
   [junit4]   2> 396408 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.3.0
   [junit4]   2> 396431 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node7 x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.3.0
   [junit4]   2> 396446 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.s.IndexSchema [hdfsbackuprestore_testok_shard2_replica_t8] Schema name=minimal
   [junit4]   2> 396461 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 396461 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.c.CoreContainer Creating SolrCore 'hdfsbackuprestore_testok_shard2_replica_t8' using configuration from collection hdfsbackuprestore_testok, trusted=true
   [junit4]   2> 396462 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.hdfsbackuprestore_testok.shard2.replica_t8' (registry 'solr.core.hdfsbackuprestore_testok.shard2.replica_t8') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50
   [junit4]   2> 396462 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.c.SolrCore [[hdfsbackuprestore_testok_shard2_replica_t8] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node1/hdfsbackuprestore_testok_shard2_replica_t8], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node1/./hdfsbackuprestore_testok_shard2_replica_t8/data/]
   [junit4]   2> 396542 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node7 x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.s.IndexSchema [hdfsbackuprestore_testok_shard1_replica_p4] Schema name=minimal
   [junit4]   2> 396562 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node7 x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 396562 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node7 x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.c.CoreContainer Creating SolrCore 'hdfsbackuprestore_testok_shard1_replica_p4' using configuration from collection hdfsbackuprestore_testok, trusted=true
   [junit4]   2> 396584 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node7 x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.hdfsbackuprestore_testok.shard1.replica_p4' (registry 'solr.core.hdfsbackuprestore_testok.shard1.replica_p4') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50
   [junit4]   2> 396585 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node7 x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.c.SolrCore [[hdfsbackuprestore_testok_shard1_replica_p4] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node1/hdfsbackuprestore_testok_shard1_replica_p4], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node1/./hdfsbackuprestore_testok_shard1_replica_p4/data/]
   [junit4]   2> 396986 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 396986 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 397053 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 397053 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 397064 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@6f3941f9[hdfsbackuprestore_testok_shard1_replica_n1] main]
   [junit4]   2> 397102 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 397102 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 397103 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 397103 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 397132 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 397148 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@a444bac[hdfsbackuprestore_testok_shard2_replica_t8] main]
   [junit4]   2> 397152 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 397155 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 397155 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1643197598596595712
   [junit4]   2> 397190 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.3.0
   [junit4]   2> 397225 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node7 x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 397225 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node7 x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 397227 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.s.IndexSchema [hdfsbackuprestore_testok_shard2_replica_n6] Schema name=minimal
   [junit4]   2> 397230 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 397230 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.c.CoreContainer Creating SolrCore 'hdfsbackuprestore_testok_shard2_replica_n6' using configuration from collection hdfsbackuprestore_testok, trusted=true
   [junit4]   2> 397230 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.hdfsbackuprestore_testok.shard2.replica_n6' (registry 'solr.core.hdfsbackuprestore_testok.shard2.replica_n6') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50
   [junit4]   2> 397231 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.c.SolrCore [[hdfsbackuprestore_testok_shard2_replica_n6] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node2/hdfsbackuprestore_testok_shard2_replica_n6], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node2/./hdfsbackuprestore_testok_shard2_replica_n6/data/]
   [junit4]   2> 397261 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node7 x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@7bfbbe13[hdfsbackuprestore_testok_shard1_replica_p4] main]
   [junit4]   2> 397262 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 397327 INFO  (searcherExecutor-539-thread-1-processing-n:127.0.0.1:41788_solr x:hdfsbackuprestore_testok_shard1_replica_n1 c:hdfsbackuprestore_testok s:shard1 r:core_node3) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.SolrCore [hdfsbackuprestore_testok_shard1_replica_n1] Registered new searcher Searcher@6f3941f9[hdfsbackuprestore_testok_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 397328 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.3.0
   [junit4]   2> 397359 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 397359 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 397359 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1643197598810505216
   [junit4]   2> 397362 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.3.0
   [junit4]   2> 397483 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.s.IndexSchema [hdfsbackuprestore_testok_shard1_replica_t2] Schema name=minimal
   [junit4]   2> 397485 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 397485 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.c.CoreContainer Creating SolrCore 'hdfsbackuprestore_testok_shard1_replica_t2' using configuration from collection hdfsbackuprestore_testok, trusted=true
   [junit4]   2> 397485 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.hdfsbackuprestore_testok.shard1.replica_t2' (registry 'solr.core.hdfsbackuprestore_testok.shard1.replica_t2') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50
   [junit4]   2> 397486 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.c.SolrCore [[hdfsbackuprestore_testok_shard1_replica_t2] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node2/hdfsbackuprestore_testok_shard1_replica_t2], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node2/./hdfsbackuprestore_testok_shard1_replica_t2/data/]
   [junit4]   2> 397542 INFO  (searcherExecutor-540-thread-1-processing-n:127.0.0.1:41788_solr x:hdfsbackuprestore_testok_shard2_replica_t8 c:hdfsbackuprestore_testok s:shard2 r:core_node11) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.c.SolrCore [hdfsbackuprestore_testok_shard2_replica_t8] Registered new searcher Searcher@a444bac[hdfsbackuprestore_testok_shard2_replica_t8] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 397543 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.s.IndexSchema [hdfsbackuprestore_testok_shard2_replica_p10] Schema name=minimal
   [junit4]   2> 397570 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 397570 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.c.CoreContainer Creating SolrCore 'hdfsbackuprestore_testok_shard2_replica_p10' using configuration from collection hdfsbackuprestore_testok, trusted=true
   [junit4]   2> 397570 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.hdfsbackuprestore_testok.shard2.replica_p10' (registry 'solr.core.hdfsbackuprestore_testok.shard2.replica_p10') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50
   [junit4]   2> 397570 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.c.SolrCore [[hdfsbackuprestore_testok_shard2_replica_p10] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node2/hdfsbackuprestore_testok_shard2_replica_p10], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node2/./hdfsbackuprestore_testok_shard2_replica_p10/data/]
   [junit4]   2> 397577 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node7 x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 397589 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node7 x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 397590 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node7 x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 397799 INFO  (searcherExecutor-541-thread-1-processing-n:127.0.0.1:41788_solr x:hdfsbackuprestore_testok_shard1_replica_p4 c:hdfsbackuprestore_testok s:shard1 r:core_node7) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node7 x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.c.SolrCore [hdfsbackuprestore_testok_shard1_replica_p4] Registered new searcher Searcher@7bfbbe13[hdfsbackuprestore_testok_shard1_replica_p4] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 397946 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node7 x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.c.ZkController hdfsbackuprestore_testok_shard1_replica_p4 starting background replication from leader
   [junit4]   2> 397948 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node7 x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.c.ReplicateFromLeader Will start replication from leader with poll interval: 00:00:01
   [junit4]   2> 397998 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/hdfsbackuprestore_testok/terms/shard1 to Terms{values={core_node3=0}, version=0}
   [junit4]   2> 397998 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/hdfsbackuprestore_testok/leaders/shard1
   [junit4]   2> 398006 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node7 x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.h.ReplicationHandler Poll scheduled at an interval of 1000ms
   [junit4]   2> 398006 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node7 x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 398176 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/hdfsbackuprestore_testok/terms/shard2 to Terms{values={core_node11=0}, version=0}
   [junit4]   2> 398200 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 398200 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 398202 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 398202 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 398217 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 398217 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 398218 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 398218 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 398254 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/hdfsbackuprestore_testok/leaders/shard2
   [junit4]   2> 398334 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@24dc1267[hdfsbackuprestore_testok_shard1_replica_t2] main]
   [junit4]   2> 398335 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@480c3d9f[hdfsbackuprestore_testok_shard2_replica_n6] main]
   [junit4]   2> 398340 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 398340 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 398340 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext Waiting until we see more replicas up for shard shard1: total=2 found=1 timeoutin=9999ms
   [junit4]   2> 398356 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.c.ShardLeaderElectionContext Waiting until we see more replicas up for shard shard2: total=2 found=1 timeoutin=9999ms
   [junit4]   2> 398368 INFO  (zkCallback-598-thread-2) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/hdfsbackuprestore_testok/state.json] for collection [hdfsbackuprestore_testok] has occurred - updating... (live nodes size: [2])
   [junit4]   2> 398368 INFO  (zkCallback-598-thread-1) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/hdfsbackuprestore_testok/state.json] for collection [hdfsbackuprestore_testok] has occurred - updating... (live nodes size: [2])
   [junit4]   2> 398383 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@11479cea[hdfsbackuprestore_testok_shard2_replica_p10] main]
   [junit4]   2> 398407 INFO  (zkCallback-598-thread-3) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/hdfsbackuprestore_testok/state.json] for collection [hdfsbackuprestore_testok] has occurred - updating... (live nodes size: [2])
   [junit4]   2> 398407 INFO  (zkCallback-598-thread-4) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/hdfsbackuprestore_testok/state.json] for collection [hdfsbackuprestore_testok] has occurred - updating... (live nodes size: [2])
   [junit4]   2> 398415 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 398420 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 398420 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 398420 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 398420 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1643197599923044352
   [junit4]   2> 398422 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 398423 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 398423 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1643197599926190080
   [junit4]   2> 398431 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 398432 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 398432 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 398453 INFO  (searcherExecutor-553-thread-1-processing-n:127.0.0.1:37581_solr x:hdfsbackuprestore_testok_shard2_replica_n6 c:hdfsbackuprestore_testok s:shard2 r:core_node9) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.c.SolrCore [hdfsbackuprestore_testok_shard2_replica_n6] Registered new searcher Searcher@480c3d9f[hdfsbackuprestore_testok_shard2_replica_n6] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 398477 INFO  (searcherExecutor-555-thread-1-processing-n:127.0.0.1:37581_solr x:hdfsbackuprestore_testok_shard2_replica_p10 c:hdfsbackuprestore_testok s:shard2 r:core_node12) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.c.SolrCore [hdfsbackuprestore_testok_shard2_replica_p10] Registered new searcher Searcher@11479cea[hdfsbackuprestore_testok_shard2_replica_p10] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 398478 INFO  (zkCallback-598-thread-4) [     ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncCon

[...truncated too long message...]

[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml

resolve:

jar-checksums:
    [mkdir] Created dir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/null2090723009
     [copy] Copying 249 files to /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/null2090723009
   [delete] Deleting directory /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/null2090723009

check-working-copy:
[ivy:cachepath] :: resolving dependencies :: #;working@lucene2-us-west.apache.org
[ivy:cachepath] 	confs: [default]
[ivy:cachepath] 	found org.eclipse.jgit#org.eclipse.jgit;5.3.0.201903130848-r in public
[ivy:cachepath] 	found com.jcraft#jsch;0.1.54 in public
[ivy:cachepath] 	found com.jcraft#jzlib;1.1.1 in public
[ivy:cachepath] 	found com.googlecode.javaewah#JavaEWAH;1.1.6 in public
[ivy:cachepath] 	found org.slf4j#slf4j-api;1.7.2 in public
[ivy:cachepath] 	found org.bouncycastle#bcpg-jdk15on;1.60 in public
[ivy:cachepath] 	found org.bouncycastle#bcprov-jdk15on;1.60 in public
[ivy:cachepath] 	found org.bouncycastle#bcpkix-jdk15on;1.60 in public
[ivy:cachepath] 	found org.slf4j#slf4j-nop;1.7.2 in public
[ivy:cachepath] :: resolution report :: resolve 34ms :: artifacts dl 4ms
	---------------------------------------------------------------------
	|                  |            modules            ||   artifacts   |
	|       conf       | number| search|dwnlded|evicted|| number|dwnlded|
	---------------------------------------------------------------------
	|      default     |   9   |   0   |   0   |   0   ||   9   |   0   |
	---------------------------------------------------------------------
[wc-checker] Initializing working copy...
[wc-checker] Checking working copy status...

-jenkins-base:

BUILD SUCCESSFUL
Total time: 199 minutes 22 seconds
Archiving artifacts
java.lang.InterruptedException: no matches found within 10000
	at hudson.FilePath$ValidateAntFileMask.hasMatch(FilePath.java:2847)
	at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2726)
	at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2707)
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3086)
Also:   hudson.remoting.Channel$CallSiteStackTrace: Remote call to lucene2
		at hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1741)
		at hudson.remoting.UserRequest$ExceptionResponse.retrieve(UserRequest.java:357)
		at hudson.remoting.Channel.call(Channel.java:955)
		at hudson.FilePath.act(FilePath.java:1072)
		at hudson.FilePath.act(FilePath.java:1061)
		at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
		at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
		at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
		at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
		at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
		at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
		at hudson.model.Build$BuildExecution.post2(Build.java:186)
		at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
		at hudson.model.Run.execute(Run.java:1835)
		at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
		at hudson.model.ResourceController.execute(ResourceController.java:97)
		at hudson.model.Executor.run(Executor.java:429)
Caused: hudson.FilePath$TunneledInterruptedException
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3088)
	at hudson.remoting.UserRequest.perform(UserRequest.java:212)
	at hudson.remoting.UserRequest.perform(UserRequest.java:54)
	at hudson.remoting.Request$2.run(Request.java:369)
	at hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:748)
Caused: java.lang.InterruptedException: java.lang.InterruptedException: no matches found within 10000
	at hudson.FilePath.act(FilePath.java:1074)
	at hudson.FilePath.act(FilePath.java:1061)
	at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
	at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
	at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
	at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
	at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
	at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
	at hudson.model.Build$BuildExecution.post2(Build.java:186)
	at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
	at hudson.model.Run.execute(Run.java:1835)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:97)
	at hudson.model.Executor.run(Executor.java:429)
No artifacts found that match the file pattern "**/*.events,heapdumps/**,**/hs_err_pid*". Configuration error?
Recording test results
Build step 'Publish JUnit test result report' changed build result to UNSTABLE
Email was triggered for: Unstable (Test Failures)
Sending email for trigger: Unstable (Test Failures)

Re: [JENKINS] Lucene-Solr-Tests-8.x - Build # 491 - Unstable

Posted by Ishan Chattopadhyaya <ic...@gmail.com>.
I'm looking into this failure. I think I caused it; apologies.
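
For anyone following along, the assertion that trips here (AbstractCloudBackupRestoreTestCase.testRestoreFailure, line 211 in the trace below) is a Hamcrest check that the half-restored collection has been removed from the cluster state after the deliberately failed restore. A minimal sketch of that kind of check, assuming the collection names are read from the live cluster state (the class, method, and variable names here are illustrative, not the actual Solr test code):

    import static org.hamcrest.CoreMatchers.hasItem;
    import static org.hamcrest.CoreMatchers.not;
    import static org.hamcrest.MatcherAssert.assertThat;

    import java.util.Collection;

    public class RestoreFailureAssertionSketch {
        // Fails with a message of the form
        //   "Expected: not a collection containing <name>  but: was <[...]>"
        // when the restored-but-failed collection is still present in the cluster state.
        static void assertFailedRestoreWasCleanedUp(Collection<String> clusterCollections,
                                                    String restoredCollectionName,
                                                    String clusterStateDump) {
            assertThat("Failed collection is still in the clusterstate: " + clusterStateDump,
                       clusterCollections, not(hasItem(restoredCollectionName)));
        }
    }

In both failures below, the listed cluster collections still contain the "*_testfailure_restored" name, which is exactly what this kind of not(hasItem(...)) matcher flags.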

On Thu, Aug 29, 2019 at 12:56 PM Apache Jenkins Server
<je...@builds.apache.org> wrote:
>
> Build: https://builds.apache.org/job/Lucene-Solr-Tests-8.x/491/
>
> 2 tests failed.
> FAILED:  org.apache.solr.cloud.api.collections.TestHdfsCloudBackupRestore.testRestoreFailure
>
> Error Message:
> Failed collection is still in the clusterstate: DocCollection(hdfsbackuprestore_testfailure_restored//collections/hdfsbackuprestore_testfailure_restored/state.json/2)={   "pullReplicas":0,   "replicationFactor":1,   "shards":{     "shard2":{       "range":"0-7fffffff",       "state":"construction",       "replicas":{"core_node2":{           "core":"hdfsbackuprestore_testfailure_restored_shard2_replica_n1",           "base_url":"https://127.0.0.1:36659/solr",           "node_name":"127.0.0.1:36659_solr",           "state":"down",           "type":"NRT",           "force_set_state":"false"}},       "stateTimestamp":"1567059232049688251"},     "shard1":{       "range":"80000000-ffffffff",       "state":"construction",       "replicas":{},       "stateTimestamp":"1567059232049701653"}},   "router":{"name":"compositeId"},   "maxShardsPerNode":"1",   "autoAddReplicas":"false",   "nrtReplicas":1,   "tlogReplicas":0} Expected: not a collection containing "hdfsbackuprestore_testfailure_restored"      but: was <[hdfsbackuprestore_testok, hdfsbackuprestore_testfailure_restored, hdfsbackuprestore_testfailure, hdfsbackuprestore_testok_restored]>
>
> Stack Trace:
> java.lang.AssertionError: Failed collection is still in the clusterstate: DocCollection(hdfsbackuprestore_testfailure_restored//collections/hdfsbackuprestore_testfailure_restored/state.json/2)={
>   "pullReplicas":0,
>   "replicationFactor":1,
>   "shards":{
>     "shard2":{
>       "range":"0-7fffffff",
>       "state":"construction",
>       "replicas":{"core_node2":{
>           "core":"hdfsbackuprestore_testfailure_restored_shard2_replica_n1",
>           "base_url":"https://127.0.0.1:36659/solr",
>           "node_name":"127.0.0.1:36659_solr",
>           "state":"down",
>           "type":"NRT",
>           "force_set_state":"false"}},
>       "stateTimestamp":"1567059232049688251"},
>     "shard1":{
>       "range":"80000000-ffffffff",
>       "state":"construction",
>       "replicas":{},
>       "stateTimestamp":"1567059232049701653"}},
>   "router":{"name":"compositeId"},
>   "maxShardsPerNode":"1",
>   "autoAddReplicas":"false",
>   "nrtReplicas":1,
>   "tlogReplicas":0}
> Expected: not a collection containing "hdfsbackuprestore_testfailure_restored"
>      but: was <[hdfsbackuprestore_testok, hdfsbackuprestore_testfailure_restored, hdfsbackuprestore_testfailure, hdfsbackuprestore_testok_restored]>
>         at __randomizedtesting.SeedInfo.seed([E037D74065656872:C94B49654D3C6B5F]:0)
>         at org.hamcrest.MatcherAssert.assertThat(MatcherAssert.java:20)
>         at org.junit.Assert.assertThat(Assert.java:956)
>         at org.apache.solr.cloud.api.collections.AbstractCloudBackupRestoreTestCase.testRestoreFailure(AbstractCloudBackupRestoreTestCase.java:211)
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>         at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:498)
>         at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
>         at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
>         at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
>         at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
>         at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
>         at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
>         at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
>         at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
>         at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
>         at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
>         at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
>         at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
>         at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
>         at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
>         at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
>         at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
>         at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
>         at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
>         at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
>         at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
>         at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
>         at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
>         at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
>         at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
>         at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
>         at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
>         at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
>         at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
>         at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
>         at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
>         at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
>         at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
>         at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
>         at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
>         at java.lang.Thread.run(Thread.java:748)
>
>
> FAILED:  org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore.testRestoreFailure
>
> Error Message:
> Failed collection is still in the clusterstate: DocCollection(backuprestore_testfailure_restored//collections/backuprestore_testfailure_restored/state.json/2)={   "pullReplicas":0,   "replicationFactor":1,   "shards":{     "shard2":{       "range":"0-7fffffff",       "state":"construction",       "replicas":{"core_node2":{           "core":"backuprestore_testfailure_restored_shard2_replica_n1",           "base_url":"http://127.0.0.1:33205/solr",           "node_name":"127.0.0.1:33205_solr",           "state":"down",           "type":"NRT",           "force_set_state":"false"}},       "stateTimestamp":"1567060879213084847"},     "shard1":{       "range":"80000000-ffffffff",       "state":"construction",       "replicas":{},       "stateTimestamp":"1567060879213099152"}},   "router":{"name":"compositeId"},   "maxShardsPerNode":"1",   "autoAddReplicas":"false",   "nrtReplicas":1,   "tlogReplicas":0} Expected: not a collection containing "backuprestore_testfailure_restored"      but: was <[backuprestore_testok, backuprestore_testfailure, backuprestore_testfailure_restored, backuprestore_testok_restored]>
>
> Stack Trace:
> java.lang.AssertionError: Failed collection is still in the clusterstate: DocCollection(backuprestore_testfailure_restored//collections/backuprestore_testfailure_restored/state.json/2)={
>   "pullReplicas":0,
>   "replicationFactor":1,
>   "shards":{
>     "shard2":{
>       "range":"0-7fffffff",
>       "state":"construction",
>       "replicas":{"core_node2":{
>           "core":"backuprestore_testfailure_restored_shard2_replica_n1",
>           "base_url":"http://127.0.0.1:33205/solr",
>           "node_name":"127.0.0.1:33205_solr",
>           "state":"down",
>           "type":"NRT",
>           "force_set_state":"false"}},
>       "stateTimestamp":"1567060879213084847"},
>     "shard1":{
>       "range":"80000000-ffffffff",
>       "state":"construction",
>       "replicas":{},
>       "stateTimestamp":"1567060879213099152"}},
>   "router":{"name":"compositeId"},
>   "maxShardsPerNode":"1",
>   "autoAddReplicas":"false",
>   "nrtReplicas":1,
>   "tlogReplicas":0}
> Expected: not a collection containing "backuprestore_testfailure_restored"
>      but: was <[backuprestore_testok, backuprestore_testfailure, backuprestore_testfailure_restored, backuprestore_testok_restored]>
>         at __randomizedtesting.SeedInfo.seed([E037D74065656872:C94B49654D3C6B5F]:0)
>         at org.hamcrest.MatcherAssert.assertThat(MatcherAssert.java:20)
>         at org.junit.Assert.assertThat(Assert.java:956)
>         at org.apache.solr.cloud.api.collections.AbstractCloudBackupRestoreTestCase.testRestoreFailure(AbstractCloudBackupRestoreTestCase.java:211)
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>         at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:498)
>         at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
>         at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
>         at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
>         at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
>         at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
>         at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
>         at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
>         at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
>         at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
>         at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
>         at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
>         at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
>         at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
>         at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
>         at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
>         at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
>         at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
>         at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
>         at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
>         at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
>         at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
>         at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
>         at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
>         at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
>         at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
>         at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
>         at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
>         at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
>         at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
>         at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
>         at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
>         at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
>         at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
>         at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
>         at java.lang.Thread.run(Thread.java:748)
>
>
>
>
> Build Log:
> [...truncated 13726 lines...]
>    [junit4] Suite: org.apache.solr.cloud.api.collections.TestHdfsCloudBackupRestore
>    [junit4]   1> Formatting using clusterid: testClusterID
>    [junit4]   2> 439279 WARN  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.h.m.i.MetricsConfig Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
>    [junit4]   2> 439296 WARN  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
>    [junit4]   2> 439298 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
>    [junit4]   2> 439300 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
>    [junit4]   2> 439300 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.session No SessionScavenger set, using defaults
>    [junit4]   2> 439300 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.session node0 Scavenging every 600000ms
>    [junit4]   2> 439301 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@2774068b{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
>    [junit4]   2> 439459 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@f007949{hdfs,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/jetty-localhost.localdomain-36239-hdfs-_-any-924387434669286531.dir/webapp/,AVAILABLE}{/hdfs}
>    [junit4]   2> 439460 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.AbstractConnector Started ServerConnector@7f6b887c{HTTP/1.1,[http/1.1]}{localhost.localdomain:36239}
>    [junit4]   2> 439461 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.Server Started @439527ms
>    [junit4]   2> 439553 WARN  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
>    [junit4]   2> 439556 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
>    [junit4]   2> 439556 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
>    [junit4]   2> 439556 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.session No SessionScavenger set, using defaults
>    [junit4]   2> 439557 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.session node0 Scavenging every 600000ms
>    [junit4]   2> 439557 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@6adf3fad{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
>    [junit4]   2> 439714 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@1c703108{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/jetty-localhost-37543-datanode-_-any-7314119788980653551.dir/webapp/,AVAILABLE}{/datanode}
>    [junit4]   2> 439715 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.AbstractConnector Started ServerConnector@7162d9c9{HTTP/1.1,[http/1.1]}{localhost:37543}
>    [junit4]   2> 439715 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.Server Started @439781ms
>    [junit4]   2> 439791 WARN  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
>    [junit4]   2> 439792 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
>    [junit4]   2> 439794 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
>    [junit4]   2> 439794 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.session No SessionScavenger set, using defaults
>    [junit4]   2> 439794 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.session node0 Scavenging every 600000ms
>    [junit4]   2> 439795 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@11532006{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
>    [junit4]   2> 439974 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0xcac9d599fa230d3: Processing first storage report for DS-d29fa2ae-e164-4cca-aa51-f36bddc1bd73 from datanode f9a9e1ed-6c2b-46ce-b8bb-7bae1b0f893d
>    [junit4]   2> 439974 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0xcac9d599fa230d3: from storage DS-d29fa2ae-e164-4cca-aa51-f36bddc1bd73 node DatanodeRegistration(127.0.0.1:38477, datanodeUuid=f9a9e1ed-6c2b-46ce-b8bb-7bae1b0f893d, infoPort=34367, infoSecurePort=0, ipcPort=41639, storageInfo=lv=-57;cid=testClusterID;nsid=968518402;c=1567059213337), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
>    [junit4]   2> 439974 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0xcac9d599fa230d3: Processing first storage report for DS-1c4da6b9-2544-4f1f-b527-c4142a5267fd from datanode f9a9e1ed-6c2b-46ce-b8bb-7bae1b0f893d
>    [junit4]   2> 439974 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0xcac9d599fa230d3: from storage DS-1c4da6b9-2544-4f1f-b527-c4142a5267fd node DatanodeRegistration(127.0.0.1:38477, datanodeUuid=f9a9e1ed-6c2b-46ce-b8bb-7bae1b0f893d, infoPort=34367, infoSecurePort=0, ipcPort=41639, storageInfo=lv=-57;cid=testClusterID;nsid=968518402;c=1567059213337), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
>    [junit4]   2> 440012 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@397cc67d{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/jetty-localhost-42969-datanode-_-any-8925702212772852981.dir/webapp/,AVAILABLE}{/datanode}
>    [junit4]   2> 440012 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.AbstractConnector Started ServerConnector@4892d943{HTTP/1.1,[http/1.1]}{localhost:42969}
>    [junit4]   2> 440012 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.e.j.s.Server Started @440079ms
>    [junit4]   2> 440173 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0xb57ad50ac595db20: Processing first storage report for DS-ae78d8eb-dd57-4c19-ae6e-ea8f8519c130 from datanode d9c8819b-1365-4c42-ae05-ffe965768d2c
>    [junit4]   2> 440173 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0xb57ad50ac595db20: from storage DS-ae78d8eb-dd57-4c19-ae6e-ea8f8519c130 node DatanodeRegistration(127.0.0.1:42413, datanodeUuid=d9c8819b-1365-4c42-ae05-ffe965768d2c, infoPort=40741, infoSecurePort=0, ipcPort=34355, storageInfo=lv=-57;cid=testClusterID;nsid=968518402;c=1567059213337), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
>    [junit4]   2> 440173 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0xb57ad50ac595db20: Processing first storage report for DS-ef48ae90-a08b-4a82-9795-00787d190e45 from datanode d9c8819b-1365-4c42-ae05-ffe965768d2c
>    [junit4]   2> 440173 INFO  (Block report processor) [     ] BlockStateChange BLOCK* processReport 0xb57ad50ac595db20: from storage DS-ef48ae90-a08b-4a82-9795-00787d190e45 node DatanodeRegistration(127.0.0.1:42413, datanodeUuid=d9c8819b-1365-4c42-ae05-ffe965768d2c, infoPort=40741, infoSecurePort=0, ipcPort=34355, storageInfo=lv=-57;cid=testClusterID;nsid=968518402;c=1567059213337), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
>    [junit4]   2> 440259 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.MiniSolrCloudCluster Starting cluster of 2 servers in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002
>    [junit4]   2> 440260 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
>    [junit4]   2> 440260 INFO  (ZkTestServer Run Thread) [     ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
>    [junit4]   2> 440260 INFO  (ZkTestServer Run Thread) [     ] o.a.s.c.ZkTestServer Starting server
>    [junit4]   2> 440360 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.ZkTestServer start zk server on port:45147
>    [junit4]   2> 440360 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.ZkTestServer waitForServerUp: 127.0.0.1:45147
>    [junit4]   2> 440360 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:45147
>    [junit4]   2> 440360 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 45147
>    [junit4]   2> 440363 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
>    [junit4]   2> 440367 INFO  (zkConnectionManagerCallback-2523-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
>    [junit4]   2> 440367 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
>    [junit4]   2> 440371 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
>    [junit4]   2> 440372 INFO  (zkConnectionManagerCallback-2525-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
>    [junit4]   2> 440372 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
>    [junit4]   2> 440376 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
>    [junit4]   2> 440378 INFO  (zkConnectionManagerCallback-2527-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
>    [junit4]   2> 440378 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
>    [junit4]   2> 440486 WARN  (jetty-launcher-2528-thread-1) [     ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
>    [junit4]   2> 440486 WARN  (jetty-launcher-2528-thread-2) [     ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
>    [junit4]   2> 440487 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
>    [junit4]   2> 440487 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
>    [junit4]   2> 440487 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
>    [junit4]   2> 440487 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
>    [junit4]   2> 440487 INFO  (jetty-launcher-2528-thread-2) [     ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
>    [junit4]   2> 440487 INFO  (jetty-launcher-2528-thread-1) [     ] o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
>    [junit4]   2> 440495 INFO  (jetty-launcher-2528-thread-1) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
>    [junit4]   2> 440495 INFO  (jetty-launcher-2528-thread-1) [     ] o.e.j.s.session No SessionScavenger set, using defaults
>    [junit4]   2> 440496 INFO  (jetty-launcher-2528-thread-1) [     ] o.e.j.s.session node0 Scavenging every 660000ms
>    [junit4]   2> 440496 INFO  (jetty-launcher-2528-thread-2) [     ] o.e.j.s.session DefaultSessionIdManager workerName=node0
>    [junit4]   2> 440496 INFO  (jetty-launcher-2528-thread-2) [     ] o.e.j.s.session No SessionScavenger set, using defaults
>    [junit4]   2> 440496 INFO  (jetty-launcher-2528-thread-2) [     ] o.e.j.s.session node0 Scavenging every 600000ms
>    [junit4]   2> 440496 INFO  (jetty-launcher-2528-thread-1) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@45e0eb05{/solr,null,AVAILABLE}
>    [junit4]   2> 440496 INFO  (jetty-launcher-2528-thread-2) [     ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@2f3604af{/solr,null,AVAILABLE}
>    [junit4]   2> 440497 INFO  (jetty-launcher-2528-thread-2) [     ] o.e.j.s.AbstractConnector Started ServerConnector@224c8694{SSL,[ssl, http/1.1]}{127.0.0.1:36659}
>    [junit4]   2> 440497 INFO  (jetty-launcher-2528-thread-2) [     ] o.e.j.s.Server Started @440564ms
>    [junit4]   2> 440497 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=36659}
>    [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-1) [     ] o.e.j.s.AbstractConnector Started ServerConnector@341995c6{SSL,[ssl, http/1.1]}{127.0.0.1:46735}
>    [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-1) [     ] o.e.j.s.Server Started @440564ms
>    [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=46735}
>    [junit4]   2> 440498 ERROR (jetty-launcher-2528-thread-2) [     ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
>    [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
>    [junit4]   2> 440498 ERROR (jetty-launcher-2528-thread-1) [     ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
>    [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 8.3.0
>    [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
>    [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
>    [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
>    [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 8.3.0
>    [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
>    [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
>    [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-08-29T06:13:34.605Z
>    [junit4]   2> 440498 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-08-29T06:13:34.605Z
>    [junit4]   2> 440500 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
>    [junit4]   2> 440503 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
>    [junit4]   2> 440503 INFO  (zkConnectionManagerCallback-2530-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
>    [junit4]   2> 440503 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
>    [junit4]   2> 440504 INFO  (zkConnectionManagerCallback-2532-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
>    [junit4]   2> 440504 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
>    [junit4]   2> 440504 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
>    [junit4]   2> 440505 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
>    [junit4]   2> 440523 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267, but no JMX reporters were configured - adding default JMX reporter.
>    [junit4]   2> 440530 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267, but no JMX reporters were configured - adding default JMX reporter.
>    [junit4]   2> 440887 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
>    [junit4]   2> 440888 WARN  (jetty-launcher-2528-thread-2) [     ] o.a.s.c.s.i.Http2SolrClient Create Http2SolrClient with HTTP/1.1 transport since Java 8 or lower versions does not support SSL + HTTP/2
>    [junit4]   2> 440889 WARN  (jetty-launcher-2528-thread-2) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@4b3ef0b7[provider=null,keyStore=null,trustStore=null]
>    [junit4]   2> 440889 WARN  (jetty-launcher-2528-thread-2) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@4b3ef0b7[provider=null,keyStore=null,trustStore=null]
>    [junit4]   2> 440890 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
>    [junit4]   2> 440891 WARN  (jetty-launcher-2528-thread-1) [     ] o.a.s.c.s.i.Http2SolrClient Create Http2SolrClient with HTTP/1.1 transport since Java 8 or lower versions does not support SSL + HTTP/2
>    [junit4]   2> 440893 WARN  (jetty-launcher-2528-thread-1) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@7637eb82[provider=null,keyStore=null,trustStore=null]
>    [junit4]   2> 440893 WARN  (jetty-launcher-2528-thread-1) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@7637eb82[provider=null,keyStore=null,trustStore=null]
>    [junit4]   2> 440895 WARN  (jetty-launcher-2528-thread-2) [     ] o.a.s.c.s.i.Http2SolrClient Create Http2SolrClient with HTTP/1.1 transport since Java 8 or lower versions does not support SSL + HTTP/2
>    [junit4]   2> 440901 WARN  (jetty-launcher-2528-thread-1) [     ] o.a.s.c.s.i.Http2SolrClient Create Http2SolrClient with HTTP/1.1 transport since Java 8 or lower versions does not support SSL + HTTP/2
>    [junit4]   2> 440901 WARN  (jetty-launcher-2528-thread-2) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@74bd9149[provider=null,keyStore=null,trustStore=null]
>    [junit4]   2> 440901 WARN  (jetty-launcher-2528-thread-2) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@74bd9149[provider=null,keyStore=null,trustStore=null]
>    [junit4]   2> 440902 WARN  (jetty-launcher-2528-thread-1) [     ] o.e.j.u.s.S.config Trusting all certificates configured for Client@18e664b8[provider=null,keyStore=null,trustStore=null]
>    [junit4]   2> 440902 WARN  (jetty-launcher-2528-thread-1) [     ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for Client@18e664b8[provider=null,keyStore=null,trustStore=null]
>    [junit4]   2> 440903 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:45147/solr
>    [junit4]   2> 440903 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:45147/solr
>    [junit4]   2> 440905 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
>    [junit4]   2> 440906 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
>    [junit4]   2> 440906 INFO  (zkConnectionManagerCallback-2546-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
>    [junit4]   2> 440906 INFO  (jetty-launcher-2528-thread-1) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
>    [junit4]   2> 440911 INFO  (zkConnectionManagerCallback-2544-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
>    [junit4]   2> 440911 INFO  (jetty-launcher-2528-thread-2) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
>    [junit4]   2> 441011 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
>    [junit4]   2> 441014 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
>    [junit4]   2> 441014 INFO  (zkConnectionManagerCallback-2548-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
>    [junit4]   2> 441014 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
>    [junit4]   2> 441024 INFO  (zkConnectionManagerCallback-2550-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
>    [junit4]   2> 441024 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
>    [junit4]   2> 441203 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:46735_solr
>    [junit4]   2> 441205 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.Overseer Overseer (id=72285712308305927-127.0.0.1:46735_solr-n_0000000000) starting
>    [junit4]   2> 441223 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
>    [junit4]   2> 441231 INFO  (zkConnectionManagerCallback-2559-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
>    [junit4]   2> 441231 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
>    [junit4]   2> 441237 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:45147/solr ready
>    [junit4]   2> 441243 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:36659_solr
>    [junit4]   2> 441246 INFO  (OverseerStateUpdate-72285712308305927-127.0.0.1:46735_solr-n_0000000000) [n:127.0.0.1:46735_solr     ] o.a.s.c.Overseer Starting to work on the main queue : 127.0.0.1:46735_solr
>    [junit4]   2> 441247 INFO  (OverseerStateUpdate-72285712308305927-127.0.0.1:46735_solr-n_0000000000) [n:127.0.0.1:46735_solr     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
>    [junit4]   2> 441257 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.ZkController Publish node=127.0.0.1:46735_solr as DOWN
>    [junit4]   2> 441259 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 transient cores
>    [junit4]   2> 441259 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:46735_solr
>    [junit4]   2> 441262 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.PackageManager clusterprops.json changed , version 0
>    [junit4]   2> 441262 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Added backup repository with configuration params {type = repository,name = hdfs,class = org.apache.solr.core.backup.repository.HdfsBackupRepository,attributes = {name=hdfs, class=org.apache.solr.core.backup.repository.HdfsBackupRepository},args = {location=/backup,solr.hdfs.home=hdfs://localhost.localdomain:46481/solr,solr.hdfs.confdir=}}
>    [junit4]   2> 441262 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Added backup repository with configuration params {type = repository,name = poisioned,class = org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository,attributes = {default=true, name=poisioned, class=org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository},}
>    [junit4]   2> 441262 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Default configuration for backup repository is with configuration params {type = repository,name = poisioned,class = org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository,attributes = {default=true, name=poisioned, class=org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository},}
>    [junit4]   2> 441267 INFO  (zkCallback-2547-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
>    [junit4]   2> 441274 INFO  (zkCallback-2558-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
>    [junit4]   2> 441277 INFO  (zkCallback-2549-thread-1) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
>    [junit4]   2> 441280 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
>    [junit4]   2> 441301 INFO  (zkConnectionManagerCallback-2564-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
>    [junit4]   2> 441301 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
>    [junit4]   2> 441302 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
>    [junit4]   2> 441305 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:45147/solr ready
>    [junit4]   2> 441306 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.c.PackageManager clusterprops.json changed , version 0
>    [junit4]   2> 441306 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Added backup repository with configuration params {type = repository,name = hdfs,class = org.apache.solr.core.backup.repository.HdfsBackupRepository,attributes = {name=hdfs, class=org.apache.solr.core.backup.repository.HdfsBackupRepository},args = {location=/backup,solr.hdfs.home=hdfs://localhost.localdomain:46481/solr,solr.hdfs.confdir=}}
>    [junit4]   2> 441306 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Added backup repository with configuration params {type = repository,name = poisioned,class = org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository,attributes = {default=true, name=poisioned, class=org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository},}
>    [junit4]   2> 441306 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Default configuration for backup repository is with configuration params {type = repository,name = poisioned,class = org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository,attributes = {default=true, name=poisioned, class=org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository},}
>    [junit4]   2> 441332 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
>    [junit4]   2> 441378 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
>    [junit4]   2> 441409 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267
>    [junit4]   2> 441436 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267
>    [junit4]   2> 441444 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267
>    [junit4]   2> 441444 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267
>    [junit4]   2> 441445 INFO  (jetty-launcher-2528-thread-1) [n:127.0.0.1:46735_solr     ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node1/.
>    [junit4]   2> 441454 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267
>    [junit4]   2> 441454 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267
>    [junit4]   2> 441456 INFO  (jetty-launcher-2528-thread-2) [n:127.0.0.1:36659_solr     ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node2/.
>    [junit4]   2> 441578 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.MiniSolrCloudCluster waitForAllNodes: numServers=2
>    [junit4]   2> 441579 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
>    [junit4]   2> 441584 INFO  (zkConnectionManagerCallback-2571-thread-1) [     ] o.a.s.c.c.ConnectionManager zkClient has connected
>    [junit4]   2> 441585 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
>    [junit4]   2> 441588 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
>    [junit4]   2> 441607 INFO  (SUITE-TestHdfsCloudBackupRestore-seed#[E037D74065656872]-worker) [     ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:45147/solr ready
>    [junit4]   2> 441715 INFO  (qtp1840676713-6927) [n:127.0.0.1:36659_solr     ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&router.name=implicit&version=2&pullReplicas=0&shards=shard1,shard2&property.customKey=customValue&maxShardsPerNode=3&router.field=shard_s&autoAddReplicas=true&name=hdfsbackuprestore_testok&nrtReplicas=2&action=CREATE&tlogReplicas=1&wt=javabin and sendToOCPQueue=true
>    [junit4]   2> 441723 INFO  (OverseerThreadFactory-1679-thread-1-processing-n:127.0.0.1:46735_solr) [n:127.0.0.1:46735_solr     ] o.a.s.c.a.c.CreateCollectionCmd Create collection hdfsbackuprestore_testok
>    [junit4]   2> 441830 WARN  (OverseerThreadFactory-1679-thread-1-processing-n:127.0.0.1:46735_solr) [n:127.0.0.1:46735_solr     ] o.a.s.c.a.c.CreateCollectionCmd Specified number of replicas of 3 on collection hdfsbackuprestore_testok is higher than the number of Solr instances currently live or live and part of your createNodeSet(2). It's unusual to run two replica of the same slice on the same Solr-instance.
>    [junit4]   2> 441836 INFO  (OverseerStateUpdate-72285712308305927-127.0.0.1:46735_solr-n_0000000000) [n:127.0.0.1:46735_solr     ] o.a.s.c.o.SliceMutator createReplica() {
>    [junit4]   2>   "operation":"ADDREPLICA",
>    [junit4]   2>   "collection":"hdfsbackuprestore_testok",
>    [junit4]   2>   "shard":"shard1",
>    [junit4]   2>   "core":"hdfsbackuprestore_testok_shard1_replica_n1",
>    [junit4]   2>   "state":"down",
>    [junit4]   2>   "base_url":"https://127.0.0.1:46735/solr",
>    [junit4]   2>   "type":"NRT",
>    [junit4]   2>   "waitForFinalState":"false"}
>    [junit4]   2> 441841 INFO  (OverseerStateUpdate-72285712308305927-127.0.0.1:46735_solr-n_0000000000) [n:127.0.0.1:46735_solr     ] o.a.s.c.o.SliceMutator createReplica() {
>    [junit4]   2>   "operation":"ADDREPLICA",
>    [junit4]   2>   "collection":"hdfsbackuprestore_testok",
>    [junit4]   2>   "shard":"shard1",
>    [junit4]   2>   "core":"hdfsbackuprestore_testok_shard1_replica_n2",
>    [junit4]   2>   "state":"down",
>    [junit4]   2>   "base_url":"https://127.0.0.1:36659/solr",
>    [junit4]   2>   "type":"NRT",
>    [junit4]   2>   "waitForFinalState":"false"}
>    [junit4]   2> 441847 INFO  (OverseerStateUpdate-72285712308305927-127.0.0.1:46735_solr-n_0000000000) [n:127.0.0.1:46735_solr     ] o.a.s.c.o.SliceMutator createReplica() {
>    [junit4]   2>   "operation":"ADDREPLICA",
>    [junit4]   2>   "collection":"hdfsbackuprestore_testok",
>    [junit4]   2>   "shard":"shard1",
>    [junit4]   2>   "core":"hdfsbackuprestore_testok_shard1_replica_t4",
>    [junit4]   2>   "state":"down",
>    [junit4]   2>   "base_url":"https://127.0.0.1:46735/solr",
>    [junit4]   2>   "type":"TLOG",
>    [junit4]   2>   "waitForFinalState":"false"}
>    [junit4]   2> 441852 INFO  (OverseerStateUpdate-72285712308305927-127.0.0.1:46735_solr-n_0000000000) [n:127.0.0.1:46735_solr     ] o.a.s.c.o.SliceMutator createReplica() {
>    [junit4]   2>   "operation":"ADDREPLICA",
>    [junit4]   2>   "collection":"hdfsbackuprestore_testok",
>    [junit4]   2>   "shard":"shard2",
>    [junit4]   2>   "core":"hdfsbackuprestore_testok_shard2_replica_n7",
>    [junit4]   2>   "state":"down",
>    [junit4]   2>   "base_url":"https://127.0.0.1:36659/solr",
>    [junit4]   2>   "type":"NRT",
>    [junit4]   2>   "waitForFinalState":"false"}
>    [junit4]   2> 441855 INFO  (OverseerStateUpdate-72285712308305927-127.0.0.1:46735_solr-n_0000000000) [n:127.0.0.1:46735_solr     ] o.a.s.c.o.SliceMutator createReplica() {
>    [junit4]   2>   "operation":"ADDREPLICA",
>    [junit4]   2>   "collection":"hdfsbackuprestore_testok",
>    [junit4]   2>   "shard":"shard2",
>    [junit4]   2>   "core":"hdfsbackuprestore_testok_shard2_replica_n8",
>    [junit4]   2>   "state":"down",
>    [junit4]   2>   "base_url":"https://127.0.0.1:46735/solr",
>    [junit4]   2>   "type":"NRT",
>    [junit4]   2>   "waitForFinalState":"false"}
>    [junit4]   2> 441859 INFO  (OverseerStateUpdate-72285712308305927-127.0.0.1:46735_solr-n_0000000000) [n:127.0.0.1:46735_solr     ] o.a.s.c.o.SliceMutator createReplica() {
>    [junit4]   2>   "operation":"ADDREPLICA",
>    [junit4]   2>   "collection":"hdfsbackuprestore_testok",
>    [junit4]   2>   "shard":"shard2",
>    [junit4]   2>   "core":"hdfsbackuprestore_testok_shard2_replica_t10",
>    [junit4]   2>   "state":"down",
>    [junit4]   2>   "base_url":"https://127.0.0.1:36659/solr",
>    [junit4]   2>   "type":"TLOG",
>    [junit4]   2>   "waitForFinalState":"false"}
>    [junit4]   2> 442066 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr    x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&collection=hdfsbackuprestore_testok&version=2&replicaType=NRT&property.customKey=customValue&coreNodeName=core_node5&name=hdfsbackuprestore_testok_shard1_replica_n2&action=CREATE&numShards=2&shard=shard1&wt=javabin
>    [junit4]   2> 442066 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr    x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 transient cores
>    [junit4]   2> 442078 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr    x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&collection=hdfsbackuprestore_testok&version=2&replicaType=NRT&property.customKey=customValue&coreNodeName=core_node9&name=hdfsbackuprestore_testok_shard2_replica_n7&action=CREATE&numShards=2&shard=shard2&wt=javabin
>    [junit4]   2> 442084 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr    x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&collection=hdfsbackuprestore_testok&version=2&replicaType=TLOG&property.customKey=customValue&coreNodeName=core_node12&name=hdfsbackuprestore_testok_shard2_replica_t10&action=CREATE&numShards=2&shard=shard2&wt=javabin
>    [junit4]   2> 442098 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr    x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&collection=hdfsbackuprestore_testok&version=2&replicaType=TLOG&property.customKey=customValue&coreNodeName=core_node6&name=hdfsbackuprestore_testok_shard1_replica_t4&action=CREATE&numShards=2&shard=shard1&wt=javabin
>    [junit4]   2> 442101 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr    x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&collection=hdfsbackuprestore_testok&version=2&replicaType=NRT&property.customKey=customValue&coreNodeName=core_node3&name=hdfsbackuprestore_testok_shard1_replica_n1&action=CREATE&numShards=2&shard=shard1&wt=javabin
>    [junit4]   2> 442112 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr    x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&collection=hdfsbackuprestore_testok&version=2&replicaType=NRT&property.customKey=customValue&coreNodeName=core_node11&name=hdfsbackuprestore_testok_shard2_replica_n8&action=CREATE&numShards=2&shard=shard2&wt=javabin
>    [junit4]   2> 443135 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.3.0
>    [junit4]   2> 443135 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.3.0
>    [junit4]   2> 443138 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.3.0
>    [junit4]   2> 443144 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.3.0
>    [junit4]   2> 443144 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.3.0
>    [junit4]   2> 443151 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.3.0
>    [junit4]   2> 443188 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.s.IndexSchema [hdfsbackuprestore_testok_shard1_replica_n1] Schema name=minimal
>    [junit4]   2> 443204 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.s.IndexSchema [hdfsbackuprestore_testok_shard1_replica_t4] Schema name=minimal
>    [junit4]   2> 443207 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
>    [junit4]   2> 443208 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.c.CoreContainer Creating SolrCore 'hdfsbackuprestore_testok_shard1_replica_t4' using configuration from collection hdfsbackuprestore_testok, trusted=true
>    [junit4]   2> 443208 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.hdfsbackuprestore_testok.shard1.replica_t4' (registry 'solr.core.hdfsbackuprestore_testok.shard1.replica_t4') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267
>    [junit4]   2> 443215 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.s.IndexSchema [hdfsbackuprestore_testok_shard2_replica_n7] Schema name=minimal
>    [junit4]   2> 443218 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.s.IndexSchema [hdfsbackuprestore_testok_shard1_replica_n2] Schema name=minimal
>    [junit4]   2> 443218 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
>    [junit4]   2> 443218 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.c.CoreContainer Creating SolrCore 'hdfsbackuprestore_testok_shard2_replica_n7' using configuration from collection hdfsbackuprestore_testok, trusted=true
>    [junit4]   2> 443219 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.hdfsbackuprestore_testok.shard2.replica_n7' (registry 'solr.core.hdfsbackuprestore_testok.shard2.replica_n7') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267
>    [junit4]   2> 443222 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.s.IndexSchema [hdfsbackuprestore_testok_shard2_replica_t10] Schema name=minimal
>    [junit4]   2> 443226 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
>    [junit4]   2> 443226 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.CoreContainer Creating SolrCore 'hdfsbackuprestore_testok_shard1_replica_n1' using configuration from collection hdfsbackuprestore_testok, trusted=true
>    [junit4]   2> 443227 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.hdfsbackuprestore_testok.shard1.replica_n1' (registry 'solr.core.hdfsbackuprestore_testok.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267
>    [junit4]   2> 443230 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
>    [junit4]   2> 443231 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.c.CoreContainer Creating SolrCore 'hdfsbackuprestore_testok_shard1_replica_n2' using configuration from collection hdfsbackuprestore_testok, trusted=true
>    [junit4]   2> 443231 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.c.SolrCore [[hdfsbackuprestore_testok_shard2_replica_n7] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node2/hdfsbackuprestore_testok_shard2_replica_n7], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node2/./hdfsbackuprestore_testok_shard2_replica_n7/data/]
>    [junit4]   2> 443232 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.hdfsbackuprestore_testok.shard1.replica_n2' (registry 'solr.core.hdfsbackuprestore_testok.shard1.replica_n2') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267
>    [junit4]   2> 443232 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.c.SolrCore [[hdfsbackuprestore_testok_shard1_replica_n2] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node2/hdfsbackuprestore_testok_shard1_replica_n2], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node2/./hdfsbackuprestore_testok_shard1_replica_n2/data/]
>    [junit4]   2> 443233 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.c.SolrCore [[hdfsbackuprestore_testok_shard1_replica_t4] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node1/hdfsbackuprestore_testok_shard1_replica_t4], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node1/./hdfsbackuprestore_testok_shard1_replica_t4/data/]
>    [junit4]   2> 443234 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
>    [junit4]   2> 443234 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.c.CoreContainer Creating SolrCore 'hdfsbackuprestore_testok_shard2_replica_t10' using configuration from collection hdfsbackuprestore_testok, trusted=true
>    [junit4]   2> 443235 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.hdfsbackuprestore_testok.shard2.replica_t10' (registry 'solr.core.hdfsbackuprestore_testok.shard2.replica_t10') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267
>    [junit4]   2> 443235 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.c.SolrCore [[hdfsbackuprestore_testok_shard2_replica_t10] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node2/hdfsbackuprestore_testok_shard2_replica_t10], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node2/./hdfsbackuprestore_testok_shard2_replica_t10/data/]
>    [junit4]   2> 443236 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.SolrCore [[hdfsbackuprestore_testok_shard1_replica_n1] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node1/hdfsbackuprestore_testok_shard1_replica_n1], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node1/./hdfsbackuprestore_testok_shard1_replica_n1/data/]
>    [junit4]   2> 443240 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.s.IndexSchema [hdfsbackuprestore_testok_shard2_replica_n8] Schema name=minimal
>    [junit4]   2> 443243 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
>    [junit4]   2> 443243 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.c.CoreContainer Creating SolrCore 'hdfsbackuprestore_testok_shard2_replica_n8' using configuration from collection hdfsbackuprestore_testok, trusted=true
>    [junit4]   2> 443244 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.hdfsbackuprestore_testok.shard2.replica_n8' (registry 'solr.core.hdfsbackuprestore_testok.shard2.replica_n8') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@481a4267
>    [junit4]   2> 443244 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.c.SolrCore [[hdfsbackuprestore_testok_shard2_replica_n8] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node1/hdfsbackuprestore_testok_shard2_replica_n8], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_E037D74065656872-001/tempDir-002/node1/./hdfsbackuprestore_testok_shard2_replica_n8/data/]
>    [junit4]   2> 443410 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
>    [junit4]   2> 443410 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
>    [junit4]   2> 443412 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
>    [junit4]   2> 443412 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
>    [junit4]   2> 443418 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@5d6f00f3[hdfsbackuprestore_testok_shard1_replica_n1] main]
>    [junit4]   2> 443427 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
>    [junit4]   2> 443428 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
>    [junit4]   2> 443432 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
>    [junit4]   2> 443432 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1643180686090174464
>    [junit4]   2> 443440 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/hdfsbackuprestore_testok/terms/shard1 to Terms{values={core_node3=0}, version=0}
>    [junit4]   2> 443441 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/hdfsbackuprestore_testok/leaders/shard1
>    [junit4]   2> 443444 INFO  (searcherExecutor-1690-thread-1-processing-n:127.0.0.1:46735_solr x:hdfsbackuprestore_testok_shard1_replica_n1 c:hdfsbackuprestore_testok s:shard1 r:core_node3) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.SolrCore [hdfsbackuprestore_testok_shard1_replica_n1] Registered new searcher Searcher@5d6f00f3[hdfsbackuprestore_testok_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
>    [junit4]   2> 443449 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext Waiting until we see more replicas up for shard shard1: total=3 found=1 timeoutin=9999ms
>    [junit4]   2> 443462 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
>    [junit4]   2> 443462 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
>    [junit4]   2> 443462 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
>    [junit4]   2> 443462 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
>    [junit4]   2> 443463 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
>    [junit4]   2> 443463 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
>    [junit4]   2> 443463 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
>    [junit4]   2> 443464 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
>    [junit4]   2> 443466 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
>    [junit4]   2> 443466 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
>    [junit4]   2> 443467 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@7add75ac[hdfsbackuprestore_testok_shard1_replica_n2] main]
>    [junit4]   2> 443467 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
>    [junit4]   2> 443467 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
>    [junit4]   2> 443470 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@abe7412[hdfsbackuprestore_testok_shard1_replica_t4] main]
>    [junit4]   2> 443473 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
>    [junit4]   2> 443475 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
>    [junit4]   2> 443475 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
>    [junit4]   2> 443476 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1643180686136311808
>    [junit4]   2> 443484 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
>    [junit4]   2> 443484 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@40bbb2b8[hdfsbackuprestore_testok_shard2_replica_n8] main]
>    [junit4]   2> 443485 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
>    [junit4]   2> 443485 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
>    [junit4]   2> 443485 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1643180686145748992
>    [junit4]   2> 443487 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
>    [junit4]   2> 443488 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
>    [junit4]   2> 443488 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
>    [junit4]   2> 443489 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1643180686148894720
>    [junit4]   2> 443494 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
>    [junit4]   2> 443494 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
>    [junit4]   2> 443494 INFO  (searcherExecutor-1691-thread-1-processing-n:127.0.0.1:36659_solr x:hdfsbackuprestore_testok_shard1_replica_n2 c:hdfsbackuprestore_testok s:shard1 r:core_node5) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.c.SolrCore [hdfsbackuprestore_testok_shard1_replica_n2] Registered new searcher Searcher@7add75ac[hdfsbackuprestore_testok_shard1_replica_n2] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
>    [junit4]   2> 443495 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
>    [junit4]   2> 443495 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
>    [junit4]   2> 443496 INFO  (searcherExecutor-1693-thread-1-processing-n:127.0.0.1:46735_solr x:hdfsbackuprestore_testok_shard2_replica_n8 c:hdfsbackuprestore_testok s:shard2 r:core_node11) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.c.SolrCore [hdfsbackuprestore_testok_shard2_replica_n8] Registered new searcher Searcher@40bbb2b8[hdfsbackuprestore_testok_shard2_replica_n8] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
>    [junit4]   2> 443497 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/hdfsbackuprestore_testok/terms/shard1 to Terms{values={core_node6=0, core_node3=0}, version=1}
>    [junit4]   2> 443498 INFO  (qtp2078506737-6920) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/hdfsbackuprestore_testok/leaders/shard1
>    [junit4]   2> 443500 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@69173d6[hdfsbackuprestore_testok_shard2_replica_t10] main]
>    [junit4]   2> 443501 INFO  (searcherExecutor-1688-thread-1-processing-n:127.0.0.1:46735_solr x:hdfsbackuprestore_testok_shard1_replica_t4 c:hdfsbackuprestore_testok s:shard1 r:core_node6) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node6 x:hdfsbackuprestore_testok_shard1_replica_t4 ] o.a.s.c.SolrCore [hdfsbackuprestore_testok_shard1_replica_t4] Registered new searcher Searcher@abe7412[hdfsbackuprestore_testok_shard1_replica_t4] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
>    [junit4]   2> 443503 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
>    [junit4]   2> 443504 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
>    [junit4]   2> 443505 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
>    [junit4]   2> 443505 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1643180686166720512
>    [junit4]   2> 443505 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/hdfsbackuprestore_testok/terms/shard1 to Terms{values={core_node6=0, core_node3=0, core_node5=0}, version=2}
>    [junit4]   2> 443505 INFO  (qtp1840676713-6923) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/hdfsbackuprestore_testok/leaders/shard1
>    [junit4]   2> 443514 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/hdfsbackuprestore_testok/terms/shard2 to Terms{values={core_node11=0}, version=0}
>    [junit4]   2> 443514 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/hdfsbackuprestore_testok/leaders/shard2
>    [junit4]   2> 443520 INFO  (searcherExecutor-1692-thread-1-processing-n:127.0.0.1:36659_solr x:hdfsbackuprestore_testok_shard2_replica_t10 c:hdfsbackuprestore_testok s:shard2 r:core_node12) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.c.SolrCore [hdfsbackuprestore_testok_shard2_replica_t10] Registered new searcher Searcher@69173d6[hdfsbackuprestore_testok_shard2_replica_t10] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
>    [junit4]   2> 443521 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/hdfsbackuprestore_testok/terms/shard2 to Terms{values={core_node12=0, core_node11=0}, version=1}
>    [junit4]   2> 443524 INFO  (qtp2078506737-6924) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_n8 ] o.a.s.c.ShardLeaderElectionContext Waiting until we see more replicas up for shard shard2: total=3 found=1 timeoutin=9998ms
>    [junit4]   2> 443530 INFO  (qtp1840676713-6921) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_t10 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/hdfsbackuprestore_testok/leaders/shard2
>    [junit4]   2> 443530 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
>    [junit4]   2> 443530 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
>    [junit4]   2> 443532 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.u.CommitTracker Hard AutoCommit: disabled
>    [junit4]   2> 443532 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.u.CommitTracker Soft AutoCommit: disabled
>    [junit4]   2> 443536 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.s.SolrIndexSearcher Opening [Searcher@50d2c5a[hdfsbackuprestore_testok_shard2_replica_n7] main]
>    [junit4]   2> 443538 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
>    [junit4]   2> 443538 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
>    [junit4]   2> 443539 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
>    [junit4]   2> 443539 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1643180686202372096
>    [junit4]   2> 443545 INFO  (searcherExecutor-1689-thread-1-processing-n:127.0.0.1:36659_solr x:hdfsbackuprestore_testok_shard2_replica_n7 c:hdfsbackuprestore_testok s:shard2 r:core_node9) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.c.SolrCore [hdfsbackuprestore_testok_shard2_replica_n7] Registered new searcher Searcher@50d2c5a[hdfsbackuprestore_testok_shard2_replica_n7] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
>    [junit4]   2> 443546 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.c.ZkShardTerms Successful update of terms at /collections/hdfsbackuprestore_testok/terms/shard2 to Terms{values={core_node12=0, core_node11=0, core_node9=0}, version=2}
>    [junit4]   2> 443546 INFO  (qtp1840676713-6919) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n7 ] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/hdfsbackuprestore_testok/leaders/shard2
>    [junit4]   2> 443952 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
>    [junit4]   2> 443952 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
>    [junit4]   2> 443952 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.SyncStrategy Sync replicas to https://127.0.0.1:46735/solr/hdfsbackuprestore_testok_shard1_replica_n1/
>    [junit4]   2> 443953 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.u.PeerSync PeerSync: core=hdfsbackuprestore_testok_shard1_replica_n1 url=https://127.0.0.1:46735/solr START replicas=[https://127.0.0.1:36659/solr/hdfsbackuprestore_testok_shard1_replica_n2/, https://127.0.0.1:46735/solr/hdfsbackuprestore_testok_shard1_replica_t4/] nUpdates=100
>    [junit4]   2> 443954 INFO  (qtp2078506737-6918) [n:127.0.0.1:46735_solr c:hdfsbackuprestore_testok s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.u.PeerSync PeerSync: core=hdfsbackuprestore_testok_shard1_replica_n1 url=https://127.0.0.1:46735/solr DONE.  We have no versions.  sync failed.
>    [junit4]   2> 443961 INFO  (qtp1840676713-6925) [n:127.0.0.1:36659_solr c:hdfsbackuprestore_testok s:shard1 r:core_node5 x:hdfsbackuprestore_testok_shard1_replica_n2 ] o.a.s.c.S.Request [hdfsbac
>
> [...truncated too long message...]
>
>  loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml
>
> resolve:
>
> ivy-availability-check:
> [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.
>
> -ivy-fail-disallowed-ivy-version:
>
> ivy-fail:
>
> ivy-configure:
> [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml
>
> resolve:
>
> ivy-availability-check:
> [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.
>
> -ivy-fail-disallowed-ivy-version:
>
> ivy-fail:
>
> ivy-fail:
>
> ivy-configure:
> [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml
>
> resolve:
>
> ivy-availability-check:
> [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.
>
> -ivy-fail-disallowed-ivy-version:
>
> ivy-fail:
>
> ivy-configure:
> [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml
>
> resolve:
>
> ivy-availability-check:
> [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.
>
> -ivy-fail-disallowed-ivy-version:
>
> ivy-fail:
>
> ivy-fail:
>
> ivy-configure:
> [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml
>
> resolve:
>
> ivy-availability-check:
> [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.
>
> -ivy-fail-disallowed-ivy-version:
>
> ivy-fail:
>
> ivy-fail:
>
> ivy-configure:
> [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml
>
> resolve:
>
> ivy-availability-check:
> [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.
>
> -ivy-fail-disallowed-ivy-version:
>
> ivy-fail:
>
> ivy-fail:
>
> ivy-configure:
> [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml
>
> resolve:
>
> ivy-availability-check:
> [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.
>
> -ivy-fail-disallowed-ivy-version:
>
> ivy-fail:
>
> ivy-fail:
>
> ivy-configure:
> [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml
>
> resolve:
>
> ivy-availability-check:
> [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.
>
> -ivy-fail-disallowed-ivy-version:
>
> ivy-fail:
>
> ivy-fail:
>
> ivy-configure:
> [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml
>
> resolve:
>
> ivy-availability-check:
> [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.
>
> -ivy-fail-disallowed-ivy-version:
>
> ivy-fail:
>
> ivy-fail:
>
> ivy-configure:
> [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml
>
> resolve:
>
> ivy-availability-check:
> [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.
>
> -ivy-fail-disallowed-ivy-version:
>
> ivy-fail:
>
> ivy-fail:
>
> ivy-configure:
> [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml
>
> resolve:
>
> ivy-availability-check:
> [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.
>
> -ivy-fail-disallowed-ivy-version:
>
> ivy-fail:
>
> ivy-fail:
>
> ivy-configure:
> [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml
>
> resolve:
>
> ivy-availability-check:
> [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.
>
> -ivy-fail-disallowed-ivy-version:
>
> ivy-fail:
>
> ivy-fail:
>
> ivy-configure:
> [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml
>
> resolve:
>
> ivy-availability-check:
> [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.
>
> -ivy-fail-disallowed-ivy-version:
>
> ivy-fail:
>
> ivy-fail:
>
> ivy-configure:
> [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml
>
> resolve:
>
> ivy-availability-check:
> [loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.
>
> -ivy-fail-disallowed-ivy-version:
>
> ivy-fail:
>
> ivy-fail:
>
> ivy-configure:
> [ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml
>
> resolve:
>
> jar-checksums:
>     [mkdir] Created dir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/null699052273
>      [copy] Copying 249 files to /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/null699052273
>    [delete] Deleting directory /home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/null699052273
>
> check-working-copy:
> [ivy:cachepath] :: resolving dependencies :: #;working@lucene1-us-west
> [ivy:cachepath]         confs: [default]
> [ivy:cachepath]         found org.eclipse.jgit#org.eclipse.jgit;5.3.0.201903130848-r in public
> [ivy:cachepath]         found com.jcraft#jsch;0.1.54 in public
> [ivy:cachepath]         found com.jcraft#jzlib;1.1.1 in public
> [ivy:cachepath]         found com.googlecode.javaewah#JavaEWAH;1.1.6 in public
> [ivy:cachepath]         found org.slf4j#slf4j-api;1.7.2 in public
> [ivy:cachepath]         found org.bouncycastle#bcpg-jdk15on;1.60 in public
> [ivy:cachepath]         found org.bouncycastle#bcprov-jdk15on;1.60 in public
> [ivy:cachepath]         found org.bouncycastle#bcpkix-jdk15on;1.60 in public
> [ivy:cachepath]         found org.slf4j#slf4j-nop;1.7.2 in public
> [ivy:cachepath] :: resolution report :: resolve 30ms :: artifacts dl 2ms
>         ---------------------------------------------------------------------
>         |                  |            modules            ||   artifacts   |
>         |       conf       | number| search|dwnlded|evicted|| number|dwnlded|
>         ---------------------------------------------------------------------
>         |      default     |   9   |   0   |   0   |   0   ||   9   |   0   |
>         ---------------------------------------------------------------------
> [wc-checker] Initializing working copy...
> [wc-checker] Checking working copy status...
>
> -jenkins-base:
>
> BUILD SUCCESSFUL
> Total time: 118 minutes 58 seconds
> Archiving artifacts
> java.lang.InterruptedException: no matches found within 10000
>         at hudson.FilePath$ValidateAntFileMask.hasMatch(FilePath.java:2847)
>         at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2726)
>         at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2707)
>         at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3086)
> Also:   hudson.remoting.Channel$CallSiteStackTrace: Remote call to lucene
>                 at hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1741)
>                 at hudson.remoting.UserRequest$ExceptionResponse.retrieve(UserRequest.java:357)
>                 at hudson.remoting.Channel.call(Channel.java:955)
>                 at hudson.FilePath.act(FilePath.java:1072)
>                 at hudson.FilePath.act(FilePath.java:1061)
>                 at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
>                 at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
>                 at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
>                 at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
>                 at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
>                 at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
>                 at hudson.model.Build$BuildExecution.post2(Build.java:186)
>                 at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
>                 at hudson.model.Run.execute(Run.java:1835)
>                 at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
>                 at hudson.model.ResourceController.execute(ResourceController.java:97)
>                 at hudson.model.Executor.run(Executor.java:429)
> Caused: hudson.FilePath$TunneledInterruptedException
>         at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3088)
>         at hudson.remoting.UserRequest.perform(UserRequest.java:212)
>         at hudson.remoting.UserRequest.perform(UserRequest.java:54)
>         at hudson.remoting.Request$2.run(Request.java:369)
>         at hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
>         at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>         at java.lang.Thread.run(Thread.java:744)
> Caused: java.lang.InterruptedException: java.lang.InterruptedException: no matches found within 10000
>         at hudson.FilePath.act(FilePath.java:1074)
>         at hudson.FilePath.act(FilePath.java:1061)
>         at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
>         at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
>         at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
>         at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
>         at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
>         at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
>         at hudson.model.Build$BuildExecution.post2(Build.java:186)
>         at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
>         at hudson.model.Run.execute(Run.java:1835)
>         at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
>         at hudson.model.ResourceController.execute(ResourceController.java:97)
>         at hudson.model.Executor.run(Executor.java:429)
> No artifacts found that match the file pattern "**/*.events,heapdumps/**,**/hs_err_pid*". Configuration error?
> Recording test results
> Build step 'Publish JUnit test result report' changed build result to UNSTABLE
> Email was triggered for: Unstable (Test Failures)
> Sending email for trigger: Unstable (Test Failures)

---------------------------------------------------------------------
To unsubscribe, e-mail: dev-unsubscribe@lucene.apache.org
For additional commands, e-mail: dev-help@lucene.apache.org