You are viewing a plain text version of this content. The canonical link to the original message is not preserved in this plain-text export; the build report URL appears in the message body below.
Posted to dev@lucene.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2019/05/16 20:36:00 UTC

[JENKINS] Lucene-Solr-NightlyTests-master - Build # 1849 - Still Failing

Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-master/1849/

2 tests failed.
FAILED:  org.apache.solr.spelling.SpellCheckCollatorTest.testEstimatedHitCounts

Error Message:
Exception during query

Stack Trace:
java.lang.RuntimeException: Exception during query
	at __randomizedtesting.SeedInfo.seed([35554349D37B0989:4EEFD7C76441959]:0)
	at org.apache.solr.SolrTestCaseJ4.assertQ(SolrTestCaseJ4.java:941)
	at org.apache.solr.SolrTestCaseJ4.assertQ(SolrTestCaseJ4.java:901)
	at org.apache.solr.spelling.SpellCheckCollatorTest.testEstimatedHitCounts(SpellCheckCollatorTest.java:569)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.base/java.lang.Thread.run(Thread.java:834)
Caused by: java.lang.RuntimeException: REQUEST FAILED: xpath=//lst[@name='spellcheck']/lst[@name='collations']/lst[@name='collation']/long[@name='hits' and 3 <= . and . <= 13]
	xml response was: <?xml version="1.0" encoding="UTF-8"?>
<response>
<lst name="responseHeader"><int name="status">0</int><int name="QTime">4</int></lst><result name="response" numFound="0" start="0"></result><lst name="spellcheck"><lst name="suggestions"><lst name="everother"><int name="numFound">1</int><int name="startOffset">9</int><int name="endOffset">18</int><arr name="suggestion"><str>everyother</str></arr></lst></lst><lst name="collations"><lst name="collation"><str name="collationQuery">teststop:everyother</str><long name="hits">14</long><lst name="misspellingsAndCorrections"><str name="everother">everyother</str></lst></lst></lst></lst>
</response>

	request was:spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:everother&spellcheck.collateMaxCollectDocs=5
	at org.apache.solr.SolrTestCaseJ4.assertQ(SolrTestCaseJ4.java:934)
	... 40 more


FAILED:  org.apache.solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest.testSimple

Error Message:
Waiting for collection testSimple2 Timeout waiting to see state for collection=testSimple2 :DocCollection(testSimple2//collections/testSimple2/state.json/25)={   "pullReplicas":"0",   "replicationFactor":"2",   "shards":{     "shard1":{       "range":"80000000-ffffffff",       "state":"active",       "replicas":{         "core_node2":{           "dataDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node2/data/",           "base_url":"http://127.0.0.1:42384/solr",           "node_name":"127.0.0.1:42384_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node2/data/tlog",           "core":"testSimple2_shard1_replica_n1",           "shared_storage":"true",           "state":"down"},         "core_node5":{           "dataDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node5/data/",           "base_url":"http://127.0.0.1:43787/solr",           "node_name":"127.0.0.1:43787_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node5/data/tlog",           "core":"testSimple2_shard1_replica_n3",           "shared_storage":"true",           "state":"active",           "leader":"true"}}},     "shard2":{       "range":"0-7fffffff",       "state":"active",       "replicas":{         "core_node7":{           "dataDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node7/data/",           "base_url":"http://127.0.0.1:42384/solr",           "node_name":"127.0.0.1:42384_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node7/data/tlog",           "core":"testSimple2_shard2_replica_n4",           "shared_storage":"true",           "state":"down"},         "core_node8":{           "dataDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node8/data/",           
"base_url":"http://127.0.0.1:43787/solr",           "node_name":"127.0.0.1:43787_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node8/data/tlog",           "core":"testSimple2_shard2_replica_n6",           "shared_storage":"true",           "state":"active",           "leader":"true"}}}},   "router":{"name":"compositeId"},   "maxShardsPerNode":"2",   "autoAddReplicas":"true",   "nrtReplicas":"2",   "tlogReplicas":"0"} Live Nodes: [127.0.0.1:35328_solr, 127.0.0.1:43787_solr] Last available state: DocCollection(testSimple2//collections/testSimple2/state.json/25)={   "pullReplicas":"0",   "replicationFactor":"2",   "shards":{     "shard1":{       "range":"80000000-ffffffff",       "state":"active",       "replicas":{         "core_node2":{           "dataDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node2/data/",           "base_url":"http://127.0.0.1:42384/solr",           "node_name":"127.0.0.1:42384_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node2/data/tlog",           "core":"testSimple2_shard1_replica_n1",           "shared_storage":"true",           "state":"down"},         "core_node5":{           "dataDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node5/data/",           "base_url":"http://127.0.0.1:43787/solr",           "node_name":"127.0.0.1:43787_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node5/data/tlog",           "core":"testSimple2_shard1_replica_n3",           "shared_storage":"true",           "state":"active",           "leader":"true"}}},     "shard2":{       "range":"0-7fffffff",       "state":"active",       "replicas":{         "core_node7":{           
"dataDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node7/data/",           "base_url":"http://127.0.0.1:42384/solr",           "node_name":"127.0.0.1:42384_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node7/data/tlog",           "core":"testSimple2_shard2_replica_n4",           "shared_storage":"true",           "state":"down"},         "core_node8":{           "dataDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node8/data/",           "base_url":"http://127.0.0.1:43787/solr",           "node_name":"127.0.0.1:43787_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node8/data/tlog",           "core":"testSimple2_shard2_replica_n6",           "shared_storage":"true",           "state":"active",           "leader":"true"}}}},   "router":{"name":"compositeId"},   "maxShardsPerNode":"2",   "autoAddReplicas":"true",   "nrtReplicas":"2",   "tlogReplicas":"0"}

Stack Trace:
java.lang.AssertionError: Waiting for collection testSimple2
Timeout waiting to see state for collection=testSimple2 :DocCollection(testSimple2//collections/testSimple2/state.json/25)={
  "pullReplicas":"0",
  "replicationFactor":"2",
  "shards":{
    "shard1":{
      "range":"80000000-ffffffff",
      "state":"active",
      "replicas":{
        "core_node2":{
          "dataDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node2/data/",
          "base_url":"http://127.0.0.1:42384/solr",
          "node_name":"127.0.0.1:42384_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node2/data/tlog",
          "core":"testSimple2_shard1_replica_n1",
          "shared_storage":"true",
          "state":"down"},
        "core_node5":{
          "dataDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node5/data/",
          "base_url":"http://127.0.0.1:43787/solr",
          "node_name":"127.0.0.1:43787_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node5/data/tlog",
          "core":"testSimple2_shard1_replica_n3",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"}}},
    "shard2":{
      "range":"0-7fffffff",
      "state":"active",
      "replicas":{
        "core_node7":{
          "dataDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node7/data/",
          "base_url":"http://127.0.0.1:42384/solr",
          "node_name":"127.0.0.1:42384_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node7/data/tlog",
          "core":"testSimple2_shard2_replica_n4",
          "shared_storage":"true",
          "state":"down"},
        "core_node8":{
          "dataDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node8/data/",
          "base_url":"http://127.0.0.1:43787/solr",
          "node_name":"127.0.0.1:43787_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node8/data/tlog",
          "core":"testSimple2_shard2_replica_n6",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"}}}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"2",
  "autoAddReplicas":"true",
  "nrtReplicas":"2",
  "tlogReplicas":"0"}
Live Nodes: [127.0.0.1:35328_solr, 127.0.0.1:43787_solr]
Last available state: DocCollection(testSimple2//collections/testSimple2/state.json/25)={
  "pullReplicas":"0",
  "replicationFactor":"2",
  "shards":{
    "shard1":{
      "range":"80000000-ffffffff",
      "state":"active",
      "replicas":{
        "core_node2":{
          "dataDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node2/data/",
          "base_url":"http://127.0.0.1:42384/solr",
          "node_name":"127.0.0.1:42384_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node2/data/tlog",
          "core":"testSimple2_shard1_replica_n1",
          "shared_storage":"true",
          "state":"down"},
        "core_node5":{
          "dataDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node5/data/",
          "base_url":"http://127.0.0.1:43787/solr",
          "node_name":"127.0.0.1:43787_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node5/data/tlog",
          "core":"testSimple2_shard1_replica_n3",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"}}},
    "shard2":{
      "range":"0-7fffffff",
      "state":"active",
      "replicas":{
        "core_node7":{
          "dataDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node7/data/",
          "base_url":"http://127.0.0.1:42384/solr",
          "node_name":"127.0.0.1:42384_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node7/data/tlog",
          "core":"testSimple2_shard2_replica_n4",
          "shared_storage":"true",
          "state":"down"},
        "core_node8":{
          "dataDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node8/data/",
          "base_url":"http://127.0.0.1:43787/solr",
          "node_name":"127.0.0.1:43787_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:35040/solr_hdfs_home/testSimple2/core_node8/data/tlog",
          "core":"testSimple2_shard2_replica_n6",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"}}}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"2",
  "autoAddReplicas":"true",
  "nrtReplicas":"2",
  "tlogReplicas":"0"}
	at __randomizedtesting.SeedInfo.seed([35554349D37B0989:DE667B7F488DD58]:0)
	at org.junit.Assert.fail(Assert.java:88)
	at org.apache.solr.cloud.SolrCloudTestCase.waitForState(SolrCloudTestCase.java:310)
	at org.apache.solr.cloud.autoscaling.AutoAddReplicasIntegrationTest.testSimple(AutoAddReplicasIntegrationTest.java:169)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.base/java.lang.Thread.run(Thread.java:834)




Build Log:
[...truncated 14005 lines...]
   [junit4] Suite: org.apache.solr.spelling.SpellCheckCollatorTest
   [junit4]   2> Creating dataDir: /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.spelling.SpellCheckCollatorTest_35554349D37B0989-001/init-core-data-001
   [junit4]   2> 2007377 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.c.SolrResourceLoader [null] Added 2 libs to classloader, from paths: [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/lib, /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/lib/classes]
   [junit4]   2> 2007407 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 2007430 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.s.IndexSchema [null] Schema name=test
   [junit4]   2> 2007527 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 2007608 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 2007611 WARN  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@fd0b933[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2007614 WARN  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@1d3f4ee4[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2007633 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 transient cores
   [junit4]   2> 2007633 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 2007658 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@2fb8882b
   [junit4]   2> 2007673 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@2fb8882b
   [junit4]   2> 2007673 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@2fb8882b
   [junit4]   2> 2007676 INFO  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.c.SolrResourceLoader [null] Added 2 libs to classloader, from paths: [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/lib, /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/lib/classes]
   [junit4]   2> 2007697 INFO  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 2007730 INFO  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 2007872 INFO  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 2007882 INFO  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from instancedir /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1, trusted=true
   [junit4]   2> 2007883 INFO  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.collection1' (registry 'solr.core.collection1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@2fb8882b
   [junit4]   2> 2007883 INFO  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1], dataDir=[/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.spelling.SpellCheckCollatorTest_35554349D37B0989-001/init-core-data-001/]
   [junit4]   2> 2007885 INFO  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: minMergeSize=1677721, mergeFactor=33, maxMergeSize=2147483648, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=false, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0]
   [junit4]   2> 2007957 INFO  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 2007957 INFO  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 2007958 INFO  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.MockRandomMergePolicy: org.apache.lucene.index.MockRandomMergePolicy@3a51391a
   [junit4]   2> 2007959 INFO  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@625a2c99[collection1] main]
   [junit4]   2> 2007959 WARN  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.r.ManagedResourceStorage Cannot write to config directory /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf; switching to use InMemory storage instead.
   [junit4]   2> 2007960 INFO  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.h.c.SpellCheckComponent Initializing spell checkers
   [junit4]   2> 2007994 INFO  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.h.c.SpellCheckComponent Registering newSearcher listener for spellchecker: default
   [junit4]   2> 2008005 INFO  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.h.c.SpellCheckComponent Registering newSearcher listener for spellchecker: default_teststop
   [junit4]   2> 2008005 INFO  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.s.DirectSolrSpellChecker init: {name=direct,classname=solr.DirectSolrSpellChecker,minQueryLength=3,maxQueryFrequency=100.0,field=teststop}
   [junit4]   2> 2008005 INFO  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.s.DirectSolrSpellChecker init: {name=direct_lowerfilt,classname=solr.DirectSolrSpellChecker,minQueryLength=3,maxQueryFrequency=100.0,field=lowerfilt}
   [junit4]   2> 2008017 INFO  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.h.c.SpellCheckComponent Registering newSearcher listener for spellchecker: threshold
   [junit4]   2> 2008018 INFO  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.s.DirectSolrSpellChecker init: {name=threshold_direct,classname=solr.DirectSolrSpellChecker,field=lowerfilt,spellcheckIndexDir=spellcheckerThreshold,buildOnCommit=true,thresholdTokenFrequency=0.29}
   [junit4]   2> 2008018 INFO  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.h.c.SpellCheckComponent Registering newSearcher listener for spellchecker: threshold_direct
   [junit4]   2> 2008028 INFO  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.h.c.SpellCheckComponent Registering newSearcher listener for spellchecker: multipleFields
   [junit4]   2> 2008071 INFO  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.h.c.SpellCheckComponent Registering newSearcher listener for spellchecker: freq
   [junit4]   2> 2008088 INFO  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.h.c.SpellCheckComponent Registering newSearcher listener for spellchecker: fqcn
   [junit4]   2> 2008089 INFO  (coreLoadExecutor-7020-thread-1) [    x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 2008090 INFO  (searcherExecutor-7021-thread-1-processing-x:collection1) [    x:collection1] o.a.s.h.c.SpellCheckComponent Loading spell index for spellchecker: default
   [junit4]   2> 2008092 INFO  (searcherExecutor-7021-thread-1-processing-x:collection1) [    x:collection1] o.a.s.h.c.SpellCheckComponent Loading spell index for spellchecker: default_teststop
   [junit4]   2> 2008093 INFO  (searcherExecutor-7021-thread-1-processing-x:collection1) [    x:collection1] o.a.s.h.c.SpellCheckComponent Loading spell index for spellchecker: direct
   [junit4]   2> 2008093 INFO  (searcherExecutor-7021-thread-1-processing-x:collection1) [    x:collection1] o.a.s.h.c.SpellCheckComponent Loading spell index for spellchecker: direct_lowerfilt
   [junit4]   2> 2008093 INFO  (searcherExecutor-7021-thread-1-processing-x:collection1) [    x:collection1] o.a.s.h.c.SpellCheckComponent Loading spell index for spellchecker: wordbreak
   [junit4]   2> 2008093 INFO  (searcherExecutor-7021-thread-1-processing-x:collection1) [    x:collection1] o.a.s.h.c.SpellCheckComponent Loading spell index for spellchecker: threshold
   [junit4]   2> 2008095 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=null path=null params={}{add=[0]} 0 3
   [junit4]   2> 2008099 INFO  (searcherExecutor-7021-thread-1-processing-x:collection1) [    x:collection1] o.a.s.h.c.SpellCheckComponent Loading spell index for spellchecker: threshold_direct
   [junit4]   2> 2008099 INFO  (searcherExecutor-7021-thread-1-processing-x:collection1) [    x:collection1] o.a.s.h.c.SpellCheckComponent Loading spell index for spellchecker: multipleFields
   [junit4]   2> 2008099 INFO  (searcherExecutor-7021-thread-1-processing-x:collection1) [    x:collection1] o.a.s.h.c.SpellCheckComponent Loading spell index for spellchecker: jarowinkler
   [junit4]   2> 2008100 INFO  (searcherExecutor-7021-thread-1-processing-x:collection1) [    x:collection1] o.a.s.h.c.SpellCheckComponent Loading spell index for spellchecker: external
   [junit4]   2> 2008100 INFO  (searcherExecutor-7021-thread-1-processing-x:collection1) [    x:collection1] o.a.s.h.c.SpellCheckComponent Loading spell index for spellchecker: freq
   [junit4]   2> 2008101 INFO  (searcherExecutor-7021-thread-1-processing-x:collection1) [    x:collection1] o.a.s.h.c.SpellCheckComponent Loading spell index for spellchecker: fqcn
   [junit4]   2> 2008101 INFO  (searcherExecutor-7021-thread-1-processing-x:collection1) [    x:collection1] o.a.s.h.c.SpellCheckComponent Loading spell index for spellchecker: perDict
   [junit4]   2> 2008102 INFO  (searcherExecutor-7021-thread-1-processing-x:collection1) [    x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@625a2c99[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2008122 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=null path=null params={}{add=[1]} 0 24
   [junit4]   2> 2008138 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=null path=null params={}{add=[2]} 0 14
   [junit4]   2> 2008177 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=null path=null params={}{add=[3]} 0 38
   [junit4]   2> 2008180 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=null path=null params={}{add=[4]} 0 1
   [junit4]   2> 2008217 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=null path=null params={}{add=[5]} 0 37
   [junit4]   2> 2008219 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=null path=null params={}{add=[6]} 0 0
   [junit4]   2> 2008298 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=null path=null params={}{add=[7]} 0 78
   [junit4]   2> 2008300 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=null path=null params={}{add=[8]} 0 1
   [junit4]   2> 2008312 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=null path=null params={}{add=[9]} 0 12
   [junit4]   2> 2008314 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=null path=null params={}{add=[10]} 0 0
   [junit4]   2> 2008322 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=null path=null params={}{add=[11]} 0 7
   [junit4]   2> 2008323 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=null path=null params={}{add=[12]} 0 0
   [junit4]   2> 2008331 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=null path=null params={}{add=[13]} 0 8
   [junit4]   2> 2008333 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=null path=null params={}{add=[14]} 0 0
   [junit4]   2> 2008347 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=null path=null params={}{add=[15]} 0 13
   [junit4]   2> 2008349 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=null path=null params={}{add=[16]} 0 0
   [junit4]   2> 2008349 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 2008349 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.u.SolrIndexWriter Calling setCommitData with IW:org.apache.solr.update.SolrIndexWriter@3e180cad commitCommandVersion:0
   [junit4]   2> 2008370 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.s.SolrIndexSearcher Opening [Searcher@1a560917[collection1] main]
   [junit4]   2> 2008370 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 2008370 INFO  (searcherExecutor-7021-thread-1) [    ] o.a.s.h.c.SpellCheckComponent Building spell index for spell checker: default
   [junit4]   2> 2008467 INFO  (searcherExecutor-7021-thread-1) [    ] o.a.s.h.c.SpellCheckComponent Building spell index for spell checker: default_teststop
   [junit4]   2> 2008534 INFO  (searcherExecutor-7021-thread-1) [    ] o.a.s.h.c.SpellCheckComponent Building spell index for spell checker: threshold
   [junit4]   2> 2008557 INFO  (searcherExecutor-7021-thread-1) [    ] o.a.s.h.c.SpellCheckComponent Building spell index for spell checker: threshold_direct
   [junit4]   2> 2008557 INFO  (searcherExecutor-7021-thread-1) [    ] o.a.s.h.c.SpellCheckComponent Building spell index for spell checker: multipleFields
   [junit4]   2> 2008628 INFO  (searcherExecutor-7021-thread-1) [    ] o.a.s.h.c.SpellCheckComponent Building spell index for spell checker: freq
   [junit4]   2> 2008934 INFO  (searcherExecutor-7021-thread-1) [    ] o.a.s.h.c.SpellCheckComponent Building spell index for spell checker: fqcn
   [junit4]   2> 2009013 INFO  (searcherExecutor-7021-thread-1) [    ] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@1a560917[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader(Uninverting(_2(9.0.0):c2:[diagnostics={timestamp=1558034123096, os=Linux, java.runtime.version=11.0.1+13-LTS, mergeFactor=1, os.arch=amd64, source=merge, java.vendor=Oracle Corporation, os.version=4.4.0-137-generic, mergeMaxNumSegments=-1, java.version=11.0.1, java.vm.version=11.0.1+13-LTS, lucene.version=9.0.0}]:[attributes={Lucene50StoredFieldsFormat.mode=BEST_SPEED}]) Uninverting(_1(9.0.0):c2:[diagnostics={os.version=4.4.0-137-generic, os=Linux, java.vendor=Oracle Corporation, timestamp=1558034123071, java.version=11.0.1, java.vm.version=11.0.1+13-LTS, lucene.version=9.0.0, source=flush, os.arch=amd64, java.runtime.version=11.0.1+13-LTS}]:[attributes={Lucene50StoredFieldsFormat.mode=BEST_SPEED}]) Uninverting(_3(9.0.0):c2:[diagnostics={os.version=4.4.0-137-generic, os=Linux, java.vendor=Oracle Corporation, timestamp=1558034123120, java.version=11.0.1, java.vm.version=11.0.1+13-LTS, lucene.version=9.0.0, source=flush, os.arch=amd64, java.runtime.version=11.0.1+13-LTS}]:[attributes={Lucene50StoredFieldsFormat.mode=BEST_SPEED}]) Uninverting(_4(9.0.0):c2:[diagnostics={os.version=4.4.0-137-generic, os=Linux, java.vendor=Oracle Corporation, timestamp=1558034123158, java.version=11.0.1, java.vm.version=11.0.1+13-LTS, lucene.version=9.0.0, source=flush, os.arch=amd64, java.runtime.version=11.0.1+13-LTS}]:[attributes={Lucene50StoredFieldsFormat.mode=BEST_SPEED}]) Uninverting(_5(9.0.0):c2:[diagnostics={os.version=4.4.0-137-generic, os=Linux, java.vendor=Oracle Corporation, timestamp=1558034123225, java.version=11.0.1, java.vm.version=11.0.1+13-LTS, lucene.version=9.0.0, source=flush, os.arch=amd64, java.runtime.version=11.0.1+13-LTS}]:[attributes={Lucene50StoredFieldsFormat.mode=BEST_SPEED}]) Uninverting(_6(9.0.0):c2:[diagnostics={os.version=4.4.0-137-generic, 
os=Linux, java.vendor=Oracle Corporation, timestamp=1558034123236, java.version=11.0.1, java.vm.version=11.0.1+13-LTS, lucene.version=9.0.0, source=flush, os.arch=amd64, java.runtime.version=11.0.1+13-LTS}]:[attributes={Lucene50StoredFieldsFormat.mode=BEST_SPEED}]) Uninverting(_7(9.0.0):c2:[diagnostics={os.version=4.4.0-137-generic, os=Linux, java.vendor=Oracle Corporation, timestamp=1558034123245, java.version=11.0.1, java.vm.version=11.0.1+13-LTS, lucene.version=9.0.0, source=flush, os.arch=amd64, java.runtime.version=11.0.1+13-LTS}]:[attributes={Lucene50StoredFieldsFormat.mode=BEST_SPEED}]) Uninverting(_8(9.0.0):c2:[diagnostics={os.version=4.4.0-137-generic, os=Linux, java.vendor=Oracle Corporation, timestamp=1558034123256, java.version=11.0.1, java.vm.version=11.0.1+13-LTS, lucene.version=9.0.0, source=flush, os.arch=amd64, java.runtime.version=11.0.1+13-LTS}]:[attributes={Lucene50StoredFieldsFormat.mode=BEST_SPEED}]) Uninverting(_9(9.0.0):c1:[diagnostics={os.version=4.4.0-137-generic, os=Linux, java.vendor=Oracle Corporation, timestamp=1558034123279, java.version=11.0.1, java.vm.version=11.0.1+13-LTS, lucene.version=9.0.0, source=flush, os.arch=amd64, java.runtime.version=11.0.1+13-LTS}]:[attributes={Lucene50StoredFieldsFormat.mode=BEST_SPEED}])))}
   [junit4]   2> 2009013 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=null path=null params={}{commit=} 0 664
   [junit4]   2> 2009178 INFO  (TEST-SpellCheckCollatorTest.testCollateWithOverride-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={mm=1&q=partisian+politcal+mashine&defType=edismax&spellcheck.dictionary=direct&qt=/spellCheckCompRH&spellcheck=true&qf=teststop&spellcheck.maxCollations=10&spellcheck.count=10&spellcheck.maxCollationTries=10&wt=xml&spellcheck.collate=true} hits=0 status=0 QTime=157
   [junit4]   2> 2009230 INFO  (TEST-SpellCheckCollatorTest.testCollateWithOverride-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={mm=1&spellcheck.dictionary=direct&qt=/spellCheckCompRH&spellcheck.maxCollations=10&spellcheck.maxCollationTries=10&q=partisian+politcal+mashine&defType=edismax&spellcheck.collateParam.mm=100%25&spellcheck=true&qf=teststop&spellcheck.count=10&wt=xml&spellcheck.collate=true} hits=0 status=0 QTime=51
   [junit4]   2> 2009277 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia} hits=0 status=0 QTime=6
   [junit4]   2> 2009282 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=0} hits=0 status=0 QTime=4
   [junit4]   2> 2009288 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=30} hits=0 status=0 QTime=4
   [junit4]   2> 2009293 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=100} hits=0 status=0 QTime=4
   [junit4]   2> 2009299 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=10000} hits=0 status=0 QTime=5
   [junit4]   2> 2009305 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=7} hits=0 status=0 QTime=4
   [junit4]   2> 2009311 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=15} hits=0 status=0 QTime=4
   [junit4]   2> 2009317 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=12} hits=0 status=0 QTime=4
   [junit4]   2> 2009322 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=9} hits=0 status=0 QTime=4
   [junit4]   2> 2009331 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=15} hits=0 status=0 QTime=8
   [junit4]   2> 2009336 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=7} hits=0 status=0 QTime=4
   [junit4]   2> 2009353 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=12} hits=0 status=0 QTime=11
   [junit4]   2> 2009358 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=10} hits=0 status=0 QTime=4
   [junit4]   2> 2009363 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=12} hits=0 status=0 QTime=4
   [junit4]   2> 2009368 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=6} hits=0 status=0 QTime=4
   [junit4]   2> 2009373 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=6} hits=0 status=0 QTime=3
   [junit4]   2> 2009377 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=13} hits=0 status=0 QTime=3
   [junit4]   2> 2009380 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=16} hits=0 status=0 QTime=3
   [junit4]   2> 2009384 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=7} hits=0 status=0 QTime=3
   [junit4]   2> 2009388 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=17} hits=0 status=0 QTime=3
   [junit4]   2> 2009392 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=13} hits=0 status=0 QTime=3
   [junit4]   2> 2009395 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=12} hits=0 status=0 QTime=2
   [junit4]   2> 2009399 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=16} hits=0 status=0 QTime=3
   [junit4]   2> 2009403 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=10} hits=0 status=0 QTime=3
   [junit4]   2> 2009407 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=4} hits=0 status=0 QTime=3
   [junit4]   2> 2009414 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=1} hits=0 status=0 QTime=5
   [junit4]   2> 2009421 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=6} hits=0 status=0 QTime=6
   [junit4]   2> 2009425 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=16} hits=0 status=0 QTime=3
   [junit4]   2> 2009432 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=9} hits=0 status=0 QTime=5
   [junit4]   2> 2009437 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=8} hits=0 status=0 QTime=5
   [junit4]   2> 2009443 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=7} hits=0 status=0 QTime=4
   [junit4]   2> 2009449 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=17} hits=0 status=0 QTime=5
   [junit4]   2> 2009455 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=2} hits=0 status=0 QTime=4
   [junit4]   2> 2009460 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=8} hits=0 status=0 QTime=5
   [junit4]   2> 2009466 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=14} hits=0 status=0 QTime=4
   [junit4]   2> 2009470 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=4} hits=0 status=0 QTime=3
   [junit4]   2> 2009473 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=13} hits=0 status=0 QTime=3
   [junit4]   2> 2009477 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=9} hits=0 status=0 QTime=2
   [junit4]   2> 2009481 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=12} hits=0 status=0 QTime=2
   [junit4]   2> 2009488 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=6} hits=0 status=0 QTime=6
   [junit4]   2> 2009492 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=10} hits=0 status=0 QTime=3
   [junit4]   2> 2009496 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=10} hits=0 status=0 QTime=3
   [junit4]   2> 2009500 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=15} hits=0 status=0 QTime=3
   [junit4]   2> 2009504 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=7} hits=0 status=0 QTime=3
   [junit4]   2> 2009508 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=17} hits=0 status=0 QTime=3
   [junit4]   2> 2009513 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=15} hits=0 status=0 QTime=3
   [junit4]   2> 2009520 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=2} hits=0 status=0 QTime=6
   [junit4]   2> 2009525 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=10} hits=0 status=0 QTime=4
   [junit4]   2> 2009530 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=2} hits=0 status=0 QTime=4
   [junit4]   2> 2009536 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=13} hits=0 status=0 QTime=4
   [junit4]   2> 2009543 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=17} hits=0 status=0 QTime=6
   [junit4]   2> 2009547 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=12} hits=0 status=0 QTime=3
   [junit4]   2> 2009551 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=17} hits=0 status=0 QTime=3
   [junit4]   2> 2009559 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=6} hits=0 status=0 QTime=7
   [junit4]   2> 2009567 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=15} hits=0 status=0 QTime=7
   [junit4]   2> 2009578 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=8} hits=0 status=0 QTime=9
   [junit4]   2> 2009585 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=17} hits=0 status=0 QTime=6
   [junit4]   2> 2009592 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=17} hits=0 status=0 QTime=6
   [junit4]   2> 2009597 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=2} hits=0 status=0 QTime=4
   [junit4]   2> 2009602 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:metnoia&spellcheck.collateMaxCollectDocs=11} hits=0 status=0 QTime=3
   [junit4]   2> 2009608 INFO  (TEST-SpellCheckCollatorTest.testEstimatedHitCounts-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:everother&spellcheck.collateMaxCollectDocs=5} hits=0 status=0 QTime=4
   [junit4]   2> NOTE: download the large Jenkins line-docs file by running 'ant get-jenkins-line-docs' in the lucene directory.
   [junit4]   2> NOTE: reproduce with: ant test  -Dtestcase=SpellCheckCollatorTest -Dtests.method=testEstimatedHitCounts -Dtests.seed=35554349D37B0989 -Dtests.multiplier=2 -Dtests.nightly=true -Dtests.slow=true -Dtests.linedocsfile=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/test-data/enwiki.random.lines.txt -Dtests.locale=et-EE -Dtests.timezone=Australia/Tasmania -Dtests.asserts=true -Dtests.file.encoding=UTF-8
   [junit4] ERROR   0.35s J0 | SpellCheckCollatorTest.testEstimatedHitCounts <<<
   [junit4]    > Throwable #1: java.lang.RuntimeException: Exception during query
   [junit4]    > 	at __randomizedtesting.SeedInfo.seed([35554349D37B0989:4EEFD7C76441959]:0)
   [junit4]    > 	at org.apache.solr.SolrTestCaseJ4.assertQ(SolrTestCaseJ4.java:941)
   [junit4]    > 	at org.apache.solr.SolrTestCaseJ4.assertQ(SolrTestCaseJ4.java:901)
   [junit4]    > 	at org.apache.solr.spelling.SpellCheckCollatorTest.testEstimatedHitCounts(SpellCheckCollatorTest.java:569)
   [junit4]    > 	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
   [junit4]    > 	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
   [junit4]    > 	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
   [junit4]    > 	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
   [junit4]    > 	at java.base/java.lang.Thread.run(Thread.java:834)
   [junit4]    > Caused by: java.lang.RuntimeException: REQUEST FAILED: xpath=//lst[@name='spellcheck']/lst[@name='collations']/lst[@name='collation']/long[@name='hits' and 3 <= . and . <= 13]
   [junit4]    > 	xml response was: <?xml version="1.0" encoding="UTF-8"?>
   [junit4]    > <response>
   [junit4]    > <lst name="responseHeader"><int name="status">0</int><int name="QTime">4</int></lst><result name="response" numFound="0" start="0"></result><lst name="spellcheck"><lst name="suggestions"><lst name="everother"><int name="numFound">1</int><int name="startOffset">9</int><int name="endOffset">18</int><arr name="suggestion"><str>everyother</str></arr></lst></lst><lst name="collations"><lst name="collation"><str name="collationQuery">teststop:everyother</str><long name="hits">14</long><lst name="misspellingsAndCorrections"><str name="everother">everyother</str></lst></lst></lst></lst>
   [junit4]    > </response>
   [junit4]    > 	request was:spellcheck=true&spellcheck.dictionary=direct&spellcheck.count=1&spellcheck.collate=true&spellcheck.maxCollationTries=1&spellcheck.maxCollations=1&spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&q=teststop:everother&spellcheck.collateMaxCollectDocs=5
   [junit4]    > 	at org.apache.solr.SolrTestCaseJ4.assertQ(SolrTestCaseJ4.java:934)
   [junit4]    > 	... 40 more
   [junit4]   1> id:[1 TO 10] AND lowerfilt:love
   [junit4]   2> 2010166 INFO  (TEST-SpellCheckCollatorTest.testContextSensitiveCollate-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&spellcheck.dictionary=direct&indent=true&spellcheck.maxCollations=1&spellcheck.maxCollationTries=10&spellcheck.alternativeTermCount=5&spellcheck.build=true&spellcheck.extendedResults=true&q=teststop:(flew+AND+form+AND+heathrow)&spellcheck.maxResultsForSuggest=0&spellcheck=true&spellcheck.count=10&wt=xml&spellcheck.collate=true} hits=0 status=0 QTime=8
   [junit4]   2> 2010174 INFO  (TEST-SpellCheckCollatorTest.testContextSensitiveCollate-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&spellcheck.dictionary=direct&indent=true&spellcheck.maxCollations=1&spellcheck.maxCollationTries=10&spellcheck.alternativeTermCount=5&spellcheck.extendedResults=true&q=teststop:(june+AND+customs)&spellcheck.maxResultsForSuggest=1&spellcheck=true&spellcheck.count=10&wt=xml&spellcheck.collate=true} hits=1 status=0 QTime=5
   [junit4]   2> 2010177 INFO  (TEST-SpellCheckCollatorTest.testContextSensitiveCollate-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={mm=2&q=teststop:(june+customs)&qt=/spellCheckCompRH&spellcheck.dictionary=direct&indent=true&spellcheck=true&spellcheck.count=10&spellcheck.alternativeTermCount=0&wt=xml&spellcheck.collate=true} hits=2 status=0 QTime=1
   [junit4]   2> 2010234 INFO  (TEST-SpellCheckCollatorTest.testContextSensitiveCollate-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&spellcheck.dictionary=default_teststop&indent=true&spellcheck.maxCollations=1&spellcheck.maxCollationTries=10&spellcheck.alternativeTermCount=5&spellcheck.build=true&spellcheck.extendedResults=true&q=teststop:(flew+AND+form+AND+heathrow)&spellcheck.maxResultsForSuggest=0&spellcheck=true&spellcheck.count=10&wt=xml&spellcheck.collate=true} hits=0 status=0 QTime=56
   [junit4]   2> 2010239 INFO  (TEST-SpellCheckCollatorTest.testContextSensitiveCollate-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={spellcheck.collateExtendedResults=true&qt=/spellCheckCompRH&spellcheck.dictionary=default_teststop&indent=true&spellcheck.maxCollations=1&spellcheck.maxCollationTries=10&spellcheck.alternativeTermCount=5&spellcheck.extendedResults=true&q=teststop:(june+AND+customs)&spellcheck.maxResultsForSuggest=1&spellcheck=true&spellcheck.count=10&wt=xml&spellcheck.collate=true} hits=1 status=0 QTime=3
   [junit4]   2> 2010242 INFO  (TEST-SpellCheckCollatorTest.testContextSensitiveCollate-seed#[35554349D37B0989]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={mm=2&q=teststop:(june+customs)&qt=/spellCheckCompRH&spellcheck.dictionary=default_teststop&indent=true&spellcheck=true&spellcheck.count=10&spellcheck.alternativeTermCount=0&wt=xml&spellcheck.collate=true} hits=2 status=0 QTime=1
   [junit4]   2> 2010243 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.c.CoreContainer Shutting down CoreContainer instance=304105115
   [junit4]   2> 2010244 INFO  (coreCloseExecutor-7025-thread-1) [    x:collection1] o.a.s.c.SolrCore [collection1]  CLOSING SolrCore org.apache.solr.core.SolrCore@67d66cb3
   [junit4]   2> 2010244 INFO  (coreCloseExecutor-7025-thread-1) [    x:collection1] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.core.collection1, tag=67d66cb3
   [junit4]   2> 2010244 INFO  (coreCloseExecutor-7025-thread-1) [    x:collection1] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@3d4bd178: rootName = null, domain = solr.core.collection1, service url = null, agent id = null] for registry solr.core.collection1 / com.codahale.metrics.MetricRegistry@1b365be5
   [junit4]   2> 2010262 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.node, tag=null
   [junit4]   2> 2010262 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@404adaf6: rootName = null, domain = solr.node, service url = null, agent id = null] for registry solr.node / com.codahale.metrics.MetricRegistry@309d8d2f
   [junit4]   2> 2010272 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.jvm, tag=null
   [junit4]   2> 2010272 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@166ea2ce: rootName = null, domain = solr.jvm, service url = null, agent id = null] for registry solr.jvm / com.codahale.metrics.MetricRegistry@b2d57f1
   [junit4]   2> 2010279 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.jetty, tag=null
   [junit4]   2> 2010279 INFO  (SUITE-SpellCheckCollatorTest-seed#[35554349D37B0989]-worker) [    ] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@62cface4: rootName = null, domain = solr.jetty, service url = null, agent id = null] for registry solr.jetty / com.codahale.metrics.MetricRegistry@21623257
   [junit4]   2> NOTE: leaving temporary files on disk at: /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.spelling.SpellCheckCollatorTest_35554349D37B0989-001
   [junit4]   2> NOTE: test params are: codec=Asserting(Lucene80): {start1=PostingsFormat(name=Direct), range_facet_l_dv=TestBloomFilteredLucenePostings(BloomFilteringPostingsFormat(Lucene50(blocksize=128))), multiDefault=PostingsFormat(name=LuceneVarGapDocFreqInterval), intDefault=PostingsFormat(name=MockRandom), range_facet_l=PostingsFormat(name=MockRandom), lowerfilt1and2=PostingsFormat(name=LuceneVarGapDocFreqInterval), end4=PostingsFormat(name=Direct), end3=PostingsFormat(name=LuceneVarGapDocFreqInterval), end2=TestBloomFilteredLucenePostings(BloomFilteringPostingsFormat(Lucene50(blocksize=128))), end1=PostingsFormat(name=MockRandom), lowerfilt=PostingsFormat(name=LuceneVarGapDocFreqInterval), gram1=PostingsFormat(name=MockRandom), id=TestBloomFilteredLucenePostings(BloomFilteringPostingsFormat(Lucene50(blocksize=128))), gram2=TestBloomFilteredLucenePostings(BloomFilteringPostingsFormat(Lucene50(blocksize=128))), gram3=PostingsFormat(name=LuceneVarGapDocFreqInterval), start3=TestBloomFilteredLucenePostings(BloomFilteringPostingsFormat(Lucene50(blocksize=128))), gram4=PostingsFormat(name=Direct), start2=PostingsFormat(name=LuceneVarGapDocFreqInterval), timestamp=PostingsFormat(name=MockRandom), start4=PostingsFormat(name=MockRandom), lowerfilt1=PostingsFormat(name=Direct), teststop=PostingsFormat(name=LuceneVarGapDocFreqInterval), _root_=PostingsFormat(name=MockRandom), id_i1=PostingsFormat(name=LuceneVarGapDocFreqInterval), range_facet_i_dv=PostingsFormat(name=MockRandom), word=PostingsFormat(name=MockRandom)}, docValues:{range_facet_l_dv=DocValuesFormat(name=Direct), multiDefault=DocValuesFormat(name=Asserting), teststop=DocValuesFormat(name=Asserting), _root_=DocValuesFormat(name=Lucene80), intDefault=DocValuesFormat(name=Lucene80), range_facet_l=DocValuesFormat(name=Lucene80), lowerfilt1and2=DocValuesFormat(name=Asserting), id_i1=DocValuesFormat(name=Asserting), lowerfilt=DocValuesFormat(name=Asserting), range_facet_i_dv=DocValuesFormat(name=Lucene80), 
id=DocValuesFormat(name=Direct), intDvoDefault=DocValuesFormat(name=Lucene80), timestamp=DocValuesFormat(name=Lucene80)}, maxPointsInLeafNode=621, maxMBSortInHeap=5.808596316163167, sim=Asserting(org.apache.lucene.search.similarities.AssertingSimilarity@2d8b626d), locale=et-EE, timezone=Australia/Tasmania
   [junit4]   2> NOTE: Linux 4.4.0-137-generic amd64/Oracle Corporation 11.0.1 (64-bit)/cpus=4,threads=1,free=168406240,total=536870912
   [junit4]   2> NOTE: All tests run in this JVM: [TestOrdValues, StatelessScriptUpdateProcessorFactoryTest, SolrPluginUtilsTest, DocumentAnalysisRequestHandlerTest, AnalysisAfterCoreReloadTest, ForceLeaderWithTlogReplicasTest, CoreMergeIndexesAdminHandlerTest, BlockCacheTest, MetricsConfigTest, FullHLLTest, CloneFieldUpdateProcessorFactoryTest, WrapperMergePolicyFactoryTest, LoggingHandlerTest, ExternalFileFieldSortTest, ChangedSchemaMergeTest, MultiSolrCloudTestCaseTest, TestCollationField, SchemaApiFailureTest, HdfsRestartWhileUpdatingTest, NodeAddedTriggerTest, SharedFSAutoReplicaFailoverTest, TestStressRecovery, HdfsDirectoryTest, SearchRateTriggerTest, HighlighterMaxOffsetTest, RollingRestartTest, TestInitParams, TestBulkSchemaConcurrent, TestDistributedMissingSort, CdcrBidirectionalTest, PeerSyncReplicationTest, TestSimTriggerIntegration, AnalysisErrorHandlingTest, TestSubQueryTransformerDistrib, TestFieldCacheWithThreads, TestDeprecatedFilters, CustomCollectionTest, StatsComponentTest, TestSolrDeletionPolicy2, TestSolrCLIRunExample, TestRebalanceLeaders, CustomTermsComponentTest, SolrJmxReporterCloudTest, TestLeaderElectionZkExpiry, LeaderFailoverAfterPartitionTest, TestQueryTypes, TestCoreContainer, ShardsWhitelistTest, ResponseBuilderTest, VersionInfoTest, ConcurrentCreateRoutedAliasTest, TestSkipOverseerOperations, AsyncCallRequestStatusResponseTest, JavabinLoaderTest, MoreLikeThisHandlerTest, UnloadDistributedZkTest, TestReplicationHandlerDiskOverFlow, TestCollectionAPIs, BJQParserTest, TestTolerantUpdateProcessorRandomCloud, SpellCheckCollatorTest]
   [junit4] Completed [324/857 (1!)] on J0 in 3.03s, 11 tests, 1 error <<< FAILURES!

[...truncated 456 lines...]
   [junit4] Suite: org.apache.solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest
   [junit4]   2> Creating dataDir: /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_35554349D37B0989-001/init-core-data-001
   [junit4]   1> Formatting using clusterid: testClusterID
   [junit4]   2> 3069802 WARN  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.a.h.m.i.MetricsConfig Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
   [junit4]   2> 3069843 WARN  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 3069846 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 3069849 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 3069849 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 3069849 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 3069849 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@1e59d4be{static,/static,jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 3070117 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@318afa9f{hdfs,/,file:///x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/jetty-localhost-45833-hdfs-_-any-11859608640119150602.dir/webapp/,AVAILABLE}{/hdfs}
   [junit4]   2> 3070118 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.s.AbstractConnector Started ServerConnector@175fb0bd{HTTP/1.1,[http/1.1]}{localhost:45833}
   [junit4]   2> 3070118 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.s.Server Started @3070174ms
   [junit4]   2> 3070245 WARN  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 3070247 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 3070248 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 3070248 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 3070248 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 3070248 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@1a721ba{static,/static,jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 3070420 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@b3e950e{datanode,/,file:///x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/jetty-localhost-32845-datanode-_-any-3418181467145684711.dir/webapp/,AVAILABLE}{/datanode}
   [junit4]   2> 3070420 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.s.AbstractConnector Started ServerConnector@5460ac26{HTTP/1.1,[http/1.1]}{localhost:32845}
   [junit4]   2> 3070420 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.s.Server Started @3070476ms
   [junit4]   2> 3070476 WARN  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 3070480 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 3070481 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 3070481 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 3070481 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 3070481 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@1418ac{static,/static,jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 3070597 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x3e925dcf7b525354: Processing first storage report for DS-94bd0915-fcd8-4785-be10-e1642b3d8eae from datanode ea55ca53-6403-49c4-b250-d414bd685fb8
   [junit4]   2> 3070598 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x3e925dcf7b525354: from storage DS-94bd0915-fcd8-4785-be10-e1642b3d8eae node DatanodeRegistration(127.0.0.1:45970, datanodeUuid=ea55ca53-6403-49c4-b250-d414bd685fb8, infoPort=37748, infoSecurePort=0, ipcPort=40500, storageInfo=lv=-57;cid=testClusterID;nsid=1091001222;c=1558035184657), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 3070598 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x3e925dcf7b525354: Processing first storage report for DS-9c031835-f235-42ca-a1fa-bdad2bab8b29 from datanode ea55ca53-6403-49c4-b250-d414bd685fb8
   [junit4]   2> 3070598 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x3e925dcf7b525354: from storage DS-9c031835-f235-42ca-a1fa-bdad2bab8b29 node DatanodeRegistration(127.0.0.1:45970, datanodeUuid=ea55ca53-6403-49c4-b250-d414bd685fb8, infoPort=37748, infoSecurePort=0, ipcPort=40500, storageInfo=lv=-57;cid=testClusterID;nsid=1091001222;c=1558035184657), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 3070710 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@46d07c44{datanode,/,file:///x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/jetty-localhost-38128-datanode-_-any-15674679880655231333.dir/webapp/,AVAILABLE}{/datanode}
   [junit4]   2> 3070710 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.s.AbstractConnector Started ServerConnector@36ea018{HTTP/1.1,[http/1.1]}{localhost:38128}
   [junit4]   2> 3070710 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[35554349D37B0989]-worker) [    ] o.e.j.

[...truncated too long message...]

rceLoader.java (at line 19)
 [ecj-lint] 	import javax.naming.Context;
 [ecj-lint] 	       ^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] The type javax.naming.Context is not accessible
 [ecj-lint] ----------
 [ecj-lint] 6. ERROR in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java (at line 20)
 [ecj-lint] 	import javax.naming.InitialContext;
 [ecj-lint] 	       ^^^^^^^^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] The type javax.naming.InitialContext is not accessible
 [ecj-lint] ----------
 [ecj-lint] 7. ERROR in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java (at line 21)
 [ecj-lint] 	import javax.naming.NamingException;
 [ecj-lint] 	       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] The type javax.naming.NamingException is not accessible
 [ecj-lint] ----------
 [ecj-lint] 8. ERROR in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java (at line 22)
 [ecj-lint] 	import javax.naming.NoInitialContextException;
 [ecj-lint] 	       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] The type javax.naming.NoInitialContextException is not accessible
 [ecj-lint] ----------
 [ecj-lint] 9. ERROR in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java (at line 776)
 [ecj-lint] 	Context c = new InitialContext();
 [ecj-lint] 	^^^^^^^
 [ecj-lint] Context cannot be resolved to a type
 [ecj-lint] ----------
 [ecj-lint] 10. ERROR in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java (at line 776)
 [ecj-lint] 	Context c = new InitialContext();
 [ecj-lint] 	                ^^^^^^^^^^^^^^
 [ecj-lint] InitialContext cannot be resolved to a type
 [ecj-lint] ----------
 [ecj-lint] 11. ERROR in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java (at line 779)
 [ecj-lint] 	} catch (NoInitialContextException e) {
 [ecj-lint] 	         ^^^^^^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] NoInitialContextException cannot be resolved to a type
 [ecj-lint] ----------
 [ecj-lint] 12. ERROR in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java (at line 781)
 [ecj-lint] 	} catch (NamingException e) {
 [ecj-lint] 	         ^^^^^^^^^^^^^^^
 [ecj-lint] NamingException cannot be resolved to a type
 [ecj-lint] ----------
 [ecj-lint] ----------
 [ecj-lint] 13. WARNING in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/handler/admin/SegmentsInfoRequestHandler.java (at line 215)
 [ecj-lint] 	leafReader = ((FilterLeafReader)leafReader).getDelegate();
 [ecj-lint] 	^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] Resource leak: 'leafReader' is not closed at this location
 [ecj-lint] ----------
 [ecj-lint] ----------
 [ecj-lint] 14. WARNING in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java (at line 142)
 [ecj-lint] 	return new JavaBinCodec(null, stringCache).setReadStringAsCharSeq(true);
 [ecj-lint] 	       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] Resource leak: '<unassigned Closeable value>' is never closed
 [ecj-lint] ----------
 [ecj-lint] ----------
 [ecj-lint] 15. WARNING in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/handler/loader/JavabinLoader.java (at line 137)
 [ecj-lint] 	new JavaBinCodec() {
 [ecj-lint]       SolrParams params;
 [ecj-lint]       AddUpdateCommand addCmd = null;
 [ecj-lint] 
 [ecj-lint]       @Override
 [ecj-lint]       public List<Object> readIterator(DataInputInputStream fis) throws IOException {
 [ecj-lint]         while (true) {
 [ecj-lint]           Object o = readVal(fis);
 [ecj-lint]           if (o == END_OBJ) break;
 [ecj-lint]           if (o instanceof NamedList) {
 [ecj-lint]             params = ((NamedList) o).toSolrParams();
 [ecj-lint]           } else {
 [ecj-lint]             try {
 [ecj-lint]               if (o instanceof byte[]) {
 [ecj-lint]                 if (params != null) req.setParams(params);
 [ecj-lint]                 byte[] buf = (byte[]) o;
 [ecj-lint]                 contentStreamLoader.load(req, rsp, new ContentStreamBase.ByteArrayStream(buf, null), processor);
 [ecj-lint]               } else {
 [ecj-lint]                 throw new RuntimeException("unsupported type ");
 [ecj-lint]               }
 [ecj-lint]             } catch (Exception e) {
 [ecj-lint]               throw new RuntimeException(e);
 [ecj-lint]             } finally {
 [ecj-lint]               params = null;
 [ecj-lint]               req.setParams(old);
 [ecj-lint]             }
 [ecj-lint]           }
 [ecj-lint]         }
 [ecj-lint]         return Collections.emptyList();
 [ecj-lint]       }
 [ecj-lint] 
 [ecj-lint]     }.unmarshal(in);
 [ecj-lint] 	^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] Resource leak: '<unassigned Closeable value>' is never closed
 [ecj-lint] ----------
 [ecj-lint] ----------
 [ecj-lint] 16. INFO in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/logging/log4j2/Log4j2Watcher.java (at line 187)
 [ecj-lint] 	if (logger == root || root.equals(logger) || isRootLogger(name) || "".equals(name)) {
 [ecj-lint] 	                                  ^^^^^^
 [ecj-lint] Unlikely argument type for equals(): Map.Entry<String,LoggerConfig> seems to be unrelated to Logger
 [ecj-lint] ----------
 [ecj-lint] ----------
 [ecj-lint] 17. WARNING in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java (at line 258)
 [ecj-lint] 	HardlinkCopyDirectoryWrapper hardLinkedDir = new HardlinkCopyDirectoryWrapper(splitDir);
 [ecj-lint] 	                             ^^^^^^^^^^^^^
 [ecj-lint] Resource leak: 'hardLinkedDir' is never closed
 [ecj-lint] ----------
 [ecj-lint] ----------
 [ecj-lint] 18. WARNING in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/util/FileUtils.java (at line 50)
 [ecj-lint] 	in = new FileInputStream(src).getChannel();
 [ecj-lint] 	     ^^^^^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] Resource leak: '<unassigned Closeable value>' is never closed
 [ecj-lint] ----------
 [ecj-lint] 19. WARNING in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/util/FileUtils.java (at line 51)
 [ecj-lint] 	out = new FileOutputStream(destination).getChannel();
 [ecj-lint] 	      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] Resource leak: '<unassigned Closeable value>' is never closed
 [ecj-lint] ----------
 [ecj-lint] ----------
 [ecj-lint] 20. WARNING in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/util/SolrCLI.java (at line 1143)
 [ecj-lint] 	SnapshotCloudManager snapshotCloudManager = new SnapshotCloudManager(simCloudManager, config);
 [ecj-lint] 	                     ^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] Resource leak: 'snapshotCloudManager' is never closed
 [ecj-lint] ----------
 [ecj-lint] ----------
 [ecj-lint] 21. WARNING in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/util/TestInjection.java (at line 263)
 [ecj-lint] 	timers.remove(this);
 [ecj-lint] 	              ^^^^
 [ecj-lint] Unlikely argument type new TimerTask(){} for remove(Object) on a Collection<Timer>
 [ecj-lint] ----------
 [ecj-lint] 21 problems (9 errors, 11 warnings, 1 info)

BUILD FAILED
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/build.xml:652: The following error occurred while executing this line:
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/build.xml:101: The following error occurred while executing this line:
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build.xml:681: The following error occurred while executing this line:
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/common-build.xml:2010: The following error occurred while executing this line:
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/common-build.xml:2049: Compile failed; see the compiler error output for details.

Total time: 331 minutes 4 seconds
Build step 'Invoke Ant' marked build as failure
Archiving artifacts
Recording test results
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any

[JENKINS] Lucene-Solr-NightlyTests-master - Build # 1859 - Failure

Posted by Apache Jenkins Server <je...@builds.apache.org>.
Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-master/1859/

1 tests failed.
FAILED:  org.apache.solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest.testSimple

Error Message:
Waiting for collection testSimple2 Timeout waiting to see state for collection=testSimple2 :DocCollection(testSimple2//collections/testSimple2/state.json/23)={   "pullReplicas":"0",   "replicationFactor":"2",   "shards":{     "shard1":{       "range":"80000000-ffffffff",       "state":"active",       "replicas":{         "core_node3":{           "dataDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node3/data/",           "base_url":"http://127.0.0.1:45011/solr",           "node_name":"127.0.0.1:45011_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node3/data/tlog",           "core":"testSimple2_shard1_replica_n1",           "shared_storage":"true",           "state":"down"},         "core_node5":{           "dataDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node5/data/",           "base_url":"http://127.0.0.1:32960/solr",           "node_name":"127.0.0.1:32960_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node5/data/tlog",           "core":"testSimple2_shard1_replica_n2",           "shared_storage":"true",           "state":"active",           "leader":"true"}}},     "shard2":{       "range":"0-7fffffff",       "state":"active",       "replicas":{         "core_node7":{           "dataDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node7/data/",           "base_url":"http://127.0.0.1:45011/solr",           "node_name":"127.0.0.1:45011_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node7/data/tlog",           "core":"testSimple2_shard2_replica_n4",           "shared_storage":"true",           "state":"down"},         "core_node8":{           "dataDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node8/data/",           
"base_url":"http://127.0.0.1:32960/solr",           "node_name":"127.0.0.1:32960_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node8/data/tlog",           "core":"testSimple2_shard2_replica_n6",           "shared_storage":"true",           "state":"active",           "leader":"true"}}}},   "router":{"name":"compositeId"},   "maxShardsPerNode":"2",   "autoAddReplicas":"true",   "nrtReplicas":"2",   "tlogReplicas":"0"} Live Nodes: [127.0.0.1:32960_solr, 127.0.0.1:35101_solr] Last available state: DocCollection(testSimple2//collections/testSimple2/state.json/23)={   "pullReplicas":"0",   "replicationFactor":"2",   "shards":{     "shard1":{       "range":"80000000-ffffffff",       "state":"active",       "replicas":{         "core_node3":{           "dataDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node3/data/",           "base_url":"http://127.0.0.1:45011/solr",           "node_name":"127.0.0.1:45011_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node3/data/tlog",           "core":"testSimple2_shard1_replica_n1",           "shared_storage":"true",           "state":"down"},         "core_node5":{           "dataDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node5/data/",           "base_url":"http://127.0.0.1:32960/solr",           "node_name":"127.0.0.1:32960_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node5/data/tlog",           "core":"testSimple2_shard1_replica_n2",           "shared_storage":"true",           "state":"active",           "leader":"true"}}},     "shard2":{       "range":"0-7fffffff",       "state":"active",       "replicas":{         "core_node7":{           
"dataDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node7/data/",           "base_url":"http://127.0.0.1:45011/solr",           "node_name":"127.0.0.1:45011_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node7/data/tlog",           "core":"testSimple2_shard2_replica_n4",           "shared_storage":"true",           "state":"down"},         "core_node8":{           "dataDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node8/data/",           "base_url":"http://127.0.0.1:32960/solr",           "node_name":"127.0.0.1:32960_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node8/data/tlog",           "core":"testSimple2_shard2_replica_n6",           "shared_storage":"true",           "state":"active",           "leader":"true"}}}},   "router":{"name":"compositeId"},   "maxShardsPerNode":"2",   "autoAddReplicas":"true",   "nrtReplicas":"2",   "tlogReplicas":"0"}

Stack Trace:
java.lang.AssertionError: Waiting for collection testSimple2
Timeout waiting to see state for collection=testSimple2 :DocCollection(testSimple2//collections/testSimple2/state.json/23)={
  "pullReplicas":"0",
  "replicationFactor":"2",
  "shards":{
    "shard1":{
      "range":"80000000-ffffffff",
      "state":"active",
      "replicas":{
        "core_node3":{
          "dataDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node3/data/",
          "base_url":"http://127.0.0.1:45011/solr",
          "node_name":"127.0.0.1:45011_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node3/data/tlog",
          "core":"testSimple2_shard1_replica_n1",
          "shared_storage":"true",
          "state":"down"},
        "core_node5":{
          "dataDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node5/data/",
          "base_url":"http://127.0.0.1:32960/solr",
          "node_name":"127.0.0.1:32960_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node5/data/tlog",
          "core":"testSimple2_shard1_replica_n2",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"}}},
    "shard2":{
      "range":"0-7fffffff",
      "state":"active",
      "replicas":{
        "core_node7":{
          "dataDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node7/data/",
          "base_url":"http://127.0.0.1:45011/solr",
          "node_name":"127.0.0.1:45011_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node7/data/tlog",
          "core":"testSimple2_shard2_replica_n4",
          "shared_storage":"true",
          "state":"down"},
        "core_node8":{
          "dataDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node8/data/",
          "base_url":"http://127.0.0.1:32960/solr",
          "node_name":"127.0.0.1:32960_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node8/data/tlog",
          "core":"testSimple2_shard2_replica_n6",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"}}}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"2",
  "autoAddReplicas":"true",
  "nrtReplicas":"2",
  "tlogReplicas":"0"}
Live Nodes: [127.0.0.1:32960_solr, 127.0.0.1:35101_solr]
Last available state: DocCollection(testSimple2//collections/testSimple2/state.json/23)={
  "pullReplicas":"0",
  "replicationFactor":"2",
  "shards":{
    "shard1":{
      "range":"80000000-ffffffff",
      "state":"active",
      "replicas":{
        "core_node3":{
          "dataDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node3/data/",
          "base_url":"http://127.0.0.1:45011/solr",
          "node_name":"127.0.0.1:45011_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node3/data/tlog",
          "core":"testSimple2_shard1_replica_n1",
          "shared_storage":"true",
          "state":"down"},
        "core_node5":{
          "dataDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node5/data/",
          "base_url":"http://127.0.0.1:32960/solr",
          "node_name":"127.0.0.1:32960_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node5/data/tlog",
          "core":"testSimple2_shard1_replica_n2",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"}}},
    "shard2":{
      "range":"0-7fffffff",
      "state":"active",
      "replicas":{
        "core_node7":{
          "dataDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node7/data/",
          "base_url":"http://127.0.0.1:45011/solr",
          "node_name":"127.0.0.1:45011_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node7/data/tlog",
          "core":"testSimple2_shard2_replica_n4",
          "shared_storage":"true",
          "state":"down"},
        "core_node8":{
          "dataDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node8/data/",
          "base_url":"http://127.0.0.1:32960/solr",
          "node_name":"127.0.0.1:32960_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:45877/solr_hdfs_home/testSimple2/core_node8/data/tlog",
          "core":"testSimple2_shard2_replica_n6",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"}}}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"2",
  "autoAddReplicas":"true",
  "nrtReplicas":"2",
  "tlogReplicas":"0"}
	at __randomizedtesting.SeedInfo.seed([10677DD7C00EF4B3:28D45929E7FD2062]:0)
	at org.junit.Assert.fail(Assert.java:88)
	at org.apache.solr.cloud.SolrCloudTestCase.waitForState(SolrCloudTestCase.java:310)
	at org.apache.solr.cloud.autoscaling.AutoAddReplicasIntegrationTest.testSimple(AutoAddReplicasIntegrationTest.java:169)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.base/java.lang.Thread.run(Thread.java:834)




Build Log:
[...truncated 14941 lines...]
   [junit4] Suite: org.apache.solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest
   [junit4]   2> 3979438 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[10677DD7C00EF4B3]-worker) [    ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
   [junit4]   2> Creating dataDir: /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_10677DD7C00EF4B3-001/init-core-data-001
   [junit4]   2> 3979439 WARN  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[10677DD7C00EF4B3]-worker) [    ] o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=34 numCloses=34
   [junit4]   2> 3979439 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[10677DD7C00EF4B3]-worker) [    ] o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP=false
   [junit4]   2> 3979440 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[10677DD7C00EF4B3]-worker) [    ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (true) via: @org.apache.solr.util.RandomizeSSL(reason="", ssl=0.0/0.0, value=0.0/0.0, clientAuth=0.0/0.0)
   [junit4]   1> Formatting using clusterid: testClusterID
   [junit4]   2> 3979481 WARN  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[10677DD7C00EF4B3]-worker) [    ] o.a.h.m.i.MetricsConfig Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
   [junit4]   2> 3979510 WARN  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[10677DD7C00EF4B3]-worker) [    ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 3979512 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[10677DD7C00EF4B3]-worker) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 3979514 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[10677DD7C00EF4B3]-worker) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 3979514 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[10677DD7C00EF4B3]-worker) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 3979514 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[10677DD7C00EF4B3]-worker) [    ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 3979514 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[10677DD7C00EF4B3]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@39729781{static,/static,jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 3979685 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[10677DD7C00EF4B3]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@122fbda4{hdfs,/,file:///x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/jetty-localhost-42003-hdfs-_-any-3961427032236587243.dir/webapp/,AVAILABLE}{/hdfs}
   [junit4]   2> 3979686 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[10677DD7C00EF4B3]-worker) [    ] o.e.j.s.AbstractConnector Started ServerConnector@2a0f31bf{HTTP/1.1,[http/1.1]}{localhost:42003}
   [junit4]   2> 3979686 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[10677DD7C00EF4B3]-worker) [    ] o.e.j.s.Server Started @3979735ms
   [junit4]   2> 3979760 WARN  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[10677DD7C00EF4B3]-worker) [    ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 3979762 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[10677DD7C00EF4B3]-worker) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 3979763 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[10677DD7C00EF4B3]-worker) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 3979763 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[10677DD7C00EF4B3]-worker) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 3979763 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[10677DD7C00EF4B3]-worker) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 3979764 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[10677DD7C00EF4B3]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@348eecbf{static,/static,jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 3979973 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[10677DD7C00EF4B3]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@610f3ca2{datanode,/,file:///x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/jetty-localhost-33315-datanode-_-any-11170477955664541907.dir/webapp/,AVAILABLE}{/datanode}
   [junit4]   2> 3979974 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[10677DD7C00EF4B3]-worker) [    ] o.e.j.s.AbstractConnector Started ServerConnector@9bdec38{HTTP/1.1,[http/1.1]}{localhost:33315}
   [junit4]   2> 3979974 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[10677DD7C00EF4B3]-worker) [    ] o.e.j.s.Server Started @3980023ms
   [junit4]   2> 3980088 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x38e93a6bd82c042: Processing first storage report for DS-3dbf6477-93bf-4a4d-b054-acde0d145a76 from datanode 70e843be-cfbe-4c53-b671-65dc39b1460d
   [junit4]   2> 3980089 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x38e93a6bd82c042: from storage DS-3dbf6477-93bf-4a4d-b054-acde0d145a76 node DatanodeRegistration(127.0.0.1:39122, datanodeUuid=70e843be-cfbe-4c53-b671-65dc39b1460d, infoPort=32843, infoSecurePort=0, ipcPort=46806, storageInfo=lv=-57;cid=testClusterID;nsid=358290576;c=1559396822072), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 3980089 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x38e93a6bd82c042: Processing first storage report for DS-bc63e626-764a-4ac6-83c8-2fa026e44e03 from datanode 70e843be-cfbe-4c53-b671-65dc39b1460d
   [junit4]   2> 3980089 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x38e93a6bd82c042: from storage DS-bc63e626-764a-4ac6-83c8-2fa026e44e03 node DatanodeRegistration(127.0.0.1:39122, datanodeUuid=70e843be-cfbe-4c53-b671-65dc39b1460d, infoPort=32843, infoSecurePort=0, ipcPort=46806, storageInfo=lv=-57;cid=testClusterID;nsid=358290576;c=1559396822072), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 3980284 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[10677DD7C00EF4B3]) [    ] o.a.s.SolrTestCaseJ4 ###Starting testSimple
   [junit4]   2> 3980285 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[10677DD7C00EF4B3]) [    ] o.a.s.c.MiniSolrCloudCluster Starting cluster of 3 servers in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_10677DD7C00EF4B3-001/tempDir-002
   [junit4]   2> 3980285 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[10677DD7C00EF4B3]) [    ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 3980286 INFO  (ZkTestServer Run Thread) [    ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 3980286 INFO  (ZkTestServer Run Thread) [    ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 3980386 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[10677DD7C00EF4B3]) [    ] o.a.s.c.ZkTestServer start zk server on port:34849
   [junit4]   2> 3980386 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[10677DD7C00EF4B3]) [    ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:34849
   [junit4]   2> 3980386 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[10677DD7C00EF4B3]) [    ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 34849
   [junit4]   2> 3980407 INFO  (zkConnectionManagerCallback-18238-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3980411 INFO  (zkConnectionManagerCallback-18240-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3980412 INFO  (zkConnectionManagerCallback-18242-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3980414 WARN  (jetty-launcher-18243-thread-1) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 3980415 INFO  (jetty-launcher-18243-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 3980415 INFO  (jetty-launcher-18243-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 3980415 INFO  (jetty-launcher-18243-thread-1) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 3980415 WARN  (jetty-launcher-18243-thread-2) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 3980416 INFO  (jetty-launcher-18243-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 3980416 INFO  (jetty-launcher-18243-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 3980416 INFO  (jetty-launcher-18243-thread-2) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 3980416 WARN  (jetty-launcher-18243-thread-3) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 3980417 INFO  (jetty-launcher-18243-thread-3) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 3980417 INFO  (jetty-launcher-18243-thread-3) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 3980417 INFO  (jetty-launcher-18243-thread-3) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 3980418 INFO  (jetty-launcher-18243-thread-2) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 3980418 INFO  (jetty-launcher-18243-thread-2) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 3980418 INFO  (jetty-launcher-18243-thread-2) [    ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 3980418 INFO  (jetty-launcher-18243-thread-2) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@6d2aabe5{/solr,null,AVAILABLE}
   [junit4]   2> 3980419 INFO  (jetty-launcher-18243-thread-2) [    ] o.e.j.s.AbstractConnector Started ServerConnector@5e4ac213{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:35101}
   [junit4]   2> 3980419 INFO  (jetty-launcher-18243-thread-2) [    ] o.e.j.s.Server Started @3980468ms
   [junit4]   2> 3980419 INFO  (jetty-launcher-18243-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=35101}
   [junit4]   2> 3980419 ERROR (jetty-launcher-18243-thread-2) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 3980419 INFO  (jetty-launcher-18243-thread-2) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 3980419 INFO  (jetty-launcher-18243-thread-2) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 9.0.0
   [junit4]   2> 3980419 INFO  (jetty-launcher-18243-thread-2) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 3980419 INFO  (jetty-launcher-18243-thread-2) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 3980419 INFO  (jetty-launcher-18243-thread-2) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-06-01T13:47:03.032933Z
   [junit4]   2> 3980421 INFO  (jetty-launcher-18243-thread-1) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 3980421 INFO  (jetty-launcher-18243-thread-1) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 3980421 INFO  (jetty-launcher-18243-thread-1) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 3980422 INFO  (jetty-launcher-18243-thread-1) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@48717d12{/solr,null,AVAILABLE}
   [junit4]   2> 3980422 INFO  (jetty-launcher-18243-thread-1) [    ] o.e.j.s.AbstractConnector Started ServerConnector@4b0fabf1{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:45011}
   [junit4]   2> 3980422 INFO  (jetty-launcher-18243-thread-1) [    ] o.e.j.s.Server Started @3980471ms
   [junit4]   2> 3980422 INFO  (jetty-launcher-18243-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=45011}
   [junit4]   2> 3980422 ERROR (jetty-launcher-18243-thread-1) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 3980422 INFO  (jetty-launcher-18243-thread-1) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 3980422 INFO  (jetty-launcher-18243-thread-1) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 9.0.0
   [junit4]   2> 3980422 INFO  (jetty-launcher-18243-thread-1) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 3980422 INFO  (jetty-launcher-18243-thread-1) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 3980422 INFO  (jetty-launcher-18243-thread-1) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-06-01T13:47:03.035888Z
   [junit4]   2> 3980425 INFO  (jetty-launcher-18243-thread-3) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 3980425 INFO  (jetty-launcher-18243-thread-3) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 3980425 INFO  (jetty-launcher-18243-thread-3) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 3980425 INFO  (jetty-launcher-18243-thread-3) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@7f3f5c23{/solr,null,AVAILABLE}
   [junit4]   2> 3980426 INFO  (jetty-launcher-18243-thread-3) [    ] o.e.j.s.AbstractConnector Started ServerConnector@60f123a5{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:32960}
   [junit4]   2> 3980426 INFO  (jetty-launcher-18243-thread-3) [    ] o.e.j.s.Server Started @3980475ms
   [junit4]   2> 3980426 INFO  (jetty-launcher-18243-thread-3) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=32960}
   [junit4]   2> 3980426 ERROR (jetty-launcher-18243-thread-3) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 3980426 INFO  (jetty-launcher-18243-thread-3) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 3980427 INFO  (jetty-launcher-18243-thread-3) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 9.0.0
   [junit4]   2> 3980427 INFO  (jetty-launcher-18243-thread-3) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 3980427 INFO  (jetty-launcher-18243-thread-3) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 3980427 INFO  (jetty-launcher-18243-thread-3) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-06-01T13:47:03.040515Z
   [junit4]   2> 3980429 INFO  (zkConnectionManagerCallback-18245-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3980429 INFO  (zkConnectionManagerCallback-18247-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3980430 INFO  (jetty-launcher-18243-thread-2) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 3980430 INFO  (jetty-launcher-18243-thread-1) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 3980437 INFO  (jetty-launcher-18243-thread-1) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 3980437 INFO  (jetty-launcher-18243-thread-1) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 3980438 INFO  (jetty-launcher-18243-thread-1) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@65b080f2, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 3980442 INFO  (jetty-launcher-18243-thread-2) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 3980442 INFO  (jetty-launcher-18243-thread-2) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 3980443 INFO  (jetty-launcher-18243-thread-2) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@65b080f2, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 3980463 INFO  (zkConnectionManagerCallback-18249-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3980464 INFO  (jetty-launcher-18243-thread-3) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 3980467 INFO  (jetty-launcher-18243-thread-3) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 3980467 INFO  (jetty-launcher-18243-thread-3) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 3980469 INFO  (jetty-launcher-18243-thread-3) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@65b080f2, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 3980572 INFO  (jetty-launcher-18243-thread-2) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 3980573 WARN  (jetty-launcher-18243-thread-2) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@5f2ed72b[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 3980576 WARN  (jetty-launcher-18243-thread-2) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@61e643db[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 3980578 INFO  (jetty-launcher-18243-thread-2) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:34849/solr
   [junit4]   2> 3980583 INFO  (zkConnectionManagerCallback-18258-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3980589 INFO  (jetty-launcher-18243-thread-1) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 3980592 WARN  (jetty-launcher-18243-thread-1) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@2560013e[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 3980595 INFO  (zkConnectionManagerCallback-18260-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3980597 WARN  (jetty-launcher-18243-thread-1) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@765cc171[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 3980598 INFO  (jetty-launcher-18243-thread-1) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:34849/solr
   [junit4]   2> 3980602 INFO  (zkConnectionManagerCallback-18266-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3980605 INFO  (zkConnectionManagerCallback-18270-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3980628 INFO  (jetty-launcher-18243-thread-2) [n:127.0.0.1:35101_solr    ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:35101_solr
   [junit4]   2> 3980629 INFO  (jetty-launcher-18243-thread-2) [n:127.0.0.1:35101_solr    ] o.a.s.c.Overseer Overseer (id=73403151157035015-127.0.0.1:35101_solr-n_0000000000) starting
   [junit4]   2> 3980640 INFO  (zkConnectionManagerCallback-18277-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3980644 INFO  (jetty-launcher-18243-thread-2) [n:127.0.0.1:35101_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:34849/solr ready
   [junit4]   2> 3980646 INFO  (OverseerStateUpdate-73403151157035015-127.0.0.1:35101_solr-n_0000000000) [n:127.0.0.1:35101_solr    ] o.a.s.c.Overseer Starting to work on the main queue : 127.0.0.1:35101_solr
   [junit4]   2> 3980652 INFO  (jetty-launcher-18243-thread-2) [n:127.0.0.1:35101_solr    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:35101_solr
   [junit4]   2> 3980654 INFO  (zkCallback-18276-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 3980669 INFO  (jetty-launcher-18243-thread-3) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 3980670 INFO  (zkCallback-18259-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 3980672 WARN  (jetty-launcher-18243-thread-3) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@6cd94883[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 3980675 WARN  (jetty-launcher-18243-thread-3) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@7a4f18fb[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 3980677 INFO  (jetty-launcher-18243-thread-3) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:34849/solr
   [junit4]   2> 3980679 INFO  (jetty-launcher-18243-thread-2) [n:127.0.0.1:35101_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 3980679 INFO  (zkConnectionManagerCallback-18284-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3980683 INFO  (zkConnectionManagerCallback-18286-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3980696 INFO  (jetty-launcher-18243-thread-3) [n:127.0.0.1:32960_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 3980701 INFO  (jetty-launcher-18243-thread-3) [n:127.0.0.1:32960_solr    ] o.a.s.c.ZkController Publish node=127.0.0.1:32960_solr as DOWN
   [junit4]   2> 3980702 INFO  (jetty-launcher-18243-thread-3) [n:127.0.0.1:32960_solr    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 3980702 INFO  (jetty-launcher-18243-thread-3) [n:127.0.0.1:32960_solr    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:32960_solr
   [junit4]   2> 3980704 INFO  (zkCallback-18276-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 3980704 INFO  (zkCallback-18259-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 3980705 INFO  (zkCallback-18285-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 3980707 INFO  (zkConnectionManagerCallback-18293-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3980709 INFO  (jetty-launcher-18243-thread-3) [n:127.0.0.1:32960_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 3980710 INFO  (jetty-launcher-18243-thread-2) [n:127.0.0.1:35101_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@65b080f2
   [junit4]   2> 3980711 INFO  (jetty-launcher-18243-thread-3) [n:127.0.0.1:32960_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:34849/solr ready
   [junit4]   2> 3980716 INFO  (jetty-launcher-18243-thread-1) [n:127.0.0.1:45011_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 3980720 INFO  (jetty-launcher-18243-thread-1) [n:127.0.0.1:45011_solr    ] o.a.s.c.ZkController Publish node=127.0.0.1:45011_solr as DOWN
   [junit4]   2> 3980721 INFO  (jetty-launcher-18243-thread-1) [n:127.0.0.1:45011_solr    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 3980721 INFO  (jetty-launcher-18243-thread-1) [n:127.0.0.1:45011_solr    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:45011_solr
   [junit4]   2> 3980723 INFO  (zkCallback-18285-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 3980723 INFO  (zkCallback-18259-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 3980723 INFO  (zkCallback-18276-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 3980724 INFO  (zkCallback-18292-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 3980727 INFO  (zkCallback-18269-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 3980729 INFO  (jetty-launcher-18243-thread-2) [n:127.0.0.1:35101_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@65b080f2
   [junit4]   2> 3980729 INFO  (jetty-launcher-18243-thread-2) [n:127.0.0.1:35101_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@65b080f2
   [junit4]   2> 3980729 INFO  (zkConnectionManagerCallback-18298-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3980731 INFO  (jetty-launcher-18243-thread-2) [n:127.0.0.1:35101_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_10677DD7C00EF4B3-001/tempDir-002/node2/.
   [junit4]   2> 3980731 INFO  (jetty-launcher-18243-thread-1) [n:127.0.0.1:45011_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 3980733 INFO  (jetty-launcher-18243-thread-1) [n:127.0.0.1:45011_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:34849/solr ready
   [junit4]   2> 3980735 INFO  (jetty-launcher-18243-thread-3) [n:127.0.0.1:32960_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 3980753 INFO  (jetty-launcher-18243-thread-1) [n:127.0.0.1:45011_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 3980771 INFO  (jetty-launcher-18243-thread-3) [n:127.0.0.1:32960_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@65b080f2
   [junit4]   2> 3980786 INFO  (jetty-launcher-18243-thread-1) [n:127.0.0.1:45011_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@65b080f2
   [junit4]   2> 3980794 INFO  (jetty-launcher-18243-thread-3) [n:127.0.0.1:32960_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@65b080f2
   [junit4]   2> 3980794 INFO  (jetty-launcher-18243-thread-3) [n:127.0.0.1:32960_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@65b080f2
   [junit4]   2> 3980796 INFO  (jetty-launcher-18243-thread-3) [n:127.0.0.1:32960_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_10677DD7C00EF4B3-001/tempDir-002/node3/.
   [junit4]   2> 3980809 INFO  (jetty-launcher-18243-thread-1) [n:127.0.0.1:45011_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@65b080f2
   [junit4]   2> 3980809 INFO  (jetty-launcher-18243-thread-1) [n:127.0.0.1:45011_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@65b080f2
   [junit4]   2> 3980811 INFO  (jetty-launcher-18243-thread-1) [n:127.0.0.1:45011_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_10677DD7C00EF4B3-001/tempDir-002/node1/.
   [junit4]   2> 3980913 INFO  (zkConnectionManagerCallback-18305-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3980915 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[10677DD7C00EF4B3]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 3980916 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[10677DD7C00EF4B3]) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:34849/solr ready
   [junit4]   2> 3980923 INFO  (qtp1282395907-122894) [n:127.0.0.1:32960_solr    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/cluster params={wt=javabin&version=2} status=0 QTime=1
   [junit4]   2> 3980925 INFO  (qtp2090721238-122873) [n:127.0.0.1:35101_solr    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf&maxShardsPerNode=2&autoAddReplicas=true&name=testSimple1&nrtReplicas=2&action=CREATE&numShards=2&createNodeSet=127.0.0.1:35101_solr,127.0.0.1:45011_solr&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 3980928 INFO  (OverseerThreadFactory-14941-thread-1-processing-n:127.0.0.1:35101_solr) [n:127.0.0.1:35101_solr    ] o.a.s.c.a.c.CreateCollectionCmd Create collection testSimple1
   [junit4]   2> 3981039 INFO  (OverseerStateUpdate-73403151157035015-127.0.0.1:35101_solr-n_0000000000) [n:127.0.0.1:35101_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"testSimple1",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"testSimple1_shard1_replica_n1",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:35101/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 3981042 INFO  (OverseerStateUpdate-73403151157035015-127.0.0.1:35101_solr-n_0000000000) [n:127.0.0.1:35101_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"testSimple1",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"testSimple1_shard1_replica_n2",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:45011/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 3981046 INFO  (OverseerStateUpdate-73403151157035015-127.0.0.1:35101_solr-n_0000000000) [n:127.0.0.1:35101_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"testSimple1",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"testSimple1_shard2_replica_n5",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:35101/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 3981050 INFO  (OverseerStateUpdate-73403151157035015-127.0.0.1:35101_solr-n_0000000000) [n:127.0.0.1:35101_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"testSimple1",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"testSimple1_shard2_replica_n7",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:45011/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 3981254 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr    x:testSimple1_shard1_replica_n1] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node3&collection.configName=conf&newCollection=true&name=testSimple1_shard1_replica_n1&action=CREATE&numShards=2&collection=testSimple1&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 3981254 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr    x:testSimple1_shard2_replica_n5] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node6&collection.configName=conf&newCollection=true&name=testSimple1_shard2_replica_n5&action=CREATE&numShards=2&collection=testSimple1&shard=shard2&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 3981255 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr    x:testSimple1_shard2_replica_n5] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 3981257 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr    x:testSimple1_shard1_replica_n2] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node4&collection.configName=conf&newCollection=true&name=testSimple1_shard1_replica_n2&action=CREATE&numShards=2&collection=testSimple1&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 3981259 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr    x:testSimple1_shard2_replica_n7] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node8&collection.configName=conf&newCollection=true&name=testSimple1_shard2_replica_n7&action=CREATE&numShards=2&collection=testSimple1&shard=shard2&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 3982270 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 3982278 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 3982280 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.s.IndexSchema [testSimple1_shard2_replica_n5] Schema name=minimal
   [junit4]   2> 3982305 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 3982305 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 3982305 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.CoreContainer Creating SolrCore 'testSimple1_shard2_replica_n5' using configuration from collection testSimple1, trusted=true
   [junit4]   2> 3982306 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.testSimple1.shard2.replica_n5' (registry 'solr.core.testSimple1.shard2.replica_n5') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@65b080f2
   [junit4]   2> 3982306 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 3982306 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost:45877/solr_hdfs_home
   [junit4]   2> 3982306 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 3982307 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.SolrCore [[testSimple1_shard2_replica_n5] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_10677DD7C00EF4B3-001/tempDir-002/node2/testSimple1_shard2_replica_n5], dataDir=[hdfs://localhost:45877/solr_hdfs_home/testSimple1/core_node6/data/]
   [junit4]   2> 3982309 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:45877/solr_hdfs_home/testSimple1/core_node6/data/snapshot_metadata
   [junit4]   2> 3982317 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 3982318 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 3982318 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 3982323 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.s.IndexSchema [testSimple1_shard1_replica_n1] Schema name=minimal
   [junit4]   2> 3982323 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.s.IndexSchema [testSimple1_shard2_replica_n7] Schema name=minimal
   [junit4]   2> 3982323 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.s.IndexSchema [testSimple1_shard1_replica_n2] Schema name=minimal
   [junit4]   2> 3982325 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 3982326 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.CoreContainer Creating SolrCore 'testSimple1_shard2_replica_n7' using configuration from collection testSimple1, trusted=true
   [junit4]   2> 3982326 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.testSimple1.shard2.replica_n7' (registry 'solr.core.testSimple1.shard2.replica_n7') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@65b080f2
   [junit4]   2> 3982326 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost:45877/solr_hdfs_home
   [junit4]   2> 3982326 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 3982326 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.SolrCore [[testSimple1_shard2_replica_n7] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_10677DD7C00EF4B3-001/tempDir-002/node1/testSimple1_shard2_replica_n7], dataDir=[hdfs://localhost:45877/solr_hdfs_home/testSimple1/core_node8/data/]
   [junit4]   2> 3982327 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 3982328 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:45877/solr_hdfs_home/testSimple1/core_node6/data
   [junit4]   2> 3982329 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 3982329 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:45877/solr_hdfs_home/testSimple1/core_node8/data/snapshot_metadata
   [junit4]   2> 3982329 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.c.CoreContainer Creating SolrCore 'testSimple1_shard1_replica_n2' using configuration from collection testSimple1, trusted=true
   [junit4]   2> 3982329 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.testSimple1.shard1.replica_n2' (registry 'solr.core.testSimple1.shard1.replica_n2') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@65b080f2
   [junit4]   2> 3982329 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost:45877/solr_hdfs_home
   [junit4]   2> 3982330 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 3982330 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 3982330 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.CoreContainer Creating SolrCore 'testSimple1_shard1_replica_n1' using configuration from collection testSimple1, trusted=true
   [junit4]   2> 3982330 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.testSimple1.shard1.replica_n1' (registry 'solr.core.testSimple1.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@65b080f2
   [junit4]   2> 3982330 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.c.SolrCore [[testSimple1_shard1_replica_n2] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_10677DD7C00EF4B3-001/tempDir-002/node1/testSimple1_shard1_replica_n2], dataDir=[hdfs://localhost:45877/solr_hdfs_home/testSimple1/core_node4/data/]
   [junit4]   2> 3982331 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost:45877/solr_hdfs_home
   [junit4]   2> 3982331 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 3982331 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.SolrCore [[testSimple1_shard1_replica_n1] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_10677DD7C00EF4B3-001/tempDir-002/node2/testSimple1_shard1_replica_n1], dataDir=[hdfs://localhost:45877/solr_hdfs_home/testSimple1/core_node3/data/]
   [junit4]   2> 3982333 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:45877/solr_hdfs_home/testSimple1/core_node3/data/snapshot_metadata
   [junit4]   2> 3982335 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:45877/solr_hdfs_home/testSimple1/core_node4/data/snapshot_metadata
   [junit4]   2> 3982337 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 3982337 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 3982337 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 3982347 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 3982347 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 3982347 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 3982347 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 3982348 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:45877/solr_hdfs_home/testSimple1/core_node8/data
   [junit4]   2> 3982349 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 3982349 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 3982349 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 3982359 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 3982360 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:45877/solr_hdfs_home/testSimple1/core_node4/data
   [junit4]   2> 3982365 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 3982370 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:45877/solr_hdfs_home/testSimple1/core_node3/data
   [junit4]   2> 3982371 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:45877/solr_hdfs_home/testSimple1/core_node8/data/index
   [junit4]   2> 3982376 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:45877/solr_hdfs_home/testSimple1/core_node6/data/index
   [junit4]   2> 3982381 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 3982381 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 3982381 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 3982386 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 3982387 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 3982387 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 3982387 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 3982391 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:45877/solr_hdfs_home/testSimple1/core_node4/data/index
   [junit4]   2> 3982393 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:45877/solr_hdfs_home/testSimple1/core_node3/data/index
   [junit4]   2> 3982401 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 3982401 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 3982401 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 3982401 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 3982401 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 3982401 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 3982402 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 3982407 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 3982414 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 3982539 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 3982539 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 3982539 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 3982552 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 3982552 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 3982555 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 3982555 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 3982556 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 3982556 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 3982556 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 3982556 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 3982570 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 3982570 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 3982573 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 3982593 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 3982596 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 3982596 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 3982596 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 3982600 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.s.SolrIndexSearcher Opening [Searcher@5aaaed51[testSimple1_shard2_replica_n5] main]
   [junit4]   2> 3982603 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 3982603 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 3982604 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 3982605 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1635146085399789568
   [junit4]   2> 3982612 INFO  (searcherExecutor-14957-thread-1-processing-n:127.0.0.1:35101_solr x:testSimple1_shard2_replica_n5 c:testSimple1 s:shard2 r:core_node6) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.SolrCore [testSimple1_shard2_replica_n5] Registered new searcher Searcher@5aaaed51[testSimple1_shard2_replica_n5] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 3982613 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.s.SolrIndexSearcher Opening [Searcher@4fc36120[testSimple1_shard2_replica_n7] main]
   [junit4]   2> 3982615 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 3982615 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 3982616 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 3982616 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1635146085411323904
   [junit4]   2> 3982616 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.s.SolrIndexSearcher Opening [Searcher@4f106e3c[testSimple1_shard1_replica_n2] main]
   [junit4]   2> 3982617 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.ZkShardTerms Successful update of terms at /collections/testSimple1/terms/shard2 to Terms{values={core_node6=0}, version=0}
   [junit4]   2> 3982617 INFO  (qtp2090721238-122872) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/testSimple1/leaders/shard2
   [junit4]   2> 3982618 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 3982619 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 3982619 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 3982619 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 3982620 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 3982620 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1635146085415518208
   [junit4]   2> 3982620 INFO  (searcherExecutor-14958-thread-1-processing-n:127.0.0.1:45011_solr x:testSimple1_shard2_replica_n7 c:testSimple1 s:shard2 r:core_node8) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.SolrCore [testSimple1_shard2_replica_n7] Registered new searcher Searcher@4fc36120[testSimple1_shard2_replica_n7] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 3982624 INFO  (searcherExecutor-14959-thread-1-processing-n:127.0.0.1:45011_solr x:testSimple1_shard1_replica_n2 c:testSimple1 s:shard1 r:core_node4) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.c.SolrCore [testSimple1_shard1_replica_n2] Registered new searcher Searcher@4f106e3c[testSimple1_shard1_replica_n2] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 3982625 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.ZkShardTerms Successful update of terms at /collections/testSimple1/terms/shard2 to Terms{values={core_node6=0, core_node8=0}, version=1}
   [junit4]   2> 3982626 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/testSimple1/leaders/shard2
   [junit4]   2> 3982630 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.s.SolrIndexSearcher Opening [Searcher@667364eb[testSimple1_shard1_replica_n1] main]
   [junit4]   2> 3982631 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 3982631 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 3982631 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:45011/solr/testSimple1_shard2_replica_n7/
   [junit4]   2> 3982631 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.c.ZkShardTerms Successful update of terms at /collections/testSimple1/terms/shard1 to Terms{values={core_node4=0}, version=0}
   [junit4]   2> 3982631 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/testSimple1/leaders/shard1
   [junit4]   2> 3982632 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 3982632 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.u.PeerSync PeerSync: core=testSimple1_shard2_replica_n7 url=http://127.0.0.1:45011/solr START replicas=[http://127.0.0.1:35101/solr/testSimple1_shard2_replica_n5/] nUpdates=100
   [junit4]   2> 3982632 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.u.PeerSync PeerSync: core=testSimple1_shard2_replica_n7 url=http://127.0.0.1:45011/solr DONE.  We have no versions.  sync failed.
   [junit4]   2> 3982633 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 3982633 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 3982634 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1635146085430198272
   [junit4]   2> 3982634 INFO  (qtp2090721238-122875) [n:127.0.0.1:35101_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.S.Request [testSimple1_shard2_replica_n5]  webapp=/solr path=/get params={distrib=false&qt=/get&fingerprint=false&getVersions=100&wt=javabin&version=2} status=0 QTime=0
   [junit4]   2> 3982634 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.SyncStrategy Leader's attempt to sync with shard failed, moving to the next candidate
   [junit4]   2> 3982634 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.ShardLeaderElectionContext We failed sync, but we have no versions - we can't sync in that case - we were active before, so become leader anyway
   [junit4]   2> 3982634 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.ShardLeaderElectionContextBase Creating leader registration node /collections/testSimple1/leaders/shard2/leader after winning as /collections/testSimple1/leader_elect/shard2/election/73403151157035017-core_node8-n_0000000000
   [junit4]   2> 3982637 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:45011/solr/testSimple1_shard2_replica_n7/ shard2
   [junit4]   2> 3982640 INFO  (searcherExecutor-14960-thread-1-processing-n:127.0.0.1:35101_solr x:testSimple1_shard1_replica_n1 c:testSimple1 s:shard1 r:core_node3) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.SolrCore [testSimple1_shard1_replica_n1] Registered new searcher Searcher@667364eb[testSimple1_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 3982642 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.c.ShardLeaderElectionContext Waiting until we see more replicas up for shard shard1: total=2 found=1 timeoutin=14999ms
   [junit4]   2> 3982645 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.ZkShardTerms Successful update of terms at /collections/testSimple1/terms/shard1 to Terms{values={core_node3=0, core_node4=0}, version=1}
   [junit4]   2> 3982645 INFO  (qtp2090721238-122876) [n:127.0.0.1:35101_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/testSimple1/leaders/shard1
   [junit4]   2> 3982744 INFO  (zkCallback-18269-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimple1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 3982744 INFO  (zkCallback-18269-thread-2) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimple1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 3982744 INFO  (zkCallback-18269-thread-3) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimple1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 3982745 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 3982747 INFO  (qtp1364517442-122885) [n:127.0.0.1:45011_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&coreNodeName=core_node8&collection.configName=conf&newCollection=true&name=testSimple1_shard2_replica_n7&action=CREATE&numShards=2&collection=testSimple1&shard=shard2&wt=javabin&version=2&replicaType=NRT} status=0 QTime=1488
   [junit4]   2> 3982848 INFO  (zkCallback-18269-thread-2) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimple1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 3982848 INFO  (zkCallback-18269-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimple1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 3982848 INFO  (zkCallback-18269-thread-3) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimple1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 3982848 INFO  (zkCallback-18269-thread-4) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimple1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 3983143 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 3983143 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 3983143 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n2] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:45011/solr/testSimple1_shard1_replica_n2/
   [junit4]   2> 3983144 INFO  (qtp1364517442-122884) [n:127.0.0.1:45011_solr c:testSimple1 s:shard1 r:c

[...truncated too long message...]

ieldsOptimizer, DaemonStreamApiTest, TestSimClusterStateProvider, StatelessScriptUpdateProcessorFactoryTest, TestLMJelinekMercerSimilarityFactory, TestBulkSchemaConcurrent, TestPivotHelperCode, CdcrBootstrapTest, HdfsSyncSliceTest, HdfsChaosMonkeyNothingIsSafeTest, CreateRoutedAliasTest, TestNumericTokenStream, TestMultiWordSynonyms, TestSimGenericDistributedQueue, TestSQLHandlerNonCloud, TestSolrIndexConfig, TestFacetMethods, TestAuthorizationFramework, OverseerModifyCollectionTest, MultiThreadedOCPTest, TestPerFieldSimilarity, TestFuzzyAnalyzedSuggestions, HdfsCollectionsAPIDistributedZkTest, TestUtils, ScheduledTriggerTest, CircularListTest, TestJsonFacets, TestImplicitCoreProperties, TestFieldResource, RAMDirectoryFactoryTest, TestCollectionAPI, TestHashQParserPlugin, SolrJmxReporterCloudTest, JavabinLoaderTest, TestConfig, DirectSolrConnectionTest, SchemaApiFailureTest, TestLuceneMatchVersion, ImplicitSnitchTest, NodeLostTriggerIntegrationTest, CoreAdminOperationTest, TestComplexPhraseQParserPlugin, SignificantTermsQParserPluginTest, ScriptEngineTest, IgnoreLargeDocumentProcessorFactoryTest, MoreLikeThisComponentTest, SuggesterFSTTest, TestSubQueryTransformerDistrib, TestSolrCLIRunExample, TlogReplayBufferedWhileIndexingTest, TestQueryTypes, SparseHLLTest, OutOfBoxZkACLAndCredentialsProvidersTest, OrderedExecutorTest, TestStressRecovery, TestIndexingPerformance, TestUnifiedSolrHighlighter, TokenizerChainTest, TestDistributedMap, BasicDistributedZkTest, XmlInterpolationTest, ByteBuffersDirectoryFactoryTest, TestValueSourceCache, HttpPartitionTest, TestExactSharedStatsCache, LeaderElectionIntegrationTest, TestJsonRequest, TestOmitPositions, CloneFieldUpdateProcessorFactoryTest, TestExceedMaxTermLength, TransactionLogTest, TestHashPartitioner, ZkSolrClientTest, SoftAutoCommitTest, TestGraphMLResponseWriter, WrapperMergePolicyFactoryTest, TestManagedSchema, SOLR749Test, TestRemoteStreaming, MetricsConfigTest, MigrateRouteKeyTest, HighlighterMaxOffsetTest, 
BadCopyFieldTest, TestCloudPivotFacet, PreAnalyzedFieldTest, TestRestManager, TestSolrFieldCacheBean, StatsComponentTest, TestReplicationHandler, TestRealTimeGet, FullSolrCloudDistribCmdsTest, VersionInfoTest, TriggerEventQueueTest, TestNumericTerms64, TestSolr4Spatial, DirectUpdateHandlerTest, AlternateDirectoryTest, SortByFunctionTest, MinimalSchemaTest, TestBM25SimilarityFactory, DistributedFacetSimpleRefinementLongTailTest, ResponseHeaderTest, FullHLLTest, AnalysisAfterCoreReloadTest, BasicFunctionalityTest, ConvertedLegacyTest, CursorPagingTest, DisMaxRequestHandlerTest, DistributedIntervalFacetingTest, SampleTest, SolrInfoBeanTest, SolrTestCaseJ4Test, TestCrossCoreJoin, TestCursorMarkWithoutUniqueKey, TestDistributedGrouping, TestDistributedMissingSort, TestDistributedSearch, TestDocumentBuilder, TestGroupingSearch, TestHighlightDedupGrouping, TestJoin, TestRandomDVFaceting, TestSimpleTrackingShardHandler, PathHierarchyTokenizerFactoryTest, ProtectedTermFilterFactoryTest, TestCharFilters, TestJettySolrRunner, ConnectionReuseTest, ActionThrottleTest, AddReplicaTest, AliasIntegrationTest, ChaosMonkeySafeLeaderWithPullReplicasTest, ClusterStateTest, ClusterStateUpdateTest, CollectionPropsTest, ConcurrentCreateRoutedAliasTest, ConfigSetsAPITest, ConnectionManagerTest, DeleteLastCustomShardedReplicaTest, DeleteNodeTest, DeleteReplicaTest, DistributedVersionInfoTest, LeaderVoteWaitTimeoutTest, MoveReplicaHDFSTest, OverseerTest, RecoveryZkTest, RemoteQueryErrorTest, ReplaceNodeNoTargetTest, ReplaceNodeTest, ReplicationFactorTest, RoutingToNodesWithPropertiesTest, SSLMigrationTest, SaslZkACLProviderTest, ShardRoutingCustomTest, ShardRoutingTest, SharedFSAutoReplicaFailoverTest, SliceStateTest, SolrCLIZkUtilsTest, SolrCloudExampleTest, SyncSliceTest, TestCloudDeleteByQuery, TestCloudRecovery2, TestClusterProperties, TestConfigSetsAPI, TestConfigSetsAPIExclusivity, TestConfigSetsAPIZkFailure, TestCryptoKeys, TestLeaderElectionZkExpiry, TestPrepRecovery, 
TestRandomFlRTGCloud, TestShortCircuitedRequests, TestSkipOverseerOperations, TestSolrCloudWithDelegationTokens, TestSolrCloudWithSecureImpersonation, TestStressCloudBlindAtomicUpdates, TestTolerantUpdateProcessorRandomCloud, ZkControllerTest, ZkFailoverTest, ZkNodePropsTest, AsyncCallRequestStatusResponseTest, CollectionReloadTest, CollectionTooManyReplicasTest, CollectionsAPIAsyncDistributedZkTest, TestHdfsCloudBackupRestore, TestLocalFSCloudBackupRestore, TestReplicaProperties, AutoAddReplicasIntegrationTest, HdfsAutoAddReplicasIntegrationTest]
   [junit4] Completed [573/858 (1!)] on J1 in 82.40s, 1 test, 1 failure <<< FAILURES!

[...truncated 48233 lines...]
-ecj-javadoc-lint-tests:
    [mkdir] Created dir: /tmp/ecj42026678
 [ecj-lint] Compiling 48 source files to /tmp/ecj42026678
 [ecj-lint] invalid Class-Path header in manifest of jar file: /x1/jenkins/.ivy2/cache/org.restlet.jee/org.restlet/jars/org.restlet-2.3.0.jar
 [ecj-lint] invalid Class-Path header in manifest of jar file: /x1/jenkins/.ivy2/cache/org.restlet.jee/org.restlet.ext.servlet/jars/org.restlet.ext.servlet-2.3.0.jar
 [ecj-lint] ----------
 [ecj-lint] 1. ERROR in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 23)
 [ecj-lint] 	import javax.naming.NamingException;
 [ecj-lint] 	       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] The type javax.naming.NamingException is not accessible
 [ecj-lint] ----------
 [ecj-lint] 2. ERROR in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 28)
 [ecj-lint] 	public class MockInitialContextFactory implements InitialContextFactory {
 [ecj-lint] 	             ^^^^^^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] The type MockInitialContextFactory must implement the inherited abstract method InitialContextFactory.getInitialContext(Hashtable<?,?>)
 [ecj-lint] ----------
 [ecj-lint] 3. ERROR in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 30)
 [ecj-lint] 	private final javax.naming.Context context;
 [ecj-lint] 	              ^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] The type javax.naming.Context is not accessible
 [ecj-lint] ----------
 [ecj-lint] 4. ERROR in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 33)
 [ecj-lint] 	context = mock(javax.naming.Context.class);
 [ecj-lint] 	^^^^^^^
 [ecj-lint] context cannot be resolved to a variable
 [ecj-lint] ----------
 [ecj-lint] 5. ERROR in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 33)
 [ecj-lint] 	context = mock(javax.naming.Context.class);
 [ecj-lint] 	               ^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] The type javax.naming.Context is not accessible
 [ecj-lint] ----------
 [ecj-lint] 6. ERROR in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 36)
 [ecj-lint] 	when(context.lookup(anyString())).thenAnswer(invocation -> objects.get(invocation.getArgument(0)));
 [ecj-lint] 	     ^^^^^^^
 [ecj-lint] context cannot be resolved
 [ecj-lint] ----------
 [ecj-lint] 7. ERROR in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 38)
 [ecj-lint] 	} catch (NamingException e) {
 [ecj-lint] 	         ^^^^^^^^^^^^^^^
 [ecj-lint] NamingException cannot be resolved to a type
 [ecj-lint] ----------
 [ecj-lint] 8. ERROR in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 45)
 [ecj-lint] 	public javax.naming.Context getInitialContext(Hashtable env) {
 [ecj-lint] 	       ^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] The type javax.naming.Context is not accessible
 [ecj-lint] ----------
 [ecj-lint] 9. ERROR in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 46)
 [ecj-lint] 	return context;
 [ecj-lint] 	       ^^^^^^^
 [ecj-lint] context cannot be resolved to a variable
 [ecj-lint] ----------
 [ecj-lint] 9 problems (9 errors)

BUILD FAILED
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/build.xml:652: The following error occurred while executing this line:
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/build.xml:101: The following error occurred while executing this line:
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build.xml:651: The following error occurred while executing this line:
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/common-build.xml:479: The following error occurred while executing this line:
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/common-build.xml:2015: The following error occurred while executing this line:
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/common-build.xml:2048: Compile failed; see the compiler error output for details.

Total time: 338 minutes 57 seconds
Build step 'Invoke Ant' marked build as failure
Archiving artifacts
Recording test results
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any

[JENKINS] Lucene-Solr-NightlyTests-master - Build # 1858 - Still unstable

Posted by Apache Jenkins Server <je...@builds.apache.org>.
Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-master/1858/

1 tests failed.
FAILED:  org.apache.solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest.testSimple

Error Message:
Waiting for collection testSimple2 Timeout waiting to see state for collection=testSimple2 :DocCollection(testSimple2//collections/testSimple2/state.json/23)={   "pullReplicas":"0",   "replicationFactor":"2",   "shards":{     "shard1":{       "range":"80000000-ffffffff",       "state":"active",       "replicas":{         "core_node3":{           "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node3/data/",           "base_url":"http://127.0.0.1:41913/solr",           "node_name":"127.0.0.1:41913_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node3/data/tlog",           "core":"testSimple2_shard1_replica_n1",           "shared_storage":"true",           "state":"active",           "leader":"true"},         "core_node5":{           "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node5/data/",           "base_url":"http://127.0.0.1:46515/solr",           "node_name":"127.0.0.1:46515_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node5/data/tlog",           "core":"testSimple2_shard1_replica_n2",           "shared_storage":"true",           "state":"down"}}},     "shard2":{       "range":"0-7fffffff",       "state":"active",       "replicas":{         "core_node7":{           "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node7/data/",           "base_url":"http://127.0.0.1:41913/solr",           "node_name":"127.0.0.1:41913_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node7/data/tlog",           "core":"testSimple2_shard2_replica_n4",           "shared_storage":"true",           "state":"active",           "leader":"true"},         "core_node8":{           
"dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node8/data/",           "base_url":"http://127.0.0.1:46515/solr",           "node_name":"127.0.0.1:46515_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node8/data/tlog",           "core":"testSimple2_shard2_replica_n6",           "shared_storage":"true",           "state":"down"}}}},   "router":{"name":"compositeId"},   "maxShardsPerNode":"2",   "autoAddReplicas":"true",   "nrtReplicas":"2",   "tlogReplicas":"0"} Live Nodes: [127.0.0.1:41913_solr, 127.0.0.1:43581_solr] Last available state: DocCollection(testSimple2//collections/testSimple2/state.json/23)={   "pullReplicas":"0",   "replicationFactor":"2",   "shards":{     "shard1":{       "range":"80000000-ffffffff",       "state":"active",       "replicas":{         "core_node3":{           "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node3/data/",           "base_url":"http://127.0.0.1:41913/solr",           "node_name":"127.0.0.1:41913_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node3/data/tlog",           "core":"testSimple2_shard1_replica_n1",           "shared_storage":"true",           "state":"active",           "leader":"true"},         "core_node5":{           "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node5/data/",           "base_url":"http://127.0.0.1:46515/solr",           "node_name":"127.0.0.1:46515_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node5/data/tlog",           "core":"testSimple2_shard1_replica_n2",           "shared_storage":"true",           "state":"down"}}},     "shard2":{       "range":"0-7fffffff",       "state":"active",       "replicas":{         "core_node7":{           
"dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node7/data/",           "base_url":"http://127.0.0.1:41913/solr",           "node_name":"127.0.0.1:41913_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node7/data/tlog",           "core":"testSimple2_shard2_replica_n4",           "shared_storage":"true",           "state":"active",           "leader":"true"},         "core_node8":{           "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node8/data/",           "base_url":"http://127.0.0.1:46515/solr",           "node_name":"127.0.0.1:46515_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node8/data/tlog",           "core":"testSimple2_shard2_replica_n6",           "shared_storage":"true",           "state":"down"}}}},   "router":{"name":"compositeId"},   "maxShardsPerNode":"2",   "autoAddReplicas":"true",   "nrtReplicas":"2",   "tlogReplicas":"0"}

Stack Trace:
java.lang.AssertionError: Waiting for collection testSimple2
Timeout waiting to see state for collection=testSimple2 :DocCollection(testSimple2//collections/testSimple2/state.json/23)={
  "pullReplicas":"0",
  "replicationFactor":"2",
  "shards":{
    "shard1":{
      "range":"80000000-ffffffff",
      "state":"active",
      "replicas":{
        "core_node3":{
          "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node3/data/",
          "base_url":"http://127.0.0.1:41913/solr",
          "node_name":"127.0.0.1:41913_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node3/data/tlog",
          "core":"testSimple2_shard1_replica_n1",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"},
        "core_node5":{
          "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node5/data/",
          "base_url":"http://127.0.0.1:46515/solr",
          "node_name":"127.0.0.1:46515_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node5/data/tlog",
          "core":"testSimple2_shard1_replica_n2",
          "shared_storage":"true",
          "state":"down"}}},
    "shard2":{
      "range":"0-7fffffff",
      "state":"active",
      "replicas":{
        "core_node7":{
          "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node7/data/",
          "base_url":"http://127.0.0.1:41913/solr",
          "node_name":"127.0.0.1:41913_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node7/data/tlog",
          "core":"testSimple2_shard2_replica_n4",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"},
        "core_node8":{
          "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node8/data/",
          "base_url":"http://127.0.0.1:46515/solr",
          "node_name":"127.0.0.1:46515_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node8/data/tlog",
          "core":"testSimple2_shard2_replica_n6",
          "shared_storage":"true",
          "state":"down"}}}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"2",
  "autoAddReplicas":"true",
  "nrtReplicas":"2",
  "tlogReplicas":"0"}
Live Nodes: [127.0.0.1:41913_solr, 127.0.0.1:43581_solr]
Last available state: DocCollection(testSimple2//collections/testSimple2/state.json/23)={
  "pullReplicas":"0",
  "replicationFactor":"2",
  "shards":{
    "shard1":{
      "range":"80000000-ffffffff",
      "state":"active",
      "replicas":{
        "core_node3":{
          "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node3/data/",
          "base_url":"http://127.0.0.1:41913/solr",
          "node_name":"127.0.0.1:41913_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node3/data/tlog",
          "core":"testSimple2_shard1_replica_n1",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"},
        "core_node5":{
          "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node5/data/",
          "base_url":"http://127.0.0.1:46515/solr",
          "node_name":"127.0.0.1:46515_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node5/data/tlog",
          "core":"testSimple2_shard1_replica_n2",
          "shared_storage":"true",
          "state":"down"}}},
    "shard2":{
      "range":"0-7fffffff",
      "state":"active",
      "replicas":{
        "core_node7":{
          "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node7/data/",
          "base_url":"http://127.0.0.1:41913/solr",
          "node_name":"127.0.0.1:41913_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node7/data/tlog",
          "core":"testSimple2_shard2_replica_n4",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"},
        "core_node8":{
          "dataDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node8/data/",
          "base_url":"http://127.0.0.1:46515/solr",
          "node_name":"127.0.0.1:46515_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:37562/solr_hdfs_home/testSimple2/core_node8/data/tlog",
          "core":"testSimple2_shard2_replica_n6",
          "shared_storage":"true",
          "state":"down"}}}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"2",
  "autoAddReplicas":"true",
  "nrtReplicas":"2",
  "tlogReplicas":"0"}
	at __randomizedtesting.SeedInfo.seed([361D0D430A63287:3BD2F42A1755E656]:0)
	at org.junit.Assert.fail(Assert.java:88)
	at org.apache.solr.cloud.SolrCloudTestCase.waitForState(SolrCloudTestCase.java:310)
	at org.apache.solr.cloud.autoscaling.AutoAddReplicasIntegrationTest.testSimple(AutoAddReplicasIntegrationTest.java:169)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.base/java.lang.Thread.run(Thread.java:834)




Build Log:
[...truncated 14756 lines...]
   [junit4] Suite: org.apache.solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest
   [junit4]   2> 2980352 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
   [junit4]   2> Creating dataDir: /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_361D0D430A63287-001/init-core-data-001
   [junit4]   2> 2980353 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP=true
   [junit4]   2> 2980354 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.util.RandomizeSSL(reason="", ssl=0.0/0.0, value=0.0/0.0, clientAuth=0.0/0.0)
   [junit4]   1> Formatting using clusterid: testClusterID
   [junit4]   2> 2980419 WARN  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] o.a.h.m.i.MetricsConfig Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
   [junit4]   2> 2980435 WARN  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 2980438 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 2980439 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 2980439 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 2980439 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 2980440 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@6d01064b{static,/static,jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 2980668 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@27cf48f3{hdfs,/,file:///x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/jetty-localhost-35532-hdfs-_-any-8530677796192721066.dir/webapp/,AVAILABLE}{/hdfs}
   [junit4]   2> 2980669 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] o.e.j.s.AbstractConnector Started ServerConnector@1187843d{HTTP/1.1,[http/1.1]}{localhost:35532}
   [junit4]   2> 2980669 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] o.e.j.s.Server Started @2980734ms
   [junit4]   2> 2980748 WARN  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 2980750 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 2980751 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 2980751 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 2980751 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 2980751 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@bda7ed7{static,/static,jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 2980926 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@4c12ea3{datanode,/,file:///x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/jetty-localhost-35336-datanode-_-any-4716850712572889762.dir/webapp/,AVAILABLE}{/datanode}
   [junit4]   2> 2980926 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] o.e.j.s.AbstractConnector Started ServerConnector@112e080c{HTTP/1.1,[http/1.1]}{localhost:35336}
   [junit4]   2> 2980926 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[361D0D430A63287]-worker) [    ] o.e.j.s.Server Started @2980992ms
   [junit4]   2> 2981101 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x43539fc59a27abe7: Processing first storage report for DS-040cda49-d6f2-494a-8530-00b6a7a01fd6 from datanode 256d0d71-b1d4-4ba5-908b-e9c8fa080cc3
   [junit4]   2> 2981102 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x43539fc59a27abe7: from storage DS-040cda49-d6f2-494a-8530-00b6a7a01fd6 node DatanodeRegistration(127.0.0.1:40143, datanodeUuid=256d0d71-b1d4-4ba5-908b-e9c8fa080cc3, infoPort=34049, infoSecurePort=0, ipcPort=41564, storageInfo=lv=-57;cid=testClusterID;nsid=1842625238;c=1559264232657), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 2981102 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x43539fc59a27abe7: Processing first storage report for DS-9502dc70-d4c3-4373-aa7e-2a0c83d266fd from datanode 256d0d71-b1d4-4ba5-908b-e9c8fa080cc3
   [junit4]   2> 2981102 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x43539fc59a27abe7: from storage DS-9502dc70-d4c3-4373-aa7e-2a0c83d266fd node DatanodeRegistration(127.0.0.1:40143, datanodeUuid=256d0d71-b1d4-4ba5-908b-e9c8fa080cc3, infoPort=34049, infoSecurePort=0, ipcPort=41564, storageInfo=lv=-57;cid=testClusterID;nsid=1842625238;c=1559264232657), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 2981136 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[361D0D430A63287]) [    ] o.a.s.SolrTestCaseJ4 ###Starting testSimple
   [junit4]   2> 2981137 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[361D0D430A63287]) [    ] o.a.s.c.MiniSolrCloudCluster Starting cluster of 3 servers in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_361D0D430A63287-001/tempDir-002
   [junit4]   2> 2981137 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[361D0D430A63287]) [    ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 2981138 INFO  (ZkTestServer Run Thread) [    ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 2981138 INFO  (ZkTestServer Run Thread) [    ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 2981238 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[361D0D430A63287]) [    ] o.a.s.c.ZkTestServer start zk server on port:33581
   [junit4]   2> 2981238 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[361D0D430A63287]) [    ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:33581
   [junit4]   2> 2981238 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[361D0D430A63287]) [    ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 33581
   [junit4]   2> 2981248 INFO  (zkConnectionManagerCallback-14872-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981255 INFO  (zkConnectionManagerCallback-14874-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981257 INFO  (zkConnectionManagerCallback-14876-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981261 WARN  (jetty-launcher-14877-thread-2) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 2981261 WARN  (jetty-launcher-14877-thread-3) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 2981261 WARN  (jetty-launcher-14877-thread-1) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 2981261 INFO  (jetty-launcher-14877-thread-3) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 2981261 INFO  (jetty-launcher-14877-thread-3) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 2981261 INFO  (jetty-launcher-14877-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 2981261 INFO  (jetty-launcher-14877-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 2981262 INFO  (jetty-launcher-14877-thread-3) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 2981262 INFO  (jetty-launcher-14877-thread-2) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 2981262 INFO  (jetty-launcher-14877-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 2981262 INFO  (jetty-launcher-14877-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 2981262 INFO  (jetty-launcher-14877-thread-1) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 2981265 INFO  (jetty-launcher-14877-thread-1) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 2981265 INFO  (jetty-launcher-14877-thread-1) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 2981265 INFO  (jetty-launcher-14877-thread-1) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 2981265 INFO  (jetty-launcher-14877-thread-1) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@4e5a0d38{/solr,null,AVAILABLE}
   [junit4]   2> 2981265 INFO  (jetty-launcher-14877-thread-3) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 2981265 INFO  (jetty-launcher-14877-thread-3) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 2981265 INFO  (jetty-launcher-14877-thread-3) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-3) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@5ca0d666{/solr,null,AVAILABLE}
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-1) [    ] o.e.j.s.AbstractConnector Started ServerConnector@28bbc790{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:41913}
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-1) [    ] o.e.j.s.Server Started @2981331ms
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=41913}
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-3) [    ] o.e.j.s.AbstractConnector Started ServerConnector@3e5111be{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:43581}
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-3) [    ] o.e.j.s.Server Started @2981332ms
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-3) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=43581}
   [junit4]   2> 2981266 ERROR (jetty-launcher-14877-thread-1) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-1) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-1) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 9.0.0
   [junit4]   2> 2981266 ERROR (jetty-launcher-14877-thread-3) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-1) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-3) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-1) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-1) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-05-31T00:57:13.529769Z
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-3) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 9.0.0
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-3) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-3) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2981266 INFO  (jetty-launcher-14877-thread-3) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-05-31T00:57:13.529889Z
   [junit4]   2> 2981267 INFO  (jetty-launcher-14877-thread-2) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 2981267 INFO  (jetty-launcher-14877-thread-2) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 2981267 INFO  (jetty-launcher-14877-thread-2) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 2981268 INFO  (jetty-launcher-14877-thread-2) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@15132aad{/solr,null,AVAILABLE}
   [junit4]   2> 2981269 INFO  (jetty-launcher-14877-thread-2) [    ] o.e.j.s.AbstractConnector Started ServerConnector@635b805f{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:46515}
   [junit4]   2> 2981269 INFO  (jetty-launcher-14877-thread-2) [    ] o.e.j.s.Server Started @2981334ms
   [junit4]   2> 2981269 INFO  (jetty-launcher-14877-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=46515}
   [junit4]   2> 2981269 ERROR (jetty-launcher-14877-thread-2) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 2981269 INFO  (jetty-launcher-14877-thread-2) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 2981269 INFO  (jetty-launcher-14877-thread-2) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 9.0.0
   [junit4]   2> 2981269 INFO  (jetty-launcher-14877-thread-2) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 2981269 INFO  (jetty-launcher-14877-thread-2) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2981269 INFO  (jetty-launcher-14877-thread-2) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-05-31T00:57:13.532829Z
   [junit4]   2> 2981270 INFO  (zkConnectionManagerCallback-14881-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981271 INFO  (zkConnectionManagerCallback-14879-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981271 INFO  (jetty-launcher-14877-thread-3) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 2981271 INFO  (jetty-launcher-14877-thread-1) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 2981272 INFO  (zkConnectionManagerCallback-14883-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981272 INFO  (jetty-launcher-14877-thread-2) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 2981275 INFO  (jetty-launcher-14877-thread-1) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 2981275 INFO  (jetty-launcher-14877-thread-1) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 2981275 INFO  (jetty-launcher-14877-thread-2) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 2981275 INFO  (jetty-launcher-14877-thread-2) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 2981277 INFO  (jetty-launcher-14877-thread-1) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 2981277 INFO  (jetty-launcher-14877-thread-2) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 2981280 INFO  (jetty-launcher-14877-thread-3) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 2981280 INFO  (jetty-launcher-14877-thread-3) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 2981282 INFO  (jetty-launcher-14877-thread-3) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 2981384 INFO  (jetty-launcher-14877-thread-3) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 2981385 WARN  (jetty-launcher-14877-thread-3) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@5cdf813a[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2981389 WARN  (jetty-launcher-14877-thread-3) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@73dca498[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2981390 INFO  (jetty-launcher-14877-thread-3) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:33581/solr
   [junit4]   2> 2981393 INFO  (zkConnectionManagerCallback-14892-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981396 INFO  (zkConnectionManagerCallback-14894-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981500 INFO  (jetty-launcher-14877-thread-2) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 2981501 WARN  (jetty-launcher-14877-thread-2) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@68c21517[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2981504 WARN  (jetty-launcher-14877-thread-2) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@48a3eb44[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2981506 INFO  (jetty-launcher-14877-thread-2) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:33581/solr
   [junit4]   2> 2981507 INFO  (zkConnectionManagerCallback-14902-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981509 INFO  (jetty-launcher-14877-thread-3) [n:127.0.0.1:43581_solr    ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:43581_solr
   [junit4]   2> 2981510 INFO  (zkConnectionManagerCallback-14904-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981510 INFO  (jetty-launcher-14877-thread-3) [n:127.0.0.1:43581_solr    ] o.a.s.c.Overseer Overseer (id=73394461771694087-127.0.0.1:43581_solr-n_0000000000) starting
   [junit4]   2> 2981521 INFO  (zkConnectionManagerCallback-14911-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981524 INFO  (jetty-launcher-14877-thread-3) [n:127.0.0.1:43581_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:33581/solr ready
   [junit4]   2> 2981530 INFO  (jetty-launcher-14877-thread-2) [n:127.0.0.1:46515_solr    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:46515_solr
   [junit4]   2> 2981542 INFO  (OverseerStateUpdate-73394461771694087-127.0.0.1:43581_solr-n_0000000000) [n:127.0.0.1:43581_solr    ] o.a.s.c.Overseer Starting to work on the main queue : 127.0.0.1:43581_solr
   [junit4]   2> 2981543 INFO  (OverseerStateUpdate-73394461771694087-127.0.0.1:43581_solr-n_0000000000) [n:127.0.0.1:43581_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 2981546 INFO  (jetty-launcher-14877-thread-3) [n:127.0.0.1:43581_solr    ] o.a.s.c.ZkController Publish node=127.0.0.1:43581_solr as DOWN
   [junit4]   2> 2981548 INFO  (jetty-launcher-14877-thread-3) [n:127.0.0.1:43581_solr    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 2981548 INFO  (jetty-launcher-14877-thread-3) [n:127.0.0.1:43581_solr    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:43581_solr
   [junit4]   2> 2981548 INFO  (zkCallback-14910-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 2981552 INFO  (zkCallback-14910-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 2981566 INFO  (zkCallback-14903-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 2981574 INFO  (zkCallback-14893-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 2981582 INFO  (jetty-launcher-14877-thread-3) [n:127.0.0.1:43581_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 2981584 INFO  (zkConnectionManagerCallback-14916-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981587 INFO  (jetty-launcher-14877-thread-2) [n:127.0.0.1:46515_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 2981588 INFO  (jetty-launcher-14877-thread-2) [n:127.0.0.1:46515_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:33581/solr ready
   [junit4]   2> 2981641 INFO  (jetty-launcher-14877-thread-2) [n:127.0.0.1:46515_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 2981644 INFO  (jetty-launcher-14877-thread-3) [n:127.0.0.1:43581_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2981683 INFO  (jetty-launcher-14877-thread-3) [n:127.0.0.1:43581_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2981684 INFO  (jetty-launcher-14877-thread-3) [n:127.0.0.1:43581_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2981688 INFO  (jetty-launcher-14877-thread-3) [n:127.0.0.1:43581_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_361D0D430A63287-001/tempDir-002/node3/.
   [junit4]   2> 2981697 INFO  (jetty-launcher-14877-thread-2) [n:127.0.0.1:46515_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2981704 INFO  (jetty-launcher-14877-thread-1) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 2981706 WARN  (jetty-launcher-14877-thread-1) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@439a77a7[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2981709 WARN  (jetty-launcher-14877-thread-1) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@3df98575[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2981710 INFO  (jetty-launcher-14877-thread-1) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:33581/solr
   [junit4]   2> 2981712 INFO  (zkConnectionManagerCallback-14924-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981716 INFO  (zkConnectionManagerCallback-14926-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981718 INFO  (jetty-launcher-14877-thread-2) [n:127.0.0.1:46515_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2981718 INFO  (jetty-launcher-14877-thread-2) [n:127.0.0.1:46515_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2981720 INFO  (jetty-launcher-14877-thread-2) [n:127.0.0.1:46515_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_361D0D430A63287-001/tempDir-002/node2/.
   [junit4]   2> 2981723 INFO  (jetty-launcher-14877-thread-1) [n:127.0.0.1:41913_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 2981731 INFO  (jetty-launcher-14877-thread-1) [n:127.0.0.1:41913_solr    ] o.a.s.c.ZkController Publish node=127.0.0.1:41913_solr as DOWN
   [junit4]   2> 2981732 INFO  (jetty-launcher-14877-thread-1) [n:127.0.0.1:41913_solr    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 2981732 INFO  (jetty-launcher-14877-thread-1) [n:127.0.0.1:41913_solr    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:41913_solr
   [junit4]   2> 2981734 INFO  (zkCallback-14903-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 2981734 INFO  (zkCallback-14910-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 2981734 INFO  (zkCallback-14893-thread-2) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 2981734 INFO  (zkCallback-14915-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 2981736 INFO  (zkCallback-14925-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 2981739 INFO  (zkConnectionManagerCallback-14933-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981741 INFO  (jetty-launcher-14877-thread-1) [n:127.0.0.1:41913_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 2981742 INFO  (jetty-launcher-14877-thread-1) [n:127.0.0.1:41913_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:33581/solr ready
   [junit4]   2> 2981763 INFO  (jetty-launcher-14877-thread-1) [n:127.0.0.1:41913_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 2981795 INFO  (jetty-launcher-14877-thread-1) [n:127.0.0.1:41913_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2981816 INFO  (jetty-launcher-14877-thread-1) [n:127.0.0.1:41913_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2981816 INFO  (jetty-launcher-14877-thread-1) [n:127.0.0.1:41913_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2981818 INFO  (jetty-launcher-14877-thread-1) [n:127.0.0.1:41913_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_361D0D430A63287-001/tempDir-002/node1/.
   [junit4]   2> 2981896 INFO  (zkConnectionManagerCallback-14939-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 2981898 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[361D0D430A63287]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 2981900 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[361D0D430A63287]) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:33581/solr ready
   [junit4]   2> 2981907 INFO  (qtp1194898800-43151) [n:127.0.0.1:41913_solr    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/cluster params={wt=javabin&version=2} status=0 QTime=2
   [junit4]   2> 2981910 INFO  (qtp502828306-43156) [n:127.0.0.1:46515_solr    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf&maxShardsPerNode=2&autoAddReplicas=true&name=testSimple1&nrtReplicas=2&action=CREATE&numShards=2&createNodeSet=127.0.0.1:43581_solr,127.0.0.1:46515_solr&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 2981914 INFO  (OverseerThreadFactory-12067-thread-1-processing-n:127.0.0.1:43581_solr) [n:127.0.0.1:43581_solr    ] o.a.s.c.a.c.CreateCollectionCmd Create collection testSimple1
   [junit4]   2> 2982024 INFO  (OverseerStateUpdate-73394461771694087-127.0.0.1:43581_solr-n_0000000000) [n:127.0.0.1:43581_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"testSimple1",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"testSimple1_shard1_replica_n1",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:43581/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 2982027 INFO  (OverseerStateUpdate-73394461771694087-127.0.0.1:43581_solr-n_0000000000) [n:127.0.0.1:43581_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"testSimple1",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"testSimple1_shard1_replica_n2",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:46515/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 2982031 INFO  (OverseerStateUpdate-73394461771694087-127.0.0.1:43581_solr-n_0000000000) [n:127.0.0.1:43581_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"testSimple1",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"testSimple1_shard2_replica_n4",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:43581/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 2982033 INFO  (OverseerStateUpdate-73394461771694087-127.0.0.1:43581_solr-n_0000000000) [n:127.0.0.1:43581_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"testSimple1",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"testSimple1_shard2_replica_n6",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:46515/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 2982237 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr    x:testSimple1_shard1_replica_n2] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node5&collection.configName=conf&newCollection=true&name=testSimple1_shard1_replica_n2&action=CREATE&numShards=2&collection=testSimple1&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 2982237 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr    x:testSimple1_shard1_replica_n2] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 2982238 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr    x:testSimple1_shard2_replica_n6] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node8&collection.configName=conf&newCollection=true&name=testSimple1_shard2_replica_n6&action=CREATE&numShards=2&collection=testSimple1&shard=shard2&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 2982238 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr    x:testSimple1_shard2_replica_n4] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node7&collection.configName=conf&newCollection=true&name=testSimple1_shard2_replica_n4&action=CREATE&numShards=2&collection=testSimple1&shard=shard2&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 2982238 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr    x:testSimple1_shard1_replica_n1] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node3&collection.configName=conf&newCollection=true&name=testSimple1_shard1_replica_n1&action=CREATE&numShards=2&collection=testSimple1&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 2983252 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 2983252 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 2983252 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 2983280 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 2983289 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.s.IndexSchema [testSimple1_shard1_replica_n1] Schema name=minimal
   [junit4]   2> 2983289 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.s.IndexSchema [testSimple1_shard2_replica_n6] Schema name=minimal
   [junit4]   2> 2983289 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.s.IndexSchema [testSimple1_shard1_replica_n2] Schema name=minimal
   [junit4]   2> 2983291 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.s.IndexSchema [testSimple1_shard2_replica_n4] Schema name=minimal
   [junit4]   2> 2983291 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 2983291 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 2983291 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.CoreContainer Creating SolrCore 'testSimple1_shard2_replica_n6' using configuration from collection testSimple1, trusted=true
   [junit4]   2> 2983291 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.c.CoreContainer Creating SolrCore 'testSimple1_shard1_replica_n2' using configuration from collection testSimple1, trusted=true
   [junit4]   2> 2983291 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 2983292 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.CoreContainer Creating SolrCore 'testSimple1_shard1_replica_n1' using configuration from collection testSimple1, trusted=true
   [junit4]   2> 2983292 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.testSimple1.shard2.replica_n6' (registry 'solr.core.testSimple1.shard2.replica_n6') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2983292 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.testSimple1.shard1.replica_n2' (registry 'solr.core.testSimple1.shard1.replica_n2') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2983292 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.testSimple1.shard1.replica_n1' (registry 'solr.core.testSimple1.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2983293 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 2983293 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.CoreContainer Creating SolrCore 'testSimple1_shard2_replica_n4' using configuration from collection testSimple1, trusted=true
   [junit4]   2> 2983294 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.testSimple1.shard2.replica_n4' (registry 'solr.core.testSimple1.shard2.replica_n4') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@266cae0c
   [junit4]   2> 2983295 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost:37562/solr_hdfs_home
   [junit4]   2> 2983295 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 2983295 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.SolrCore [[testSimple1_shard2_replica_n6] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_361D0D430A63287-001/tempDir-002/node2/testSimple1_shard2_replica_n6], dataDir=[hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node8/data/]
   [junit4]   2> 2983296 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost:37562/solr_hdfs_home
   [junit4]   2> 2983296 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 2983296 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.SolrCore [[testSimple1_shard1_replica_n1] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_361D0D430A63287-001/tempDir-002/node3/testSimple1_shard1_replica_n1], dataDir=[hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node3/data/]
   [junit4]   2> 2983296 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost:37562/solr_hdfs_home
   [junit4]   2> 2983296 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 2983296 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.c.SolrCore [[testSimple1_shard1_replica_n2] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_361D0D430A63287-001/tempDir-002/node2/testSimple1_shard1_replica_n2], dataDir=[hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node5/data/]
   [junit4]   2> 2983296 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost:37562/solr_hdfs_home
   [junit4]   2> 2983297 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 2983297 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.SolrCore [[testSimple1_shard2_replica_n4] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_361D0D430A63287-001/tempDir-002/node3/testSimple1_shard2_replica_n4], dataDir=[hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node7/data/]
   [junit4]   2> 2983297 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node8/data/snapshot_metadata
   [junit4]   2> 2983306 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node5/data/snapshot_metadata
   [junit4]   2> 2983306 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node7/data/snapshot_metadata
   [junit4]   2> 2983306 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node3/data/snapshot_metadata
   [junit4]   2> 2983313 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 2983313 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 2983313 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 2983314 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 2983314 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 2983314 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 2983314 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 2983314 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 2983314 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 2983321 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 2983321 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 2983321 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 2983327 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 2983328 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 2983329 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node7/data
   [junit4]   2> 2983331 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node5/data
   [junit4]   2> 2983340 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 2983342 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node3/data
   [junit4]   2> 2983353 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node7/data/index
   [junit4]   2> 2983356 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node5/data/index
   [junit4]   2> 2983361 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 2983361 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 2983361 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 2983363 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node3/data/index
   [junit4]   2> 2983363 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 2983363 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 2983363 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 2983367 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 2983371 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 2983376 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 2983376 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 2983376 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 2983385 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 2983473 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 2983474 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node8/data
   [junit4]   2> 2983526 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:37562/solr_hdfs_home/testSimple1/core_node8/data/index
   [junit4]   2> 2983536 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 2983536 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 2983536 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 2983545 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 2983545 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 2983545 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 2983546 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 2983547 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 2983547 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 2983549 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 2983549 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 2983558 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 2983564 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 2983564 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 2983582 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 2983582 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 2983582 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 2983582 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.s.SolrIndexSearcher Opening [Searcher@1800cf53[testSimple1_shard1_replica_n1] main]
   [junit4]   2> 2983583 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.s.SolrIndexSearcher Opening [Searcher@584223c2[testSimple1_shard1_replica_n2] main]
   [junit4]   2> 2983584 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 2983585 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 2983586 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 2983586 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 2983587 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 2983587 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1635007055370649600
   [junit4]   2> 2983590 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 2983591 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1635007055374843904
   [junit4]   2> 2983593 INFO  (searcherExecutor-12084-thread-1-processing-n:127.0.0.1:46515_solr x:testSimple1_shard1_replica_n2 c:testSimple1 s:shard1 r:core_node5) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.c.SolrCore [testSimple1_shard1_replica_n2] Registered new searcher Searcher@584223c2[testSimple1_shard1_replica_n2] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2983594 INFO  (searcherExecutor-12085-thread-1-processing-n:127.0.0.1:43581_solr x:testSimple1_shard1_replica_n1 c:testSimple1 s:shard1 r:core_node3) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.SolrCore [testSimple1_shard1_replica_n1] Registered new searcher Searcher@1800cf53[testSimple1_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2983599 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.ZkShardTerms Successful update of terms at /collections/testSimple1/terms/shard1 to Terms{values={core_node3=0}, version=0}
   [junit4]   2> 2983600 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/testSimple1/leaders/shard1
   [junit4]   2> 2983600 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.c.ZkShardTerms Successful update of terms at /collections/testSimple1/terms/shard1 to Terms{values={core_node3=0, core_node5=0}, version=1}
   [junit4]   2> 2983601 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/testSimple1/leaders/shard1
   [junit4]   2> 2983605 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 2983605 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 2983606 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 2983606 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 2983606 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:43581/solr/testSimple1_shard1_replica_n1/
   [junit4]   2> 2983607 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.u.PeerSync PeerSync: core=testSimple1_shard1_replica_n1 url=http://127.0.0.1:43581/solr START replicas=[http://127.0.0.1:46515/solr/testSimple1_shard1_replica_n2/] nUpdates=100
   [junit4]   2> 2983609 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.u.PeerSync PeerSync: core=testSimple1_shard1_replica_n1 url=http://127.0.0.1:43581/solr DONE.  We have no versions.  sync failed.
   [junit4]   2> 2983613 INFO  (qtp502828306-43164) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.c.S.Request [testSimple1_shard1_replica_n2]  webapp=/solr path=/get params={distrib=false&qt=/get&fingerprint=false&getVersions=100&wt=javabin&version=2} status=0 QTime=1
   [junit4]   2> 2983613 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.SyncStrategy Leader's attempt to sync with shard failed, moving to the next candidate
   [junit4]   2> 2983613 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext We failed sync, but we have no versions - we can't sync in that case - we were active before, so become leader anyway
   [junit4]   2> 2983613 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContextBase Creating leader registration node /collections/testSimple1/leaders/shard1/leader after winning as /collections/testSimple1/leader_elect/shard1/election/73394461771694087-core_node3-n_0000000000
   [junit4]   2> 2983616 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:43581/solr/testSimple1_shard1_replica_n1/ shard1
   [junit4]   2> 2983622 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.s.SolrIndexSearcher Opening [Searcher@433c2d97[testSimple1_shard2_replica_n4] main]
   [junit4]   2> 2983623 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 2983624 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 2983625 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 2983625 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1635007055410495488
   [junit4]   2> 2983628 INFO  (searcherExecutor-12086-thread-1-processing-n:127.0.0.1:43581_solr x:testSimple1_shard2_replica_n4 c:testSimple1 s:shard2 r:core_node7) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.SolrCore [testSimple1_shard2_replica_n4] Registered new searcher Searcher@433c2d97[testSimple1_shard2_replica_n4] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2983630 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.ZkShardTerms Successful update of terms at /collections/testSimple1/terms/shard2 to Terms{values={core_node7=0}, version=0}
   [junit4]   2> 2983630 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/testSimple1/leaders/shard2
   [junit4]   2> 2983636 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.ShardLeaderElectionContext Waiting until we see more replicas up for shard shard2: total=2 found=1 timeoutin=14999ms
   [junit4]   2> 2983737 INFO  (zkCallback-14893-thread-2) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimple1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 2983737 INFO  (zkCallback-14893-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimple1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 2983739 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 2983742 INFO  (qtp2116305613-43158) [n:127.0.0.1:43581_solr c:testSimple1 s:shard1 r:core_node3 x:testSimple1_shard1_replica_n1] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&coreNodeName=core_node3&collection.configName=conf&newCollection=true&name=testSimple1_shard1_replica_n1&action=CREATE&numShards=2&collection=testSimple1&shard=shard1&wt=javabin&version=2&replicaType=NRT} status=0 QTime=1503
   [junit4]   2> 2983842 INFO  (zkCallback-14893-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimple1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 2983842 INFO  (zkCallback-14893-thread-2) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimple1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 2983842 INFO  (zkCallback-14893-thread-3) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimple1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 2984108 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 2984108 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 2984108 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 2984121 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 2984121 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 2984134 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.s.SolrIndexSearcher Opening [Searcher@787559aa[testSimple1_shard2_replica_n6] main]
   [junit4]   2> 2984135 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 2984136 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 2984136 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 2984137 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1635007055947366400
   [junit4]   2> 2984141 INFO  (searcherExecutor-12083-thread-1-processing-n:127.0.0.1:46515_solr x:testSimple1_shard2_replica_n6 c:testSimple1 s:shard2 r:core_node8) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.SolrCore [testSimple1_shard2_replica_n6] Registered new searcher Searcher@787559aa[testSimple1_shard2_replica_n6] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2984144 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.ZkShardTerms Successful update of terms at /collections/testSimple1/terms/shard2 to Terms{values={core_node7=0, core_node8=0}, version=1}
   [junit4]   2> 2984144 INFO  (qtp502828306-43152) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/testSimple1/leaders/shard2
   [junit4]   2> 2984612 INFO  (qtp502828306-43163) [n:127.0.0.1:46515_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n2] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&coreNodeName=core_node5&collection.configName=conf&newCollection=true&name=testSimple1_shard1_replica_n2&action=CREATE&numShards=2&collection=testSimple1&shard=shard1&wt=javabin&version=2&replicaType=NRT} status=0 QTime=2375
   [junit4]   2> 2984637 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 2984637 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 2984637 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:43581/solr/testSimple1_shard2_replica_n4/
   [junit4]   2> 2984637 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.u.PeerSync PeerSync: core=testSimple1_shard2_replica_n4 url=http://127.0.0.1:43581/solr START replicas=[http://127.0.0.1:46515/solr/testSimple1_shard2_replica_n6/] nUpdates=100
   [junit4]   2> 2984638 INFO  (qtp2116305613-43154) [n:127.0.0.1:43581_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.u.PeerSync PeerSync: core=testSimple1_shard2_replica_n4 url=http://127.0.0.1:43581/solr DONE.  We have no versions.  sync failed.
   [junit4]   2> 2984639 INFO  (qtp502828306-43160) [n:127.0.0.1:46515_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a

[...truncated too long message...]

ail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

jar-checksums:
    [mkdir] Created dir: /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/null666246468
     [copy] Copying 240 files to /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/null666246468
   [delete] Deleting directory /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/null666246468

check-working-copy:
[ivy:cachepath] :: resolving dependencies :: #;working@lucene1-us-west
[ivy:cachepath] 	confs: [default]
[ivy:cachepath] 	found org.eclipse.jgit#org.eclipse.jgit;5.3.0.201903130848-r in public
[ivy:cachepath] 	found com.jcraft#jsch;0.1.54 in public
[ivy:cachepath] 	found com.jcraft#jzlib;1.1.1 in public
[ivy:cachepath] 	found com.googlecode.javaewah#JavaEWAH;1.1.6 in public
[ivy:cachepath] 	found org.slf4j#slf4j-api;1.7.2 in public
[ivy:cachepath] 	found org.bouncycastle#bcpg-jdk15on;1.60 in public
[ivy:cachepath] 	found org.bouncycastle#bcprov-jdk15on;1.60 in public
[ivy:cachepath] 	found org.bouncycastle#bcpkix-jdk15on;1.60 in public
[ivy:cachepath] 	found org.slf4j#slf4j-nop;1.7.2 in public
[ivy:cachepath] :: resolution report :: resolve 53ms :: artifacts dl 4ms
	---------------------------------------------------------------------
	|                  |            modules            ||   artifacts   |
	|       conf       | number| search|dwnlded|evicted|| number|dwnlded|
	---------------------------------------------------------------------
	|      default     |   9   |   0   |   0   |   0   ||   9   |   0   |
	---------------------------------------------------------------------
[wc-checker] Initializing working copy...
[wc-checker] Checking working copy status...

-jenkins-base:

BUILD SUCCESSFUL
Total time: 327 minutes 44 seconds
Archiving artifacts
java.lang.InterruptedException: no matches found within 10000
	at hudson.FilePath$ValidateAntFileMask.hasMatch(FilePath.java:2847)
	at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2726)
	at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2707)
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3086)
Also:   hudson.remoting.Channel$CallSiteStackTrace: Remote call to lucene
		at hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1741)
		at hudson.remoting.UserRequest$ExceptionResponse.retrieve(UserRequest.java:357)
		at hudson.remoting.Channel.call(Channel.java:955)
		at hudson.FilePath.act(FilePath.java:1072)
		at hudson.FilePath.act(FilePath.java:1061)
		at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
		at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
		at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
		at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
		at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
		at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
		at hudson.model.Build$BuildExecution.post2(Build.java:186)
		at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
		at hudson.model.Run.execute(Run.java:1835)
		at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
		at hudson.model.ResourceController.execute(ResourceController.java:97)
		at hudson.model.Executor.run(Executor.java:429)
Caused: hudson.FilePath$TunneledInterruptedException
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3088)
	at hudson.remoting.UserRequest.perform(UserRequest.java:212)
	at hudson.remoting.UserRequest.perform(UserRequest.java:54)
	at hudson.remoting.Request$2.run(Request.java:369)
	at hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:748)
Caused: java.lang.InterruptedException: java.lang.InterruptedException: no matches found within 10000
	at hudson.FilePath.act(FilePath.java:1074)
	at hudson.FilePath.act(FilePath.java:1061)
	at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
	at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
	at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
	at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
	at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
	at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
	at hudson.model.Build$BuildExecution.post2(Build.java:186)
	at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
	at hudson.model.Run.execute(Run.java:1835)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:97)
	at hudson.model.Executor.run(Executor.java:429)
No artifacts found that match the file pattern "**/*.events,heapdumps/**,**/hs_err_pid*". Configuration error?
Recording test results
Build step 'Publish JUnit test result report' changed build result to UNSTABLE
Email was triggered for: Unstable (Test Failures)
Sending email for trigger: Unstable (Test Failures)

[JENKINS] Lucene-Solr-NightlyTests-master - Build # 1857 - Still Failing

Posted by Apache Jenkins Server <je...@builds.apache.org>.
Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-master/1857/

1 tests failed.
FAILED:  org.apache.solr.cloud.autoscaling.sim.TestSimLargeCluster.testAddNode

Error Message:
did not finish processing all events in time: started=1, finished=0

Stack Trace:
java.lang.AssertionError: did not finish processing all events in time: started=1, finished=0
	at __randomizedtesting.SeedInfo.seed([7D2F3D5486215375:DAC020F7496CDC6D]:0)
	at org.junit.Assert.fail(Assert.java:88)
	at org.apache.solr.cloud.autoscaling.sim.TestSimLargeCluster.testAddNode(TestSimLargeCluster.java:341)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.base/java.lang.Thread.run(Thread.java:834)




Build Log:
[...truncated 13756 lines...]
   [junit4] Suite: org.apache.solr.cloud.autoscaling.sim.TestSimLargeCluster
   [junit4]   2> Creating dataDir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.sim.TestSimLargeCluster_7D2F3D5486215375-001/init-core-data-001
   [junit4]   2> 1404782 WARN  (SUITE-TestSimLargeCluster-seed#[7D2F3D5486215375]-worker) [    ] o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=2 numCloses=2
   [junit4]   2> 1404782 INFO  (SUITE-TestSimLargeCluster-seed#[7D2F3D5486215375]-worker) [    ] o.a.s.SolrTestCaseJ4 Using TrieFields (NUMERIC_POINTS_SYSPROP=false) w/NUMERIC_DOCVALUES_SYSPROP=true
   [junit4]   2> 1404783 INFO  (SUITE-TestSimLargeCluster-seed#[7D2F3D5486215375]-worker) [    ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.util.RandomizeSSL(reason="", ssl=0.0/0.0, value=0.0/0.0, clientAuth=0.0/0.0)
   [junit4]   2> 1404783 INFO  (SUITE-TestSimLargeCluster-seed#[7D2F3D5486215375]-worker) [    ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
   [junit4]   2> 1404891 INFO  (TEST-TestSimLargeCluster.testNodeLost-seed#[7D2F3D5486215375]) [    ] o.a.s.SolrTestCaseJ4 ###Starting testNodeLost
   [junit4]   2> 1405082 DEBUG (TEST-TestSimLargeCluster.testNodeLost-seed#[7D2F3D5486215375]) [    ] o.a.s.c.a.s.SimClusterStateProvider --- new Overseer leader: 127.0.0.1:10000_solr
   [junit4]   2> 1405098 DEBUG (Simulated OverseerAutoScalingTriggerThread) [    ] o.a.s.c.a.OverseerTriggerThread Adding .auto_add_replicas and .scheduled_maintenance triggers
   [junit4]   2> 1405098 DEBUG (Simulated OverseerAutoScalingTriggerThread) [    ] o.a.s.c.a.OverseerTriggerThread Refreshing /autoscaling.json with znode version 1
   [junit4]   2> 1405099 DEBUG (Simulated OverseerAutoScalingTriggerThread) [    ] o.a.s.c.a.OverseerTriggerThread Current znodeVersion 1, lastZnodeVersion 0
   [junit4]   2> 1405099 DEBUG (Simulated OverseerAutoScalingTriggerThread) [    ] o.a.s.c.a.OverseerTriggerThread Processed trigger updates upto znodeVersion 1
   [junit4]   2> 1405134 DEBUG (TEST-TestSimLargeCluster.testNodeLost-seed#[7D2F3D5486215375]) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=0
   [junit4]   2> 1405136 DEBUG (TEST-TestSimLargeCluster.testNodeLost-seed#[7D2F3D5486215375]) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 0
   [junit4]   2> 1405136 INFO  (TEST-TestSimLargeCluster.testNodeLost-seed#[7D2F3D5486215375]) [    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 1405170 DEBUG (Simulated OverseerAutoScalingTriggerThread) [    ] o.a.s.c.a.NodeLostTrigger NodeLostTrigger .auto_add_replicas - Initial livenodes: [127.0.0.1:10043_solr, 127.0.0.1:10004_solr, 127.0.0.1:10018_solr, 127.0.0.1:10049_solr, 127.0.0.1:10026_solr, 127.0.0.1:10012_solr, 127.0.0.1:10020_solr, 127.0.0.1:10005_solr, 127.0.0.1:10013_solr, 127.0.0.1:10021_solr, 127.0.0.1:10035_solr, 127.0.0.1:10019_solr, 127.0.0.1:10027_solr, 127.0.0.1:10022_solr, 127.0.0.1:10039_solr, 127.0.0.1:10033_solr, 127.0.0.1:10025_solr, 127.0.0.1:10017_solr, 127.0.0.1:10047_solr, 127.0.0.1:10044_solr, 127.0.0.1:10028_solr, 127.0.0.1:10036_solr, 127.0.0.1:10006_solr, 127.0.0.1:10041_solr, 127.0.0.1:10009_solr, 127.0.0.1:10000_solr, 127.0.0.1:10030_solr, 127.0.0.1:10014_solr, 127.0.0.1:10003_solr, 127.0.0.1:10011_solr, 127.0.0.1:10001_solr, 127.0.0.1:10040_solr, 127.0.0.1:10046_solr, 127.0.0.1:10023_solr, 127.0.0.1:10007_solr, 127.0.0.1:10015_solr, 127.0.0.1:10010_solr, 127.0.0.1:10016_solr, 127.0.0.1:10038_solr, 127.0.0.1:10032_solr, 127.0.0.1:10008_solr, 127.0.0.1:10024_solr, 127.0.0.1:10002_solr, 127.0.0.1:10031_solr, 127.0.0.1:10034_solr, 127.0.0.1:10045_solr, 127.0.0.1:10037_solr, 127.0.0.1:10042_solr, 127.0.0.1:10029_solr, 127.0.0.1:10048_solr]
   [junit4]   2> 1405190 DEBUG (Simulated OverseerAutoScalingTriggerThread) [    ] o.a.s.c.a.OverseerTriggerThread -- cleaning old nodeLost / nodeAdded markers
   [junit4]   2> 1405190 DEBUG (Simulated OverseerAutoScalingTriggerThread) [    ] o.a.s.c.a.OverseerTriggerThread Current znodeVersion 1, lastZnodeVersion 1
   [junit4]   2> 1405198 DEBUG (ScheduledTrigger-1529-thread-2) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: .auto_add_replicas with currently live nodes: 75 and last live nodes: 50
   [junit4]   2> 1405280 DEBUG (ScheduledTrigger-1529-thread-1) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: .auto_add_replicas with currently live nodes: 99 and last live nodes: 75
   [junit4]   2> 1405292 DEBUG (ScheduledTrigger-1529-thread-2) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: .auto_add_replicas with currently live nodes: 99 and last live nodes: 99
   [junit4]   2> 1405302 DEBUG (ScheduledTrigger-1529-thread-1) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: .auto_add_replicas with currently live nodes: 100 and last live nodes: 99
   [junit4]   2> 1405316 DEBUG (ScheduledTrigger-1529-thread-1) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: .auto_add_replicas with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1405336 DEBUG (ScheduledTrigger-1529-thread-1) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: .auto_add_replicas with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1405352 DEBUG (ScheduledTrigger-1529-thread-1) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: .auto_add_replicas with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1405389 DEBUG (ScheduledTrigger-1529-thread-1) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: .auto_add_replicas with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1405409 DEBUG (ScheduledTrigger-1529-thread-1) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: .auto_add_replicas with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1405425 DEBUG (ScheduledTrigger-1529-thread-1) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: .auto_add_replicas with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1405439 DEBUG (simCloudManagerPool-1528-thread-2) [    ] o.a.s.c.a.AutoScalingHandler Verified autoscaling configuration
   [junit4]   2> 1405440 DEBUG (simCloudManagerPool-1528-thread-2) [    ] o.a.s.c.a.OverseerTriggerThread Refreshing /autoscaling.json with znode version 2
   [junit4]   2> 1405446 DEBUG (ScheduledTrigger-1529-thread-1) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: .auto_add_replicas with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1405462 DEBUG (Simulated OverseerAutoScalingTriggerThread) [    ] o.a.s.c.a.OverseerTriggerThread Processed trigger updates upto znodeVersion 2
   [junit4]   2> 1405462 DEBUG (Simulated OverseerAutoScalingTriggerThread) [    ] o.a.s.c.a.OverseerTriggerThread -- cleaning old nodeLost / nodeAdded markers
   [junit4]   2> 1405462 DEBUG (Simulated OverseerAutoScalingTriggerThread) [    ] o.a.s.c.a.OverseerTriggerThread Current znodeVersion 2, lastZnodeVersion 2
   [junit4]   2> 1405462 DEBUG (ScheduledTrigger-1529-thread-1) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: .auto_add_replicas with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1405482 DEBUG (ScheduledTrigger-1529-thread-1) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: .auto_add_replicas with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1405498 DEBUG (ScheduledTrigger-1529-thread-1) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: .auto_add_replicas with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1405498 DEBUG (simCloudManagerPool-1528-thread-4) [    ] o.a.s.c.a.AutoScalingHandler Verified autoscaling configuration
   [junit4]   2> 1405498 DEBUG (simCloudManagerPool-1528-thread-4) [    ] o.a.s.c.a.OverseerTriggerThread Refreshing /autoscaling.json with znode version 3
   [junit4]   2> 1405499 DEBUG (Simulated OverseerAutoScalingTriggerThread) [    ] o.a.s.c.a.OverseerTriggerThread Processed trigger updates upto znodeVersion 3
   [junit4]   2> 1405499 DEBUG (Simulated OverseerAutoScalingTriggerThread) [    ] o.a.s.c.a.OverseerTriggerThread -- cleaning old nodeLost / nodeAdded markers
   [junit4]   2> 1405499 DEBUG (Simulated OverseerAutoScalingTriggerThread) [    ] o.a.s.c.a.OverseerTriggerThread Current znodeVersion 3, lastZnodeVersion 3
   [junit4]   2> 1405499 DEBUG (TEST-TestSimLargeCluster.testNodeLost-seed#[7D2F3D5486215375]) [    ] o.a.s.c.a.s.SimClusterStateProvider -- simCreateCollection .system, currentVersion=1
   [junit4]   2> 1405499 DEBUG (TEST-TestSimLargeCluster.testNodeLost-seed#[7D2F3D5486215375]) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=1
   [junit4]   2> 1405553 DEBUG (TEST-TestSimLargeCluster.testNodeLost-seed#[7D2F3D5486215375]) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 1
   [junit4]   2> 1405608 DEBUG (TEST-TestSimLargeCluster.testNodeLost-seed#[7D2F3D5486215375]) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=2
   [junit4]   2> 1405610 DEBUG (TEST-TestSimLargeCluster.testNodeLost-seed#[7D2F3D5486215375]) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 2
   [junit4]   2> 1405696 DEBUG (simCloudManagerPool-1528-thread-5) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=3
   [junit4]   2> 1405766 DEBUG (simCloudManagerPool-1528-thread-5) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 3
   [junit4]   2> 1405766 DEBUG (simCloudManagerPool-1528-thread-5) [    ] o.a.s.c.a.s.SimClusterStateProvider Running leader election (.system / shard1)
   [junit4]   2> 1405767 DEBUG (simCloudManagerPool-1528-thread-5) [    ] o.a.s.c.a.s.SimClusterStateProvider -- elected new leader for .system / shard1 (currentVersion=4): {"core_node1":{
   [junit4]   2>     "core":".system_shard1_replica_n1",
   [junit4]   2>     "shard":"shard1",
   [junit4]   2>     "collection":".system",
   [junit4]   2>     "node_name":"127.0.0.1:10037_solr",
   [junit4]   2>     "type":"NRT",
   [junit4]   2>     "leader":"true",
   [junit4]   2>     "SEARCHER.searcher.maxDoc":0,
   [junit4]   2>     "SEARCHER.searcher.deletedDocs":0,
   [junit4]   2>     "INDEX.sizeInBytes":10240,
   [junit4]   2>     "state":"active",
   [junit4]   2>     "INDEX.sizeInGB":9.5367431640625E-6,
   [junit4]   2>     "SEARCHER.searcher.numDocs":0}}
   [junit4]   2> 1405818 DEBUG (simCloudManagerPool-1528-thread-6) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=4
   [junit4]   2> 1405958 DEBUG (simCloudManagerPool-1528-thread-6) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 4
   [junit4]   2> 1405972 DEBUG (MetricsHistoryHandler-1532-thread-1) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=5
   [junit4]   2> 1406009 DEBUG (MetricsHistoryHandler-1532-thread-1) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 5
   [junit4]   2> 1406029 DEBUG (simCloudManagerPool-1528-thread-7) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=6
   [junit4]   2> 1406029 DEBUG (simCloudManagerPool-1528-thread-7) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 6
   [junit4]   2> 1406045 DEBUG (TEST-TestSimLargeCluster.testNodeLost-seed#[7D2F3D5486215375]) [    ] o.a.s.c.a.s.SimClusterStateProvider -- finished createCollection .system, currentVersion=7
   [junit4]   2> 1406062 DEBUG (TEST-TestSimLargeCluster.testNodeLost-seed#[7D2F3D5486215375]) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=7
   [junit4]   2> 1406077 DEBUG (TEST-TestSimLargeCluster.testNodeLost-seed#[7D2F3D5486215375]) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 7
   [junit4]   2> 1406303 DEBUG (simCloudManagerPool-1528-thread-8) [    ] o.a.s.c.a.AutoScalingHandler Verified autoscaling configuration
   [junit4]   2> 1406303 DEBUG (simCloudManagerPool-1528-thread-8) [    ] o.a.s.c.a.OverseerTriggerThread Refreshing /autoscaling.json with znode version 4
   [junit4]   2> 1406305 DEBUG (Simulated OverseerAutoScalingTriggerThread) [    ] o.a.s.c.a.OverseerTriggerThread Processed trigger updates upto znodeVersion 4
   [junit4]   2> 1406305 DEBUG (Simulated OverseerAutoScalingTriggerThread) [    ] o.a.s.c.a.NodeLostTrigger NodeLostTrigger node_lost_trigger3 - Initial livenodes: [127.0.0.1:10082_solr, 127.0.0.1:10004_solr, 127.0.0.1:10018_solr, 127.0.0.1:10049_solr, 127.0.0.1:10020_solr, 127.0.0.1:10065_solr, 127.0.0.1:10021_solr, 127.0.0.1:10005_solr, 127.0.0.1:10066_solr, 127.0.0.1:10035_solr, 127.0.0.1:10051_solr, 127.0.0.1:10019_solr, 127.0.0.1:10096_solr, 127.0.0.1:10022_solr, 127.0.0.1:10033_solr, 127.0.0.1:10017_solr, 127.0.0.1:10047_solr, 127.0.0.1:10095_solr, 127.0.0.1:10081_solr, 127.0.0.1:10084_solr, 127.0.0.1:10036_solr, 127.0.0.1:10052_solr, 127.0.0.1:10006_solr, 127.0.0.1:10079_solr, 127.0.0.1:10098_solr, 127.0.0.1:10003_solr, 127.0.0.1:10001_solr, 127.0.0.1:10054_solr, 127.0.0.1:10099_solr, 127.0.0.1:10071_solr, 127.0.0.1:10023_solr, 127.0.0.1:10076_solr, 127.0.0.1:10007_solr, 127.0.0.1:10055_solr, 127.0.0.1:10010_solr, 127.0.0.1:10016_solr, 127.0.0.1:10070_solr, 127.0.0.1:10077_solr, 127.0.0.1:10038_solr, 127.0.0.1:10032_solr, 127.0.0.1:10093_solr, 127.0.0.1:10061_solr, 127.0.0.1:10067_solr, 127.0.0.1:10086_solr, 127.0.0.1:10089_solr, 127.0.0.1:10045_solr, 127.0.0.1:10042_solr, 127.0.0.1:10029_solr, 127.0.0.1:10083_solr, 127.0.0.1:10080_solr, 127.0.0.1:10048_solr, 127.0.0.1:10064_solr, 127.0.0.1:10043_solr, 127.0.0.1:10057_solr, 127.0.0.1:10074_solr, 127.0.0.1:10026_solr, 127.0.0.1:10012_solr, 127.0.0.1:10060_solr, 127.0.0.1:10073_solr, 127.0.0.1:10058_solr, 127.0.0.1:10013_solr, 127.0.0.1:10090_solr, 127.0.0.1:10088_solr, 127.0.0.1:10027_solr, 127.0.0.1:10039_solr, 127.0.0.1:10025_solr, 127.0.0.1:10044_solr, 127.0.0.1:10028_solr, 127.0.0.1:10092_solr, 127.0.0.1:10041_solr, 127.0.0.1:10087_solr, 127.0.0.1:10009_solr, 127.0.0.1:10000_solr, 127.0.0.1:10030_solr, 127.0.0.1:10014_solr, 127.0.0.1:10011_solr, 127.0.0.1:10040_solr, 127.0.0.1:10085_solr, 127.0.0.1:10046_solr, 127.0.0.1:10068_solr, 127.0.0.1:10063_solr, 127.0.0.1:10015_solr, 127.0.0.1:10069_solr, 
127.0.0.1:10062_solr, 127.0.0.1:10008_solr, 127.0.0.1:10024_solr, 127.0.0.1:10050_solr, 127.0.0.1:10075_solr, 127.0.0.1:10078_solr, 127.0.0.1:10002_solr, 127.0.0.1:10097_solr, 127.0.0.1:10031_solr, 127.0.0.1:10034_solr, 127.0.0.1:10091_solr, 127.0.0.1:10094_solr, 127.0.0.1:10037_solr, 127.0.0.1:10053_solr, 127.0.0.1:10059_solr, 127.0.0.1:10056_solr, 127.0.0.1:10072_solr]
   [junit4]   2> 1406305 DEBUG (Simulated OverseerAutoScalingTriggerThread) [    ] o.a.s.c.a.OverseerTriggerThread -- cleaning old nodeLost / nodeAdded markers
   [junit4]   2> 1406305 DEBUG (Simulated OverseerAutoScalingTriggerThread) [    ] o.a.s.c.a.OverseerTriggerThread Current znodeVersion 4, lastZnodeVersion 4
   [junit4]   2> 1406305 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1406321 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1406341 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1406357 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1406379 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1406394 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1406410 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1406427 DEBUG (simCloudManagerPool-1528-thread-10) [    ] o.a.s.c.a.AutoScalingHandler Verified autoscaling configuration
   [junit4]   2> 1406427 DEBUG (simCloudManagerPool-1528-thread-10) [    ] o.a.s.c.a.OverseerTriggerThread Refreshing /autoscaling.json with znode version 5
   [junit4]   2> 1406427 DEBUG (Simulated OverseerAutoScalingTriggerThread) [    ] o.a.s.c.a.OverseerTriggerThread Processed trigger updates upto znodeVersion 5
   [junit4]   2> 1406427 DEBUG (Simulated OverseerAutoScalingTriggerThread) [    ] o.a.s.c.a.OverseerTriggerThread -- cleaning old nodeLost / nodeAdded markers
   [junit4]   2> 1406427 DEBUG (Simulated OverseerAutoScalingTriggerThread) [    ] o.a.s.c.a.OverseerTriggerThread Current znodeVersion 5, lastZnodeVersion 5
   [junit4]   2> 1406430 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1406432 DEBUG (simCloudManagerPool-1528-thread-13) [    ] o.a.s.c.a.s.SimClusterStateProvider -- simCreateCollection testNodeLost, currentVersion=8
   [junit4]   2> 1406432 DEBUG (simCloudManagerPool-1528-thread-13) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=8
   [junit4]   2> 1406432 DEBUG (simCloudManagerPool-1528-thread-13) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 8
   [junit4]   2> 1406433 DEBUG (simCloudManagerPool-1528-thread-14) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=9
   [junit4]   2> 1406433 DEBUG (simCloudManagerPool-1528-thread-14) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 9
   [junit4]   2> 1406447 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1406467 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1406483 DEBUG (simCloudManagerPool-1528-thread-15) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=10
   [junit4]   2> 1406483 DEBUG (simCloudManagerPool-1528-thread-15) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 10
   [junit4]   2> 1406483 DEBUG (simCloudManagerPool-1528-thread-15) [    ] o.a.s.c.a.s.SimClusterStateProvider Running leader election (testNodeLost / shard1)
   [junit4]   2> 1406483 DEBUG (simCloudManagerPool-1528-thread-15) [    ] o.a.s.c.a.s.SimClusterStateProvider -- elected new leader for testNodeLost / shard1 (currentVersion=11): {"core_node1":{
   [junit4]   2>     "core":"testNodeLost_shard1_replica_n1",
   [junit4]   2>     "shard":"shard1",
   [junit4]   2>     "collection":"testNodeLost",
   [junit4]   2>     "node_name":"127.0.0.1:10049_solr",
   [junit4]   2>     "type":"NRT",
   [junit4]   2>     "leader":"true",
   [junit4]   2>     "SEARCHER.searcher.maxDoc":0,
   [junit4]   2>     "SEARCHER.searcher.deletedDocs":0,
   [junit4]   2>     "INDEX.sizeInBytes":10240,
   [junit4]   2>     "state":"active",
   [junit4]   2>     "INDEX.sizeInGB":9.5367431640625E-6,
   [junit4]   2>     "SEARCHER.searcher.numDocs":0}}
   [junit4]   2> 1406483 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1406503 DEBUG (simCloudManagerPool-1528-thread-16) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=11
   [junit4]   2> 1406503 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1406523 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1406539 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1406556 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1406613 DEBUG (simCloudManagerPool-1528-thread-16) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 11
   [junit4]   2> 1406629 DEBUG (simCloudManagerPool-1528-thread-17) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=12
   [junit4]   2> 1408100 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408104 DEBUG (simCloudManagerPool-1528-thread-17) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 12
   [junit4]   2> 1408115 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408123 DEBUG (simCloudManagerPool-1528-thread-17) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=13
   [junit4]   2> 1408123 DEBUG (simCloudManagerPool-1528-thread-17) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 13
   [junit4]   2> 1408123 DEBUG (simCloudManagerPool-1528-thread-17) [    ] o.a.s.c.a.s.SimClusterStateProvider Running leader election (testNodeLost / shard19)
   [junit4]   2> 1408123 DEBUG (simCloudManagerPool-1528-thread-17) [    ] o.a.s.c.a.s.SimClusterStateProvider -- elected new leader for testNodeLost / shard19 (currentVersion=14): {"core_node190":{
   [junit4]   2>     "core":"testNodeLost_shard19_replica_n190",
   [junit4]   2>     "shard":"shard19",
   [junit4]   2>     "collection":"testNodeLost",
   [junit4]   2>     "node_name":"127.0.0.1:10004_solr",
   [junit4]   2>     "type":"NRT",
   [junit4]   2>     "leader":"true",
   [junit4]   2>     "SEARCHER.searcher.maxDoc":0,
   [junit4]   2>     "SEARCHER.searcher.deletedDocs":0,
   [junit4]   2>     "INDEX.sizeInBytes":10240,
   [junit4]   2>     "state":"active",
   [junit4]   2>     "INDEX.sizeInGB":9.5367431640625E-6,
   [junit4]   2>     "SEARCHER.searcher.numDocs":0}}
   [junit4]   2> 1408135 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408139 DEBUG (simCloudManagerPool-1528-thread-17) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=14
   [junit4]   2> 1408140 DEBUG (simCloudManagerPool-1528-thread-17) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 14
   [junit4]   2> 1408140 DEBUG (simCloudManagerPool-1528-thread-17) [    ] o.a.s.c.a.s.SimClusterStateProvider Running leader election (testNodeLost / shard20)
   [junit4]   2> 1408140 DEBUG (simCloudManagerPool-1528-thread-17) [    ] o.a.s.c.a.s.SimClusterStateProvider -- elected new leader for testNodeLost / shard20 (currentVersion=15): {"core_node195":{
   [junit4]   2>     "core":"testNodeLost_shard20_replica_n195",
   [junit4]   2>     "shard":"shard20",
   [junit4]   2>     "collection":"testNodeLost",
   [junit4]   2>     "node_name":"127.0.0.1:10063_solr",
   [junit4]   2>     "type":"NRT",
   [junit4]   2>     "leader":"true",
   [junit4]   2>     "SEARCHER.searcher.maxDoc":0,
   [junit4]   2>     "SEARCHER.searcher.deletedDocs":0,
   [junit4]   2>     "INDEX.sizeInBytes":10240,
   [junit4]   2>     "state":"active",
   [junit4]   2>     "INDEX.sizeInGB":9.5367431640625E-6,
   [junit4]   2>     "SEARCHER.searcher.numDocs":0}}
   [junit4]   2> 1408151 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408159 DEBUG (simCloudManagerPool-1528-thread-18) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=15
   [junit4]   2> 1408161 DEBUG (simCloudManagerPool-1528-thread-18) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 15
   [junit4]   2> 1408171 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408188 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408196 DEBUG (simCloudManagerPool-1528-thread-19) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=16
   [junit4]   2> 1408197 DEBUG (simCloudManagerPool-1528-thread-19) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 16
   [junit4]   2> 1408208 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408212 DEBUG (simCloudManagerPool-1528-thread-20) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=17
   [junit4]   2> 1408213 DEBUG (simCloudManagerPool-1528-thread-20) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 17
   [junit4]   2> 1408224 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408232 DEBUG (simCloudManagerPool-1528-thread-21) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=18
   [junit4]   2> 1408233 DEBUG (simCloudManagerPool-1528-thread-21) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 18
   [junit4]   2> 1408244 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408262 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408277 DEBUG (simCloudManagerPool-1528-thread-24) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=19
   [junit4]   2> 1408281 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408293 DEBUG (simCloudManagerPool-1528-thread-24) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 19
   [junit4]   2> 1408297 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408313 DEBUG (simCloudManagerPool-1528-thread-25) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=20
   [junit4]   2> 1408317 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408330 DEBUG (simCloudManagerPool-1528-thread-25) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 20
   [junit4]   2> 1408330 DEBUG (simCloudManagerPool-1528-thread-25) [    ] o.a.s.c.a.s.SimClusterStateProvider Running leader election (testNodeLost / shard2)
   [junit4]   2> 1408330 DEBUG (simCloudManagerPool-1528-thread-25) [    ] o.a.s.c.a.s.SimClusterStateProvider -- elected new leader for testNodeLost / shard2 (currentVersion=21): {"core_node11":{
   [junit4]   2>     "core":"testNodeLost_shard2_replica_n11",
   [junit4]   2>     "shard":"shard2",
   [junit4]   2>     "collection":"testNodeLost",
   [junit4]   2>     "node_name":"127.0.0.1:10048_solr",
   [junit4]   2>     "type":"NRT",
   [junit4]   2>     "leader":"true",
   [junit4]   2>     "SEARCHER.searcher.maxDoc":0,
   [junit4]   2>     "SEARCHER.searcher.deletedDocs":0,
   [junit4]   2>     "INDEX.sizeInBytes":10240,
   [junit4]   2>     "state":"active",
   [junit4]   2>     "INDEX.sizeInGB":9.5367431640625E-6,
   [junit4]   2>     "SEARCHER.searcher.numDocs":0}}
   [junit4]   2> 1408333 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408350 DEBUG (simCloudManagerPool-1528-thread-26) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=21
   [junit4]   2> 1408354 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408366 DEBUG (simCloudManagerPool-1528-thread-26) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 21
   [junit4]   2> 1408370 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408386 DEBUG (simCloudManagerPool-1528-thread-27) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=22
   [junit4]   2> 1408390 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408403 DEBUG (simCloudManagerPool-1528-thread-27) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 22
   [junit4]   2> 1408423 DEBUG (simCloudManagerPool-1528-thread-28) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=23
   [junit4]   2> 1408427 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408439 DEBUG (simCloudManagerPool-1528-thread-28) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 23
   [junit4]   2> 1408443 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408459 DEBUG (simCloudManagerPool-1528-thread-29) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=24
   [junit4]   2> 1408463 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408479 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408480 DEBUG (simCloudManagerPool-1528-thread-29) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 24
   [junit4]   2> 1408480 DEBUG (MetricsHistoryHandler-1532-thread-1) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=25
   [junit4]   2> 1408516 DEBUG (MetricsHistoryHandler-1532-thread-1) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 25
   [junit4]   2> 1408520 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408532 DEBUG (simCloudManagerPool-1528-thread-30) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=26
   [junit4]   2> 1408532 DEBUG (simCloudManagerPool-1528-thread-30) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 26
   [junit4]   2> 1408552 DEBUG (simCloudManagerPool-1528-thread-31) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=27
   [junit4]   2> 1408589 DEBUG (simCloudManagerPool-1528-thread-31) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 27
   [junit4]   2> 1408644 DEBUG (simCloudManagerPool-1528-thread-32) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=28
   [junit4]   2> 1408653 DEBUG (ScheduledTrigger-1529-thread-2) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408670 DEBUG (ScheduledTrigger-1529-thread-2) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408683 DEBUG (ScheduledTrigger-1529-thread-2) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408698 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408706 DEBUG (simCloudManagerPool-1528-thread-32) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 28
   [junit4]   2> 1408714 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408722 DEBUG (simCloudManagerPool-1528-thread-33) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=29
   [junit4]   2> 1408746 DEBUG (simCloudManagerPool-1528-thread-33) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 29
   [junit4]   2> 1408751 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408771 DEBUG (simCloudManagerPool-1528-thread-34) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=30
   [junit4]   2> 1408771 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408788 DEBUG (simCloudManagerPool-1528-thread-34) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 30
   [junit4]   2> 1408788 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408807 DEBUG (simCloudManagerPool-1528-thread-35) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=31
   [junit4]   2> 1408808 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408824 DEBUG (simCloudManagerPool-1528-thread-35) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 31
   [junit4]   2> 1408824 DEBUG (simCloudManagerPool-1528-thread-35) [    ] o.a.s.c.a.s.SimClusterStateProvider Running leader election (testNodeLost / shard3)
   [junit4]   2> 1408824 DEBUG (simCloudManagerPool-1528-thread-35) [    ] o.a.s.c.a.s.SimClusterStateProvider -- elected new leader for testNodeLost / shard3 (currentVersion=32): {"core_node21":{
   [junit4]   2>     "core":"testNodeLost_shard3_replica_n21",
   [junit4]   2>     "shard":"shard3",
   [junit4]   2>     "collection":"testNodeLost",
   [junit4]   2>     "node_name":"127.0.0.1:10045_solr",
   [junit4]   2>     "type":"NRT",
   [junit4]   2>     "leader":"true",
   [junit4]   2>     "SEARCHER.searcher.maxDoc":0,
   [junit4]   2>     "SEARCHER.searcher.deletedDocs":0,
   [junit4]   2>     "INDEX.sizeInBytes":10240,
   [junit4]   2>     "state":"active",
   [junit4]   2>     "INDEX.sizeInGB":9.5367431640625E-6,
   [junit4]   2>     "SEARCHER.searcher.numDocs":0}}
   [junit4]   2> 1408824 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408844 DEBUG (simCloudManagerPool-1528-thread-36) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=32
   [junit4]   2> 1408844 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408861 DEBUG (simCloudManagerPool-1528-thread-36) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 32
   [junit4]   2> 1408861 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408880 DEBUG (simCloudManagerPool-1528-thread-37) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=33
   [junit4]   2> 1408880 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408897 DEBUG (simCloudManagerPool-1528-thread-37) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 33
   [junit4]   2> 1408897 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408917 DEBUG (simCloudManagerPool-1528-thread-38) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=34
   [junit4]   2> 1408917 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408934 DEBUG (simCloudManagerPool-1528-thread-38) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 34
   [junit4]   2> 1408934 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408953 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1408965 DEBUG (simCloudManagerPool-1528-thread-39) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=35
   [junit4]   2> 1408969 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409003 DEBUG (simCloudManagerPool-1528-thread-39) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 35
   [junit4]   2> 1409022 DEBUG (simCloudManagerPool-1528-thread-40) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=36
   [junit4]   2> 1409026 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409042 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409043 DEBUG (simCloudManagerPool-1528-thread-40) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 36
   [junit4]   2> 1409063 DEBUG (simCloudManagerPool-1528-thread-41) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=37
   [junit4]   2> 1409063 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409079 DEBUG (simCloudManagerPool-1528-thread-41) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 37
   [junit4]   2> 1409079 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409095 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409095 DEBUG (simCloudManagerPool-1528-thread-42) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=38
   [junit4]   2> 1409115 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409116 DEBUG (simCloudManagerPool-1528-thread-42) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 38
   [junit4]   2> 1409132 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409144 DEBUG (simCloudManagerPool-1528-thread-43) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=39
   [junit4]   2> 1409145 DEBUG (simCloudManagerPool-1528-thread-43) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 39
   [junit4]   2> 1409152 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409168 DEBUG (simCloudManagerPool-1528-thread-44) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=40
   [junit4]   2> 1409168 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409189 DEBUG (simCloudManagerPool-1528-thread-44) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 40
   [junit4]   2> 1409189 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409205 DEBUG (simCloudManagerPool-1528-thread-45) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=41
   [junit4]   2> 1409205 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409242 DEBUG (simCloudManagerPool-1528-thread-45) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 41
   [junit4]   2> 1409242 DEBUG (simCloudManagerPool-1528-thread-45) [    ] o.a.s.c.a.s.SimClusterStateProvider Running leader election (testNodeLost / shard4)
   [junit4]   2> 1409242 DEBUG (simCloudManagerPool-1528-thread-45) [    ] o.a.s.c.a.s.SimClusterStateProvider -- elected new leader for testNodeLost / shard4 (currentVersion=42): {"core_node31":{
   [junit4]   2>     "core":"testNodeLost_shard4_replica_n31",
   [junit4]   2>     "shard":"shard4",
   [junit4]   2>     "collection":"testNodeLost",
   [junit4]   2>     "node_name":"127.0.0.1:10065_solr",
   [junit4]   2>     "type":"NRT",
   [junit4]   2>     "leader":"true",
   [junit4]   2>     "SEARCHER.searcher.maxDoc":0,
   [junit4]   2>     "SEARCHER.searcher.deletedDocs":0,
   [junit4]   2>     "INDEX.sizeInBytes":10240,
   [junit4]   2>     "state":"active",
   [junit4]   2>     "INDEX.sizeInGB":9.5367431640625E-6,
   [junit4]   2>     "SEARCHER.searcher.numDocs":0}}
   [junit4]   2> 1409242 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409261 DEBUG (simCloudManagerPool-1528-thread-46) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=42
   [junit4]   2> 1409262 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409278 DEBUG (simCloudManagerPool-1528-thread-46) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 42
   [junit4]   2> 1409278 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409298 DEBUG (simCloudManagerPool-1528-thread-47) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=43
   [junit4]   2> 1409298 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409314 DEBUG (simCloudManagerPool-1528-thread-47) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 43
   [junit4]   2> 1409314 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409334 DEBUG (simCloudManagerPool-1528-thread-48) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=44
   [junit4]   2> 1409334 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409351 DEBUG (simCloudManagerPool-1528-thread-48) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 44
   [junit4]   2> 1409352 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409371 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409383 DEBUG (simCloudManagerPool-1528-thread-49) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=45
   [junit4]   2> 1409387 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409404 DEBUG (simCloudManagerPool-1528-thread-49) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 45
   [junit4]   2> 1409408 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409419 DEBUG (simCloudManagerPool-1528-thread-50) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=46
   [junit4]   2> 1409423 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409440 DEBUG (simCloudManagerPool-1528-thread-50) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 46
   [junit4]   2> 1409444 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409456 DEBUG (simCloudManagerPool-1528-thread-51) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=47
   [junit4]   2> 1409460 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409480 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409493 DEBUG (simCloudManagerPool-1528-thread-51) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 47
   [junit4]   2> 1409496 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409513 DEBUG (simCloudManagerPool-1528-thread-52) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=48
   [junit4]   2> 1409514 DEBUG (simCloudManagerPool-1528-thread-52) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 48
   [junit4]   2> 1409516 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409529 DEBUG (simCloudManagerPool-1528-thread-53) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=49
   [junit4]   2> 1409529 DEBUG (simCloudManagerPool-1528-thread-53) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 49
   [junit4]   2> 1409533 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409553 DEBUG (simCloudManagerPool-1528-thread-54) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=50
   [junit4]   2> 1409553 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409569 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409589 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409605 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409622 DEBUG (simCloudManagerPool-1528-thread-54) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 50
   [junit4]   2> 1409626 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409638 DEBUG (simCloudManagerPool-1528-thread-55) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=51
   [junit4]   2> 1409639 DEBUG (simCloudManagerPool-1528-thread-55) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 51
   [junit4]   2> 1409639 DEBUG (simCloudManagerPool-1528-thread-55) [    ] o.a.s.c.a.s.SimClusterStateProvider Running leader election (testNodeLost / shard5)
   [junit4]   2> 1409639 DEBUG (simCloudManagerPool-1528-thread-55) [    ] o.a.s.c.a.s.SimClusterStateProvider -- elected new leader for testNodeLost / shard5 (currentVersion=52): {"core_node41":{
   [junit4]   2>     "core":"testNodeLost_shard5_replica_n41",
   [junit4]   2>     "shard":"shard5",
   [junit4]   2>     "collection":"testNodeLost",
   [junit4]   2>     "node_name":"127.0.0.1:10012_solr",
   [junit4]   2>     "type":"NRT",
   [junit4]   2>     "leader":"true",
   [junit4]   2>     "SEARCHER.searcher.maxDoc":0,
   [junit4]   2>     "SEARCHER.searcher.deletedDocs":0,
   [junit4]   2>     "INDEX.sizeInBytes":10240,
   [junit4]   2>     "state":"active",
   [junit4]   2>     "INDEX.sizeInGB":9.5367431640625E-6,
   [junit4]   2>     "SEARCHER.searcher.numDocs":0}}
   [junit4]   2> 1409642 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409658 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409658 DEBUG (simCloudManagerPool-1528-thread-56) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=52
   [junit4]   2> 1409659 DEBUG (simCloudManagerPool-1528-thread-56) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 52
   [junit4]   2> 1409675 DEBUG (simCloudManagerPool-1528-thread-57) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=53
   [junit4]   2> 1409675 DEBUG (simCloudManagerPool-1528-thread-57) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 53
   [junit4]   2> 1409678 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409695 DEBUG (simCloudManagerPool-1528-thread-58) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=54
   [junit4]   2> 1409695 DEBUG (simCloudManagerPool-1528-thread-58) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 54
   [junit4]   2> 1409699 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409715 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409728 DEBUG (simCloudManagerPool-1528-thread-59) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=55
   [junit4]   2> 1409728 DEBUG (simCloudManagerPool-1528-thread-59) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 55
   [junit4]   2> 1409735 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409751 DEBUG (simCloudManagerPool-1528-thread-60) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=56
   [junit4]   2> 1409752 DEBUG (simCloudManagerPool-1528-thread-60) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 56
   [junit4]   2> 1409752 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409768 DEBUG (simCloudManagerPool-1528-thread-61) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=57
   [junit4]   2> 1409768 DEBUG (simCloudManagerPool-1528-thread-61) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 57
   [junit4]   2> 1409768 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409788 DEBUG (simCloudManagerPool-1528-thread-62) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=58
   [junit4]   2> 1409788 DEBUG (simCloudManagerPool-1528-thread-62) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 58
   [junit4]   2> 1409789 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409804 DEBUG (simCloudManagerPool-1528-thread-63) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=59
   [junit4]   2> 1409805 DEBUG (simCloudManagerPool-1528-thread-63) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 59
   [junit4]   2> 1409805 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409824 DEBUG (simCloudManagerPool-1528-thread-64) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=60
   [junit4]   2> 1409825 DEBUG (simCloudManagerPool-1528-thread-64) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 60
   [junit4]   2> 1409825 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409840 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409857 DEBUG (simCloudManagerPool-1528-thread-65) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=61
   [junit4]   2> 1409857 DEBUG (simCloudManagerPool-1528-thread-65) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 61
   [junit4]   2> 1409857 DEBUG (simCloudManagerPool-1528-thread-65) [    ] o.a.s.c.a.s.SimClusterStateProvider Running leader election (testNodeLost / shard6)
   [junit4]   2> 1409857 DEBUG (simCloudManagerPool-1528-thread-65) [    ] o.a.s.c.a.s.SimClusterStateProvider -- elected new leader for testNodeLost / shard6 (currentVersion=62): {"core_node51":{
   [junit4]   2>     "core":"testNodeLost_shard6_replica_n51",
   [junit4]   2>     "shard":"shard6",
   [junit4]   2>     "collection":"testNodeLost",
   [junit4]   2>     "node_name":"127.0.0.1:10098_solr",
   [junit4]   2>     "type":"NRT",
   [junit4]   2>     "leader":"true",
   [junit4]   2>     "SEARCHER.searcher.maxDoc":0,
   [junit4]   2>     "SEARCHER.searcher.deletedDocs":0,
   [junit4]   2>     "INDEX.sizeInBytes":10240,
   [junit4]   2>     "state":"active",
   [junit4]   2>     "INDEX.sizeInGB":9.5367431640625E-6,
   [junit4]   2>     "SEARCHER.searcher.numDocs":0}}
   [junit4]   2> 1409861 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409877 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409877 DEBUG (simCloudManagerPool-1528-thread-66) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=62
   [junit4]   2> 1409878 DEBUG (simCloudManagerPool-1528-thread-66) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 62
   [junit4]   2> 1409893 DEBUG (simCloudManagerPool-1528-thread-67) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=63
   [junit4]   2> 1409905 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409922 DEBUG (simCloudManagerPool-1528-thread-67) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 63
   [junit4]   2> 1409939 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409942 DEBUG (simCloudManagerPool-1528-thread-68) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=64
   [junit4]   2> 1409943 DEBUG (simCloudManagerPool-1528-thread-68) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 64
   [junit4]   2> 1409954 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409959 DEBUG (simCloudManagerPool-1528-thread-69) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=65
   [junit4]   2> 1409960 DEBUG (simCloudManagerPool-1528-thread-69) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 65
   [junit4]   2> 1409974 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1409990 DEBUG (simCloudManagerPool-1528-thread-70) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=66
   [junit4]   2> 1409991 DEBUG (simCloudManagerPool-1528-thread-70) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 66
   [junit4]   2> 1409991 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410011 DEBUG (simCloudManagerPool-1528-thread-71) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=67
   [junit4]   2> 1410011 DEBUG (simCloudManagerPool-1528-thread-71) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 67
   [junit4]   2> 1410011 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410031 DEBUG (simCloudManagerPool-1528-thread-72) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=68
   [junit4]   2> 1410031 DEBUG (simCloudManagerPool-1528-thread-72) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 68
   [junit4]   2> 1410032 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410051 DEBUG (simCloudManagerPool-1528-thread-73) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=69
   [junit4]   2> 1410052 DEBUG (simCloudManagerPool-1528-thread-73) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 69
   [junit4]   2> 1410052 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410071 DEBUG (simCloudManagerPool-1528-thread-74) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=70
   [junit4]   2> 1410072 DEBUG (simCloudManagerPool-1528-thread-74) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 70
   [junit4]   2> 1410072 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410088 DEBUG (simCloudManagerPool-1528-thread-75) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=71
   [junit4]   2> 1410088 DEBUG (simCloudManagerPool-1528-thread-75) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 71
   [junit4]   2> 1410088 DEBUG (simCloudManagerPool-1528-thread-75) [    ] o.a.s.c.a.s.SimClusterStateProvider Running leader election (testNodeLost / shard7)
   [junit4]   2> 1410088 DEBUG (simCloudManagerPool-1528-thread-75) [    ] o.a.s.c.a.s.SimClusterStateProvider -- elected new leader for testNodeLost / shard7 (currentVersion=72): {"core_node61":{
   [junit4]   2>     "core":"testNodeLost_shard7_replica_n61",
   [junit4]   2>     "shard":"shard7",
   [junit4]   2>     "collection":"testNodeLost",
   [junit4]   2>     "node_name":"127.0.0.1:10056_solr",
   [junit4]   2>     "type":"NRT",
   [junit4]   2>     "leader":"true",
   [junit4]   2>     "SEARCHER.searcher.maxDoc":0,
   [junit4]   2>     "SEARCHER.searcher.deletedDocs":0,
   [junit4]   2>     "INDEX.sizeInBytes":10240,
   [junit4]   2>     "state":"active",
   [junit4]   2>     "INDEX.sizeInGB":9.5367431640625E-6,
   [junit4]   2>     "SEARCHER.searcher.numDocs":0}}
   [junit4]   2> 1410088 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410104 DEBUG (simCloudManagerPool-1528-thread-76) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=72
   [junit4]   2> 1410104 DEBUG (simCloudManagerPool-1528-thread-76) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 72
   [junit4]   2> 1410105 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410124 DEBUG (simCloudManagerPool-1528-thread-77) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=73
   [junit4]   2> 1410125 DEBUG (simCloudManagerPool-1528-thread-77) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 73
   [junit4]   2> 1410125 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410140 DEBUG (simCloudManagerPool-1528-thread-78) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=74
   [junit4]   2> 1410141 DEBUG (simCloudManagerPool-1528-thread-78) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 74
   [junit4]   2> 1410141 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410161 DEBUG (simCloudManagerPool-1528-thread-79) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=75
   [junit4]   2> 1410161 DEBUG (simCloudManagerPool-1528-thread-79) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 75
   [junit4]   2> 1410161 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410177 DEBUG (simCloudManagerPool-1528-thread-80) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=76
   [junit4]   2> 1410177 DEBUG (simCloudManagerPool-1528-thread-80) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 76
   [junit4]   2> 1410177 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410197 DEBUG (simCloudManagerPool-1528-thread-81) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=77
   [junit4]   2> 1410198 DEBUG (simCloudManagerPool-1528-thread-81) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 77
   [junit4]   2> 1410198 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410213 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410217 DEBUG (simCloudManagerPool-1528-thread-82) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=78
   [junit4]   2> 1410218 DEBUG (simCloudManagerPool-1528-thread-82) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 78
   [junit4]   2> 1410233 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410237 DEBUG (simCloudManagerPool-1528-thread-83) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=79
   [junit4]   2> 1410238 DEBUG (simCloudManagerPool-1528-thread-83) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 79
   [junit4]   2> 1410250 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410254 DEBUG (simCloudManagerPool-1528-thread-84) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=80
   [junit4]   2> 1410254 DEBUG (simCloudManagerPool-1528-thread-84) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 80
   [junit4]   2> 1410271 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410282 DEBUG (simCloudManagerPool-1528-thread-85) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=81
   [junit4]   2> 1410283 DEBUG (simCloudManagerPool-1528-thread-85) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 81
   [junit4]   2> 1410283 DEBUG (simCloudManagerPool-1528-thread-85) [    ] o.a.s.c.a.s.SimClusterStateProvider Running leader election (testNodeLost / shard8)
   [junit4]   2> 1410283 DEBUG (simCloudManagerPool-1528-thread-85) [    ] o.a.s.c.a.s.SimClusterStateProvider -- elected new leader for testNodeLost / shard8 (currentVersion=82): {"core_node71":{
   [junit4]   2>     "core":"testNodeLost_shard8_replica_n71",
   [junit4]   2>     "shard":"shard8",
   [junit4]   2>     "collection":"testNodeLost",
   [junit4]   2>     "node_name":"127.0.0.1:10087_solr",
   [junit4]   2>     "type":"NRT",
   [junit4]   2>     "leader":"true",
   [junit4]   2>     "SEARCHER.searcher.maxDoc":0,
   [junit4]   2>     "SEARCHER.searcher.deletedDocs":0,
   [junit4]   2>     "INDEX.sizeInBytes":10240,
   [junit4]   2>     "state":"active",
   [junit4]   2>     "INDEX.sizeInGB":9.5367431640625E-6,
   [junit4]   2>     "SEARCHER.searcher.numDocs":0}}
   [junit4]   2> 1410286 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410299 DEBUG (simCloudManagerPool-1528-thread-86) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=82
   [junit4]   2> 1410299 DEBUG (simCloudManagerPool-1528-thread-86) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 82
   [junit4]   2> 1410306 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410323 DEBUG (simCloudManagerPool-1528-thread-87) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=83
   [junit4]   2> 1410323 DEBUG (simCloudManagerPool-1528-thread-87) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 83
   [junit4]   2> 1410323 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410343 DEBUG (simCloudManagerPool-1528-thread-88) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=84
   [junit4]   2> 1410344 DEBUG (simCloudManagerPool-1528-thread-88) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 84
   [junit4]   2> 1410344 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410359 DEBUG (simCloudManagerPool-1528-thread-89) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=85
   [junit4]   2> 1410360 DEBUG (simCloudManagerPool-1528-thread-89) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 85
   [junit4]   2> 1410360 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410379 DEBUG (simCloudManagerPool-1528-thread-90) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=86
   [junit4]   2> 1410380 DEBUG (simCloudManagerPool-1528-thread-90) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 86
   [junit4]   2> 1410380 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410395 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410416 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410416 DEBUG (simCloudManagerPool-1528-thread-91) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=87
   [junit4]   2> 1410417 DEBUG (simCloudManagerPool-1528-thread-91) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 87
   [junit4]   2> 1410432 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410432 DEBUG (simCloudManagerPool-1528-thread-92) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=88
   [junit4]   2> 1410433 DEBUG (simCloudManagerPool-1528-thread-92) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 88
   [junit4]   2> 1410452 DEBUG (simCloudManagerPool-1528-thread-93) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=89
   [junit4]   2> 1410453 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410453 DEBUG (simCloudManagerPool-1528-thread-93) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 89
   [junit4]   2> 1410468 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410468 DEBUG (simCloudManagerPool-1528-thread-94) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=90
   [junit4]   2> 1410469 DEBUG (simCloudManagerPool-1528-thread-94) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 90
   [junit4]   2> 1410485 DEBUG (simCloudManagerPool-1528-thread-95) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=91
   [junit4]   2> 1410485 DEBUG (simCloudManagerPool-1528-thread-95) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 91
   [junit4]   2> 1410485 DEBUG (simCloudManagerPool-1528-thread-95) [    ] o.a.s.c.a.s.SimClusterStateProvider Running leader election (testNodeLost / shard9)
   [junit4]   2> 1410485 DEBUG (simCloudManagerPool-1528-thread-95) [    ] o.a.s.c.a.s.SimClusterStateProvider -- elected new leader for testNodeLost / shard9 (currentVersion=92): {"core_node81":{
   [junit4]   2>     "core":"testNodeLost_shard9_replica_n81",
   [junit4]   2>     "shard":"shard9",
   [junit4]   2>     "collection":"testNodeLost",
   [junit4]   2>     "node_name":"127.0.0.1:10053_solr",
   [junit4]   2>     "type":"NRT",
   [junit4]   2>     "leader":"true",
   [junit4]   2>     "SEARCHER.searcher.maxDoc":0,
   [junit4]   2>     "SEARCHER.searcher.deletedDocs":0,
   [junit4]   2>     "INDEX.sizeInBytes":10240,
   [junit4]   2>     "state":"active",
   [junit4]   2>     "INDEX.sizeInGB":9.5367431640625E-6,
   [junit4]   2>     "SEARCHER.searcher.numDocs":0}}
   [junit4]   2> 1410489 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410505 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410517 DEBUG (simCloudManagerPool-1528-thread-96) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=92
   [junit4]   2> 1410518 DEBUG (simCloudManagerPool-1528-thread-96) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 92
   [junit4]   2> 1410525 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410537 DEBUG (simCloudManagerPool-1528-thread-97) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=93
   [junit4]   2> 1410538 DEBUG (simCloudManagerPool-1528-thread-97) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 93
   [junit4]   2> 1410541 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410562 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410562 DEBUG (simCloudManagerPool-1528-thread-98) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=94
   [junit4]   2> 1410563 DEBUG (simCloudManagerPool-1528-thread-98) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 94
   [junit4]   2> 1410578 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410578 DEBUG (simCloudManagerPool-1528-thread-99) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=95
   [junit4]   2> 1410579 DEBUG (simCloudManagerPool-1528-thread-99) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 95
   [junit4]   2> 1410598 DEBUG (simCloudManagerPool-1528-thread-100) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=96
   [junit4]   2> 1410599 DEBUG (simCloudManagerPool-1528-thread-100) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 96
   [junit4]   2> 1410599 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410614 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410614 DEBUG (simCloudManagerPool-1528-thread-101) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=97
   [junit4]   2> 1410615 DEBUG (simCloudManagerPool-1528-thread-101) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 97
   [junit4]   2> 1410635 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410651 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410671 DEBUG (simCloudManagerPool-1528-thread-102) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=98
   [junit4]   2> 1410673 DEBUG (simCloudManagerPool-1528-thread-102) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 98
   [junit4]   2> 1410687 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410687 DEBUG (simCloudManagerPool-1528-thread-103) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=99
   [junit4]   2> 1410688 DEBUG (simCloudManagerPool-1528-thread-103) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 99
   [junit4]   2> 1410708 DEBUG (simCloudManagerPool-1528-thread-104) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=100
   [junit4]   2> 1410709 DEBUG (simCloudManagerPool-1528-thread-104) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 100
   [junit4]   2> 1410720 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410724 DEBUG (simCloudManagerPool-1528-thread-105) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=101
   [junit4]   2> 1410725 DEBUG (simCloudManagerPool-1528-thread-105) [    ] o.a.s.c.a.s.SimClusterStateProvider ** saved cluster state version 101
   [junit4]   2> 1410725 DEBUG (simCloudManagerPool-1528-thread-105) [    ] o.a.s.c.a.s.SimClusterStateProvider Running leader election (testNodeLost / shard10)
   [junit4]   2> 1410725 DEBUG (simCloudManagerPool-1528-thread-105) [    ] o.a.s.c.a.s.SimClusterStateProvider -- elected new leader for testNodeLost / shard10 (currentVersion=102): {"core_node91":{
   [junit4]   2>     "core":"testNodeLost_shard10_replica_n91",
   [junit4]   2>     "shard":"shard10",
   [junit4]   2>     "collection":"testNodeLost",
   [junit4]   2>     "node_name":"127.0.0.1:10099_solr",
   [junit4]   2>     "type":"NRT",
   [junit4]   2>     "leader":"true",
   [junit4]   2>     "SEARCHER.searcher.maxDoc":0,
   [junit4]   2>     "SEARCHER.searcher.deletedDocs":0,
   [junit4]   2>     "INDEX.sizeInBytes":10240,
   [junit4]   2>     "state":"active",
   [junit4]   2>     "INDEX.sizeInGB":9.5367431640625E-6,
   [junit4]   2>     "SEARCHER.searcher.numDocs":0}}
   [junit4]   2> 1410736 DEBUG (ScheduledTrigger-1529-thread-3) [    ] o.a.s.c.a.NodeLostTrigger Running NodeLostTrigger: node_lost_trigger3 with currently live nodes: 100 and last live nodes: 100
   [junit4]   2> 1410744 DEBUG (simCloudManagerPool-1528-thread-106) [    ] o.a.s.c.a.s.SimClusterStateProvider ** creating new collection states, currentVersion=102
   [junit4]   2> 1410745 DEBUG

[...truncated too long message...]

rd1"}]}}]}}
   [junit4]   2> 	at org.apache.solr.cloud.autoscaling.ExecutePlanAction.process(ExecutePlanAction.java:120) ~[java/:?]
   [junit4]   2> 	at org.apache.solr.cloud.autoscaling.ScheduledTriggers.lambda$add$3(ScheduledTriggers.java:324) ~[java/:?]
   [junit4]   2> 	... 6 more
   [junit4]   2> Caused by: org.apache.solr.common.SolrException: Unexpected exception executing operation: action=ADDREPLICA&async=search_rate_trigger/936afca9279f2eTe08w3spwzg64re6josfe66ild/0&waitForFinalState=true&collection=testSearchRate&shard=shard1&node=127.0.0.1:10336_solr&type=NRT
   [junit4]   2> 	at org.apache.solr.cloud.autoscaling.ExecutePlanAction.process(ExecutePlanAction.java:110) ~[java/:?]
   [junit4]   2> 	at org.apache.solr.cloud.autoscaling.ScheduledTriggers.lambda$add$3(ScheduledTriggers.java:324) ~[java/:?]
   [junit4]   2> 	... 6 more
   [junit4]   2> Caused by: java.io.IOException: java.util.concurrent.RejectedExecutionException: Task org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor$$Lambda$190/0x00000001003f7440@7ea6660d rejected from org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor@3939f189[Terminated, pool size = 0, active threads = 0, queued tasks = 0, completed tasks = 35]
   [junit4]   2> 	at org.apache.solr.cloud.autoscaling.sim.SimCloudManager.request(SimCloudManager.java:728) ~[java/:?]
   [junit4]   2> 	at org.apache.solr.cloud.autoscaling.ExecutePlanAction.process(ExecutePlanAction.java:80) ~[java/:?]
   [junit4]   2> 	at org.apache.solr.cloud.autoscaling.ScheduledTriggers.lambda$add$3(ScheduledTriggers.java:324) ~[java/:?]
   [junit4]   2> 	... 6 more
   [junit4]   2> Caused by: java.util.concurrent.RejectedExecutionException: Task org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor$$Lambda$190/0x00000001003f7440@7ea6660d rejected from org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor@3939f189[Terminated, pool size = 0, active threads = 0, queued tasks = 0, completed tasks = 35]
   [junit4]   2> 	at java.util.concurrent.ThreadPoolExecutor$AbortPolicy.rejectedExecution(ThreadPoolExecutor.java:2055) ~[?:?]
   [junit4]   2> 	at java.util.concurrent.ThreadPoolExecutor.reject(ThreadPoolExecutor.java:825) ~[?:?]
   [junit4]   2> 	at java.util.concurrent.ThreadPoolExecutor.execute(ThreadPoolExecutor.java:1355) ~[?:?]
   [junit4]   2> 	at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.execute(ExecutorUtil.java:194) ~[java/:?]
   [junit4]   2> 	at java.util.concurrent.AbstractExecutorService.submit(AbstractExecutorService.java:140) ~[?:?]
   [junit4]   2> 	at org.apache.solr.cloud.autoscaling.sim.SimCloudManager.request(SimCloudManager.java:725) ~[java/:?]
   [junit4]   2> 	at org.apache.solr.cloud.autoscaling.ExecutePlanAction.process(ExecutePlanAction.java:80) ~[java/:?]
   [junit4]   2> 	at org.apache.solr.cloud.autoscaling.ScheduledTriggers.lambda$add$3(ScheduledTriggers.java:324) ~[java/:?]
   [junit4]   2> 	... 6 more
   [junit4]   2> 1690914 DEBUG (AutoscalingActionExecutor-1548-thread-1) [    ] o.a.s.c.a.ScheduledTriggers -- processing took 7335 ms for event id=936afca9279f2eTe08w3spwzg64re6josfe66ild
   [junit4]   2> 1690914 DEBUG (TEST-TestSimLargeCluster.testSearchRate-seed#[7D2F3D5486215375]) [    ] o.a.s.c.a.ScheduledTriggers Awaiting termination for scheduled thread pool executor
   [junit4]   2> 1690914 DEBUG (TEST-TestSimLargeCluster.testSearchRate-seed#[7D2F3D5486215375]) [    ] o.a.s.c.a.ScheduledTriggers ScheduledTriggers closed completely
   [junit4]   2> 1690914 DEBUG (TEST-TestSimLargeCluster.testSearchRate-seed#[7D2F3D5486215375]) [    ] o.a.s.c.a.OverseerTriggerThread OverseerTriggerThread has been closed explicitly
   [junit4]   2> 1690915 INFO  (TEST-TestSimLargeCluster.testSearchRate-seed#[7D2F3D5486215375]) [    ] o.a.s.SolrTestCaseJ4 ###Ending testSearchRate
   [junit4]   2> NOTE: leaving temporary files on disk at: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.sim.TestSimLargeCluster_7D2F3D5486215375-001
   [junit4]   2> NOTE: test params are: codec=Lucene80, sim=Asserting(org.apache.lucene.search.similarities.AssertingSimilarity@77c40650), locale=nnh, timezone=Pacific/Wake
   [junit4]   2> NOTE: Linux 4.4.0-112-generic amd64/Oracle Corporation 11.0.1 (64-bit)/cpus=4,threads=1,free=121324560,total=372244480
   [junit4]   2> NOTE: All tests run in this JVM: [TestLogWatcher, DistributedDebugComponentTest, HdfsRecoveryZkTest, BitVectorTest, BasicZkTest, DistribCursorPagingTest, CollectionTooManyReplicasTest, TestRangeQuery, TestManagedSynonymFilterFactory, TestInfoStreamLogging, ZkStateReaderTest, TestExpandComponent, HdfsWriteToMultipleCollectionsTest, ActionThrottleTest, GraphQueryTest, RequestHandlersTest, SolrRequestParserTest, SolrCLIZkUtilsTest, V2ApiIntegrationTest, TimeZoneUtilsTest, MBeansHandlerTest, BlockDirectoryTest, AssignBackwardCompatibilityTest, InfoHandlerTest, SuggestComponentContextFilterQueryTest, TestPullReplica, TestPartialUpdateDeduplication, TestUninvertingReader, HealthCheckHandlerTest, AtomicUpdatesTest, CopyFieldTest, TestStreamBody, TestIndexSearcher, TestTrie, TestLegacyFieldCache, ReplaceNodeNoTargetTest, TestDistributedGrouping, TestCloudSearcherWarming, TestReplicationHandlerBackup, BaseCdcrDistributedZkTest, DistributedQueryComponentCustomSortTest, TestJavabinTupleStreamParser, TestDefaultStatsCache, TestConfigOverlay, TestAnalyzeInfixSuggestions, TestSizeLimitedDistributedMap, NodeAddedTriggerIntegrationTest, ZkNodePropsTest, RuleEngineTest, TestSimpleQParserPlugin, TestSimLargeCluster]
   [junit4] Completed [123/858 (1!)] on J2 in 286.78s, 5 tests, 1 failure, 1 skipped <<< FAILURES!

[...truncated 49846 lines...]
-ecj-javadoc-lint-src:
    [mkdir] Created dir: /tmp/ecj2041362690
 [ecj-lint] Compiling 69 source files to /tmp/ecj2041362690
 [ecj-lint] invalid Class-Path header in manifest of jar file: /home/jenkins/.ivy2/cache/org.restlet.jee/org.restlet/jars/org.restlet-2.3.0.jar
 [ecj-lint] invalid Class-Path header in manifest of jar file: /home/jenkins/.ivy2/cache/org.restlet.jee/org.restlet.ext.servlet/jars/org.restlet.ext.servlet-2.3.0.jar
 [ecj-lint] ----------
 [ecj-lint] 1. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java (at line 28)
 [ecj-lint] 	import javax.naming.InitialContext;
 [ecj-lint] 	       ^^^^^^^^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] The type javax.naming.InitialContext is not accessible
 [ecj-lint] ----------
 [ecj-lint] 2. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java (at line 29)
 [ecj-lint] 	import javax.naming.NamingException;
 [ecj-lint] 	       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] The type javax.naming.NamingException is not accessible
 [ecj-lint] ----------
 [ecj-lint] 3. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java (at line 182)
 [ecj-lint] 	c = getFromJndi(initProps, jndiName);
 [ecj-lint] 	    ^^^^^^^^^^^
 [ecj-lint] The method getFromJndi(Properties, String) from the type new Callable<Connection>(){} refers to the missing type NamingException
 [ecj-lint] ----------
 [ecj-lint] 4. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java (at line 245)
 [ecj-lint] 	private Connection getFromJndi(final Properties initProps, final String jndiName) throws NamingException,
 [ecj-lint] 	                                                                                         ^^^^^^^^^^^^^^^
 [ecj-lint] NamingException cannot be resolved to a type
 [ecj-lint] ----------
 [ecj-lint] 5. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java (at line 249)
 [ecj-lint] 	InitialContext ctx =  new InitialContext();
 [ecj-lint] 	^^^^^^^^^^^^^^
 [ecj-lint] InitialContext cannot be resolved to a type
 [ecj-lint] ----------
 [ecj-lint] 6. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java (at line 249)
 [ecj-lint] 	InitialContext ctx =  new InitialContext();
 [ecj-lint] 	                          ^^^^^^^^^^^^^^
 [ecj-lint] InitialContext cannot be resolved to a type
 [ecj-lint] ----------
 [ecj-lint] 6 problems (6 errors)

BUILD FAILED
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/build.xml:652: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/build.xml:101: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build.xml:651: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/common-build.xml:479: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/common-build.xml:2009: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/common-build.xml:2048: Compile failed; see the compiler error output for details.

Total time: 605 minutes 44 seconds
Build step 'Invoke Ant' marked build as failure
Archiving artifacts
Recording test results
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any

[JENKINS] Lucene-Solr-NightlyTests-master - Build # 1856 - Failure

Posted by Apache Jenkins Server <je...@builds.apache.org>.
Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-master/1856/

7 tests failed.
FAILED:  org.apache.lucene.search.TestSearcherManager.testConcurrentIndexCloseSearchAndRefresh

Error Message:
Captured an uncaught exception in thread: Thread[id=17184, name=Thread-15914, state=RUNNABLE, group=TGRP-TestSearcherManager]

Stack Trace:
com.carrotsearch.randomizedtesting.UncaughtExceptionError: Captured an uncaught exception in thread: Thread[id=17184, name=Thread-15914, state=RUNNABLE, group=TGRP-TestSearcherManager]
	at __randomizedtesting.SeedInfo.seed([754AC8E0CE5EB0D2:B78C0BA093E04CE5]:0)
Caused by: java.lang.RuntimeException: java.nio.file.FileSystemException: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/build/core/test/J2/temp/lucene.search.TestSearcherManager_754AC8E0CE5EB0D2-001/tempDir-001/_7m.nvm: Too many open files
	at __randomizedtesting.SeedInfo.seed([754AC8E0CE5EB0D2]:0)
	at org.apache.lucene.search.TestSearcherManager$11.run(TestSearcherManager.java:677)
Caused by: java.nio.file.FileSystemException: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/build/core/test/J2/temp/lucene.search.TestSearcherManager_754AC8E0CE5EB0D2-001/tempDir-001/_7m.nvm: Too many open files
	at org.apache.lucene.mockfile.HandleLimitFS.onOpen(HandleLimitFS.java:48)
	at org.apache.lucene.mockfile.HandleTrackingFS.callOpenHook(HandleTrackingFS.java:81)
	at org.apache.lucene.mockfile.HandleTrackingFS.newFileChannel(HandleTrackingFS.java:197)
	at org.apache.lucene.mockfile.FilterFileSystemProvider.newFileChannel(FilterFileSystemProvider.java:202)
	at java.base/java.nio.channels.FileChannel.open(FileChannel.java:292)
	at java.base/java.nio.channels.FileChannel.open(FileChannel.java:345)
	at org.apache.lucene.store.NIOFSDirectory.openInput(NIOFSDirectory.java:81)
	at org.apache.lucene.util.LuceneTestCase.slowFileExists(LuceneTestCase.java:2828)
	at org.apache.lucene.store.MockDirectoryWrapper.openInput(MockDirectoryWrapper.java:742)
	at org.apache.lucene.store.Directory.openChecksumInput(Directory.java:156)
	at org.apache.lucene.store.MockDirectoryWrapper.openChecksumInput(MockDirectoryWrapper.java:1038)
	at org.apache.lucene.codecs.lucene80.Lucene80NormsProducer.<init>(Lucene80NormsProducer.java:59)
	at org.apache.lucene.codecs.lucene80.Lucene80NormsFormat.normsProducer(Lucene80NormsFormat.java:90)
	at org.apache.lucene.codecs.asserting.AssertingNormsFormat.normsProducer(AssertingNormsFormat.java:51)
	at org.apache.lucene.index.SegmentCoreReaders.<init>(SegmentCoreReaders.java:121)
	at org.apache.lucene.index.SegmentReader.<init>(SegmentReader.java:84)
	at org.apache.lucene.index.ReadersAndUpdates.getReader(ReadersAndUpdates.java:177)
	at org.apache.lucene.index.ReadersAndUpdates.getReadOnlyClone(ReadersAndUpdates.java:219)
	at org.apache.lucene.index.StandardDirectoryReader.open(StandardDirectoryReader.java:109)
	at org.apache.lucene.index.IndexWriter.getReader(IndexWriter.java:526)
	at org.apache.lucene.index.DirectoryReader.open(DirectoryReader.java:116)
	at org.apache.lucene.search.SearcherManager.<init>(SearcherManager.java:108)
	at org.apache.lucene.search.SearcherManager.<init>(SearcherManager.java:76)
	at org.apache.lucene.search.TestSearcherManager$11.run(TestSearcherManager.java:665)


FAILED:  org.apache.solr.cloud.api.collections.ShardSplitTest.testSplitWithChaosMonkey

Error Message:
Address already in use

Stack Trace:
java.net.BindException: Address already in use
	at __randomizedtesting.SeedInfo.seed([A84462B0258F4DFC:2363B1616489E678]:0)
	at java.base/sun.nio.ch.Net.bind0(Native Method)
	at java.base/sun.nio.ch.Net.bind(Net.java:461)
	at java.base/sun.nio.ch.Net.bind(Net.java:453)
	at java.base/sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:227)
	at java.base/sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:80)
	at org.eclipse.jetty.server.ServerConnector.openAcceptChannel(ServerConnector.java:342)
	at org.eclipse.jetty.server.ServerConnector.open(ServerConnector.java:308)
	at org.eclipse.jetty.server.AbstractNetworkConnector.doStart(AbstractNetworkConnector.java:80)
	at org.eclipse.jetty.server.ServerConnector.doStart(ServerConnector.java:236)
	at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:68)
	at org.eclipse.jetty.server.Server.doStart(Server.java:394)
	at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:68)
	at org.apache.solr.client.solrj.embedded.JettySolrRunner.retryOnPortBindFailure(JettySolrRunner.java:558)
	at org.apache.solr.client.solrj.embedded.JettySolrRunner.start(JettySolrRunner.java:497)
	at org.apache.solr.client.solrj.embedded.JettySolrRunner.start(JettySolrRunner.java:465)
	at org.apache.solr.cloud.api.collections.ShardSplitTest.testSplitWithChaosMonkey(ShardSplitTest.java:499)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:1082)
	at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:1054)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.base/java.lang.Thread.run(Thread.java:834)


FAILED:  junit.framework.TestSuite.org.apache.solr.cloud.api.collections.ShardSplitTest

Error Message:
10 threads leaked from SUITE scope at org.apache.solr.cloud.api.collections.ShardSplitTest:     1) Thread[id=20274, name=qtp988075751-20274, state=TIMED_WAITING, group=TGRP-ShardSplitTest]         at java.base@11.0.1/jdk.internal.misc.Unsafe.park(Native Method)         at java.base@11.0.1/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)         at java.base@11.0.1/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)         at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:392)         at app//org.eclipse.jetty.util.thread.QueuedThreadPool.idleJobPoll(QueuedThreadPool.java:656)         at app//org.eclipse.jetty.util.thread.QueuedThreadPool.access$800(QueuedThreadPool.java:46)         at app//org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:720)         at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)    2) Thread[id=20270, name=qtp988075751-20270, state=RUNNABLE, group=TGRP-ShardSplitTest]         at java.base@11.0.1/sun.nio.ch.EPoll.wait(Native Method)         at java.base@11.0.1/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:120)         at java.base@11.0.1/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:124)         at java.base@11.0.1/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)         at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.select(ManagedSelector.java:423)         at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.produce(ManagedSelector.java:360)         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produceTask(EatWhatYouKill.java:357)         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:181)         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:168)         at 
app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produce(EatWhatYouKill.java:132)         at app//org.eclipse.jetty.io.ManagedSelector$$Lambda$172/0x0000000100352440.run(Unknown Source)         at app//org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:765)         at app//org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:683)         at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)    3) Thread[id=20272, name=qtp988075751-20272-acceptor-0@13dd982f-ServerConnector@21670263{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:34149}, state=RUNNABLE, group=TGRP-ShardSplitTest]         at java.base@11.0.1/sun.nio.ch.ServerSocketChannelImpl.accept0(Native Method)         at java.base@11.0.1/sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:533)         at java.base@11.0.1/sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:285)         at app//org.eclipse.jetty.server.ServerConnector.accept(ServerConnector.java:385)         at app//org.eclipse.jetty.server.AbstractConnector$Acceptor.run(AbstractConnector.java:648)         at app//org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:765)         at app//org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:683)         at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)    4) Thread[id=20402, name=Scheduler-1054557539, state=TIMED_WAITING, group=TGRP-ShardSplitTest]         at java.base@11.0.1/jdk.internal.misc.Unsafe.park(Native Method)         at java.base@11.0.1/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)         at java.base@11.0.1/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)         at java.base@11.0.1/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1182)         at 
java.base@11.0.1/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899)         at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1054)         at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1114)         at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)         at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)    5) Thread[id=20279, name=Scheduler-580240861, state=TIMED_WAITING, group=TGRP-ShardSplitTest]         at java.base@11.0.1/jdk.internal.misc.Unsafe.park(Native Method)         at java.base@11.0.1/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)         at java.base@11.0.1/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)         at java.base@11.0.1/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1182)         at java.base@11.0.1/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899)         at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1054)         at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1114)         at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)         at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)    6) Thread[id=20276, name=qtp988075751-20276, state=TIMED_WAITING, group=TGRP-ShardSplitTest]         at java.base@11.0.1/jdk.internal.misc.Unsafe.park(Native Method)         at java.base@11.0.1/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)         at 
java.base@11.0.1/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)         at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:392)         at app//org.eclipse.jetty.util.thread.QueuedThreadPool.idleJobPoll(QueuedThreadPool.java:656)         at app//org.eclipse.jetty.util.thread.QueuedThreadPool.access$800(QueuedThreadPool.java:46)         at app//org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:720)         at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)    7) Thread[id=20275, name=qtp988075751-20275, state=TIMED_WAITING, group=TGRP-ShardSplitTest]         at java.base@11.0.1/jdk.internal.misc.Unsafe.park(Native Method)         at java.base@11.0.1/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)         at java.base@11.0.1/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)         at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:392)         at app//org.eclipse.jetty.util.thread.QueuedThreadPool.idleJobPoll(QueuedThreadPool.java:656)         at app//org.eclipse.jetty.util.thread.QueuedThreadPool.access$800(QueuedThreadPool.java:46)         at app//org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:720)         at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)    8) Thread[id=20277, name=qtp988075751-20277, state=TIMED_WAITING, group=TGRP-ShardSplitTest]         at java.base@11.0.1/jdk.internal.misc.Unsafe.park(Native Method)         at java.base@11.0.1/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)         at java.base@11.0.1/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)         at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:392)         at 
app//org.eclipse.jetty.util.thread.QueuedThreadPool.idleJobPoll(QueuedThreadPool.java:656)         at app//org.eclipse.jetty.util.thread.QueuedThreadPool.access$800(QueuedThreadPool.java:46)         at app//org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:720)         at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)    9) Thread[id=20271, name=qtp988075751-20271, state=RUNNABLE, group=TGRP-ShardSplitTest]         at java.base@11.0.1/sun.nio.ch.EPoll.wait(Native Method)         at java.base@11.0.1/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:120)         at java.base@11.0.1/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:124)         at java.base@11.0.1/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)         at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.select(ManagedSelector.java:423)         at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.produce(ManagedSelector.java:360)         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produceTask(EatWhatYouKill.java:357)         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:181)         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:168)         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produce(EatWhatYouKill.java:132)         at app//org.eclipse.jetty.io.ManagedSelector$$Lambda$172/0x0000000100352440.run(Unknown Source)         at app//org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:765)         at app//org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:683)         at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)   10) Thread[id=20278, name=qtp988075751-20278, state=TIMED_WAITING, group=TGRP-ShardSplitTest]         at java.base@11.0.1/jdk.internal.misc.Unsafe.park(Native Method)         at 
java.base@11.0.1/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)         at java.base@11.0.1/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)         at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:392)         at app//org.eclipse.jetty.util.thread.QueuedThreadPool.idleJobPoll(QueuedThreadPool.java:656)         at app//org.eclipse.jetty.util.thread.QueuedThreadPool.access$800(QueuedThreadPool.java:46)         at app//org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:720)         at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)

Stack Trace:
com.carrotsearch.randomizedtesting.ThreadLeakError: 10 threads leaked from SUITE scope at org.apache.solr.cloud.api.collections.ShardSplitTest: 
   1) Thread[id=20274, name=qtp988075751-20274, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.1/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.1/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.1/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:392)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.idleJobPoll(QueuedThreadPool.java:656)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.access$800(QueuedThreadPool.java:46)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:720)
        at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)
   2) Thread[id=20270, name=qtp988075751-20270, state=RUNNABLE, group=TGRP-ShardSplitTest]
        at java.base@11.0.1/sun.nio.ch.EPoll.wait(Native Method)
        at java.base@11.0.1/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:120)
        at java.base@11.0.1/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:124)
        at java.base@11.0.1/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.select(ManagedSelector.java:423)
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.produce(ManagedSelector.java:360)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produceTask(EatWhatYouKill.java:357)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:181)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:168)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produce(EatWhatYouKill.java:132)
        at app//org.eclipse.jetty.io.ManagedSelector$$Lambda$172/0x0000000100352440.run(Unknown Source)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:765)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:683)
        at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)
   3) Thread[id=20272, name=qtp988075751-20272-acceptor-0@13dd982f-ServerConnector@21670263{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:34149}, state=RUNNABLE, group=TGRP-ShardSplitTest]
        at java.base@11.0.1/sun.nio.ch.ServerSocketChannelImpl.accept0(Native Method)
        at java.base@11.0.1/sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:533)
        at java.base@11.0.1/sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:285)
        at app//org.eclipse.jetty.server.ServerConnector.accept(ServerConnector.java:385)
        at app//org.eclipse.jetty.server.AbstractConnector$Acceptor.run(AbstractConnector.java:648)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:765)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:683)
        at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)
   4) Thread[id=20402, name=Scheduler-1054557539, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.1/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.1/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.1/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at java.base@11.0.1/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1182)
        at java.base@11.0.1/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899)
        at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1054)
        at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1114)
        at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
        at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)
   5) Thread[id=20279, name=Scheduler-580240861, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.1/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.1/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.1/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at java.base@11.0.1/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1182)
        at java.base@11.0.1/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899)
        at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1054)
        at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1114)
        at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
        at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)
   6) Thread[id=20276, name=qtp988075751-20276, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.1/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.1/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.1/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:392)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.idleJobPoll(QueuedThreadPool.java:656)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.access$800(QueuedThreadPool.java:46)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:720)
        at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)
   7) Thread[id=20275, name=qtp988075751-20275, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.1/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.1/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.1/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:392)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.idleJobPoll(QueuedThreadPool.java:656)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.access$800(QueuedThreadPool.java:46)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:720)
        at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)
   8) Thread[id=20277, name=qtp988075751-20277, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.1/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.1/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.1/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:392)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.idleJobPoll(QueuedThreadPool.java:656)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.access$800(QueuedThreadPool.java:46)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:720)
        at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)
   9) Thread[id=20271, name=qtp988075751-20271, state=RUNNABLE, group=TGRP-ShardSplitTest]
        at java.base@11.0.1/sun.nio.ch.EPoll.wait(Native Method)
        at java.base@11.0.1/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:120)
        at java.base@11.0.1/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:124)
        at java.base@11.0.1/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.select(ManagedSelector.java:423)
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.produce(ManagedSelector.java:360)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produceTask(EatWhatYouKill.java:357)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:181)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:168)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produce(EatWhatYouKill.java:132)
        at app//org.eclipse.jetty.io.ManagedSelector$$Lambda$172/0x0000000100352440.run(Unknown Source)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:765)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:683)
        at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)
  10) Thread[id=20278, name=qtp988075751-20278, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.1/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.1/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.1/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at app//org.eclipse.jetty.util.BlockingArrayQueue.poll(BlockingArrayQueue.java:392)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.idleJobPoll(QueuedThreadPool.java:656)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.access$800(QueuedThreadPool.java:46)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:720)
        at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)
	at __randomizedtesting.SeedInfo.seed([A84462B0258F4DFC]:0)


FAILED:  junit.framework.TestSuite.org.apache.solr.cloud.api.collections.ShardSplitTest

Error Message:
There are still zombie threads that couldn't be terminated:    1) Thread[id=20270, name=qtp988075751-20270, state=RUNNABLE, group=TGRP-ShardSplitTest]         at java.base@11.0.1/sun.nio.ch.EPoll.wait(Native Method)         at java.base@11.0.1/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:120)         at java.base@11.0.1/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:124)         at java.base@11.0.1/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)         at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.select(ManagedSelector.java:423)         at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.produce(ManagedSelector.java:360)         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produceTask(EatWhatYouKill.java:357)         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:181)         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:168)         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produce(EatWhatYouKill.java:132)         at app//org.eclipse.jetty.io.ManagedSelector$$Lambda$172/0x0000000100352440.run(Unknown Source)         at app//org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:765)         at app//org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:683)         at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)    2) Thread[id=20271, name=qtp988075751-20271, state=RUNNABLE, group=TGRP-ShardSplitTest]         at java.base@11.0.1/sun.nio.ch.EPoll.wait(Native Method)         at java.base@11.0.1/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:120)         at java.base@11.0.1/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:124)         at java.base@11.0.1/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)         at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.select(ManagedSelector.java:423) 
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.produce(ManagedSelector.java:360)         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produceTask(EatWhatYouKill.java:357)         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:181)         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:168)         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produce(EatWhatYouKill.java:132)         at app//org.eclipse.jetty.io.ManagedSelector$$Lambda$172/0x0000000100352440.run(Unknown Source)         at app//org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:765)         at app//org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:683)         at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)    3) Thread[id=20402, name=Scheduler-1054557539, state=WAITING, group=TGRP-ShardSplitTest]         at java.base@11.0.1/jdk.internal.misc.Unsafe.park(Native Method)         at java.base@11.0.1/java.util.concurrent.locks.LockSupport.park(LockSupport.java:194)         at java.base@11.0.1/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2081)         at java.base@11.0.1/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1170)         at java.base@11.0.1/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899)         at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1054)         at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1114)         at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)         at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)    4) Thread[id=20279, 
name=Scheduler-580240861, state=TIMED_WAITING, group=TGRP-ShardSplitTest]         at java.base@11.0.1/jdk.internal.misc.Unsafe.park(Native Method)         at java.base@11.0.1/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)         at java.base@11.0.1/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)         at java.base@11.0.1/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1182)         at java.base@11.0.1/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899)         at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1054)         at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1114)         at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)         at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)

Stack Trace:
com.carrotsearch.randomizedtesting.ThreadLeakError: There are still zombie threads that couldn't be terminated:
   1) Thread[id=20270, name=qtp988075751-20270, state=RUNNABLE, group=TGRP-ShardSplitTest]
        at java.base@11.0.1/sun.nio.ch.EPoll.wait(Native Method)
        at java.base@11.0.1/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:120)
        at java.base@11.0.1/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:124)
        at java.base@11.0.1/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.select(ManagedSelector.java:423)
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.produce(ManagedSelector.java:360)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produceTask(EatWhatYouKill.java:357)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:181)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:168)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produce(EatWhatYouKill.java:132)
        at app//org.eclipse.jetty.io.ManagedSelector$$Lambda$172/0x0000000100352440.run(Unknown Source)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:765)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:683)
        at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)
   2) Thread[id=20271, name=qtp988075751-20271, state=RUNNABLE, group=TGRP-ShardSplitTest]
        at java.base@11.0.1/sun.nio.ch.EPoll.wait(Native Method)
        at java.base@11.0.1/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:120)
        at java.base@11.0.1/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:124)
        at java.base@11.0.1/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.select(ManagedSelector.java:423)
        at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.produce(ManagedSelector.java:360)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produceTask(EatWhatYouKill.java:357)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:181)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:168)
        at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produce(EatWhatYouKill.java:132)
        at app//org.eclipse.jetty.io.ManagedSelector$$Lambda$172/0x0000000100352440.run(Unknown Source)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:765)
        at app//org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:683)
        at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)
   3) Thread[id=20402, name=Scheduler-1054557539, state=WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.1/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.1/java.util.concurrent.locks.LockSupport.park(LockSupport.java:194)
        at java.base@11.0.1/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2081)
        at java.base@11.0.1/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1170)
        at java.base@11.0.1/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899)
        at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1054)
        at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1114)
        at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
        at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)
   4) Thread[id=20279, name=Scheduler-580240861, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
        at java.base@11.0.1/jdk.internal.misc.Unsafe.park(Native Method)
        at java.base@11.0.1/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
        at java.base@11.0.1/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
        at java.base@11.0.1/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1182)
        at java.base@11.0.1/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899)
        at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1054)
        at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1114)
        at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
        at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)
	at __randomizedtesting.SeedInfo.seed([A84462B0258F4DFC]:0)


FAILED:  org.apache.solr.update.UpdateLogTest.testApplyPartialUpdatesOnMultipleInPlaceUpdatesInSequence

Error Message:
class java.lang.Integer cannot be cast to class org.apache.lucene.document.NumericDocValuesField (java.lang.Integer is in module java.base of loader 'bootstrap'; org.apache.lucene.document.NumericDocValuesField is in unnamed module of loader 'app')

Stack Trace:
java.lang.ClassCastException: class java.lang.Integer cannot be cast to class org.apache.lucene.document.NumericDocValuesField (java.lang.Integer is in module java.base of loader 'bootstrap'; org.apache.lucene.document.NumericDocValuesField is in unnamed module of loader 'app')
	at __randomizedtesting.SeedInfo.seed([A84462B0258F4DFC:666394D9DC1F9194]:0)
	at org.apache.solr.update.UpdateLogTest.testApplyPartialUpdatesOnMultipleInPlaceUpdatesInSequence(UpdateLogTest.java:77)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.base/java.lang.Thread.run(Thread.java:834)


FAILED:  org.apache.solr.update.UpdateLogTest.testApplyPartialUpdatesDependingOnNonAddShouldThrowException

Error Message:
class java.lang.Integer cannot be cast to class org.apache.lucene.document.NumericDocValuesField (java.lang.Integer is in module java.base of loader 'bootstrap'; org.apache.lucene.document.NumericDocValuesField is in unnamed module of loader 'app')

Stack Trace:
java.lang.ClassCastException: class java.lang.Integer cannot be cast to class org.apache.lucene.document.NumericDocValuesField (java.lang.Integer is in module java.base of loader 'bootstrap'; org.apache.lucene.document.NumericDocValuesField is in unnamed module of loader 'app')
	at __randomizedtesting.SeedInfo.seed([A84462B0258F4DFC:FF2BEAEFE7A31293]:0)
	at org.apache.solr.update.UpdateLogTest.testApplyPartialUpdatesDependingOnNonAddShouldThrowException(UpdateLogTest.java:148)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.base/java.lang.Thread.run(Thread.java:834)


FAILED:  org.apache.solr.update.UpdateLogTest.testApplyPartialUpdatesAfterMultipleCommits

Error Message:
class java.lang.Integer cannot be cast to class org.apache.lucene.document.NumericDocValuesField (java.lang.Integer is in module java.base of loader 'bootstrap'; org.apache.lucene.document.NumericDocValuesField is in unnamed module of loader 'app')

Stack Trace:
java.lang.ClassCastException: class java.lang.Integer cannot be cast to class org.apache.lucene.document.NumericDocValuesField (java.lang.Integer is in module java.base of loader 'bootstrap'; org.apache.lucene.document.NumericDocValuesField is in unnamed module of loader 'app')
	at __randomizedtesting.SeedInfo.seed([A84462B0258F4DFC:953333A11F4613A0]:0)
	at org.apache.solr.update.UpdateLogTest.testApplyPartialUpdatesAfterMultipleCommits(UpdateLogTest.java:128)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.base/java.lang.Thread.run(Thread.java:834)




Build Log:
[...truncated 2063 lines...]
   [junit4] Suite: org.apache.lucene.search.TestSearcherManager
   [junit4]   2> ??? 27, 2019 6:05:32 ? com.carrotsearch.randomizedtesting.RandomizedRunner$QueueUncaughtExceptionsHandler uncaughtException
   [junit4]   2> WARNING: Uncaught exception in thread: Thread[Thread-15914,5,TGRP-TestSearcherManager]
   [junit4]   2> java.lang.RuntimeException: java.nio.file.FileSystemException: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/build/core/test/J2/temp/lucene.search.TestSearcherManager_754AC8E0CE5EB0D2-001/tempDir-001/_7m.nvm: Too many open files
   [junit4]   2> 	at __randomizedtesting.SeedInfo.seed([754AC8E0CE5EB0D2]:0)
   [junit4]   2> 	at org.apache.lucene.search.TestSearcherManager$11.run(TestSearcherManager.java:677)
   [junit4]   2> Caused by: java.nio.file.FileSystemException: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/build/core/test/J2/temp/lucene.search.TestSearcherManager_754AC8E0CE5EB0D2-001/tempDir-001/_7m.nvm: Too many open files
   [junit4]   2> 	at org.apache.lucene.mockfile.HandleLimitFS.onOpen(HandleLimitFS.java:48)
   [junit4]   2> 	at org.apache.lucene.mockfile.HandleTrackingFS.callOpenHook(HandleTrackingFS.java:81)
   [junit4]   2> 	at org.apache.lucene.mockfile.HandleTrackingFS.newFileChannel(HandleTrackingFS.java:197)
   [junit4]   2> 	at org.apache.lucene.mockfile.FilterFileSystemProvider.newFileChannel(FilterFileSystemProvider.java:202)
   [junit4]   2> 	at java.base/java.nio.channels.FileChannel.open(FileChannel.java:292)
   [junit4]   2> 	at java.base/java.nio.channels.FileChannel.open(FileChannel.java:345)
   [junit4]   2> 	at org.apache.lucene.store.NIOFSDirectory.openInput(NIOFSDirectory.java:81)
   [junit4]   2> 	at org.apache.lucene.util.LuceneTestCase.slowFileExists(LuceneTestCase.java:2828)
   [junit4]   2> 	at org.apache.lucene.store.MockDirectoryWrapper.openInput(MockDirectoryWrapper.java:742)
   [junit4]   2> 	at org.apache.lucene.store.Directory.openChecksumInput(Directory.java:156)
   [junit4]   2> 	at org.apache.lucene.store.MockDirectoryWrapper.openChecksumInput(MockDirectoryWrapper.java:1038)
   [junit4]   2> 	at org.apache.lucene.codecs.lucene80.Lucene80NormsProducer.<init>(Lucene80NormsProducer.java:59)
   [junit4]   2> 	at org.apache.lucene.codecs.lucene80.Lucene80NormsFormat.normsProducer(Lucene80NormsFormat.java:90)
   [junit4]   2> 	at org.apache.lucene.codecs.asserting.AssertingNormsFormat.normsProducer(AssertingNormsFormat.java:51)
   [junit4]   2> 	at org.apache.lucene.index.SegmentCoreReaders.<init>(SegmentCoreReaders.java:121)
   [junit4]   2> 	at org.apache.lucene.index.SegmentReader.<init>(SegmentReader.java:84)
   [junit4]   2> 	at org.apache.lucene.index.ReadersAndUpdates.getReader(ReadersAndUpdates.java:177)
   [junit4]   2> 	at org.apache.lucene.index.ReadersAndUpdates.getReadOnlyClone(ReadersAndUpdates.java:219)
   [junit4]   2> 	at org.apache.lucene.index.StandardDirectoryReader.open(StandardDirectoryReader.java:109)
   [junit4]   2> 	at org.apache.lucene.index.IndexWriter.getReader(IndexWriter.java:526)
   [junit4]   2> 	at org.apache.lucene.index.DirectoryReader.open(DirectoryReader.java:116)
   [junit4]   2> 	at org.apache.lucene.search.SearcherManager.<init>(SearcherManager.java:108)
   [junit4]   2> 	at org.apache.lucene.search.SearcherManager.<init>(SearcherManager.java:76)
   [junit4]   2> 	at org.apache.lucene.search.TestSearcherManager$11.run(TestSearcherManager.java:665)
   [junit4]   2> 
   [junit4]   2> NOTE: download the large Jenkins line-docs file by running 'ant get-jenkins-line-docs' in the lucene directory.
   [junit4]   2> NOTE: reproduce with: ant test  -Dtestcase=TestSearcherManager -Dtests.method=testConcurrentIndexCloseSearchAndRefresh -Dtests.seed=754AC8E0CE5EB0D2 -Dtests.multiplier=2 -Dtests.nightly=true -Dtests.slow=true -Dtests.linedocsfile=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/test-data/enwiki.random.lines.txt -Dtests.locale=ar-DZ -Dtests.timezone=Africa/Casablanca -Dtests.asserts=true -Dtests.file.encoding=US-ASCII
   [junit4] ERROR    108s J2 | TestSearcherManager.testConcurrentIndexCloseSearchAndRefresh <<<
   [junit4]    > Throwable #1: com.carrotsearch.randomizedtesting.UncaughtExceptionError: Captured an uncaught exception in thread: Thread[id=17184, name=Thread-15914, state=RUNNABLE, group=TGRP-TestSearcherManager]
   [junit4]    > 	at __randomizedtesting.SeedInfo.seed([754AC8E0CE5EB0D2:B78C0BA093E04CE5]:0)
   [junit4]    > Caused by: java.lang.RuntimeException: java.nio.file.FileSystemException: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/build/core/test/J2/temp/lucene.search.TestSearcherManager_754AC8E0CE5EB0D2-001/tempDir-001/_7m.nvm: Too many open files
   [junit4]    > 	at __randomizedtesting.SeedInfo.seed([754AC8E0CE5EB0D2]:0)
   [junit4]    > 	at org.apache.lucene.search.TestSearcherManager$11.run(TestSearcherManager.java:677)
   [junit4]    > Caused by: java.nio.file.FileSystemException: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/build/core/test/J2/temp/lucene.search.TestSearcherManager_754AC8E0CE5EB0D2-001/tempDir-001/_7m.nvm: Too many open files
   [junit4]    > 	at org.apache.lucene.mockfile.HandleLimitFS.onOpen(HandleLimitFS.java:48)
   [junit4]    > 	at org.apache.lucene.mockfile.HandleTrackingFS.callOpenHook(HandleTrackingFS.java:81)
   [junit4]    > 	at org.apache.lucene.mockfile.HandleTrackingFS.newFileChannel(HandleTrackingFS.java:197)
   [junit4]    > 	at org.apache.lucene.mockfile.FilterFileSystemProvider.newFileChannel(FilterFileSystemProvider.java:202)
   [junit4]    > 	at java.base/java.nio.channels.FileChannel.open(FileChannel.java:292)
   [junit4]    > 	at java.base/java.nio.channels.FileChannel.open(FileChannel.java:345)
   [junit4]    > 	at org.apache.lucene.store.NIOFSDirectory.openInput(NIOFSDirectory.java:81)
   [junit4]    > 	at org.apache.lucene.util.LuceneTestCase.slowFileExists(LuceneTestCase.java:2828)
   [junit4]    > 	at org.apache.lucene.store.MockDirectoryWrapper.openInput(MockDirectoryWrapper.java:742)
   [junit4]    > 	at org.apache.lucene.store.Directory.openChecksumInput(Directory.java:156)
   [junit4]    > 	at org.apache.lucene.store.MockDirectoryWrapper.openChecksumInput(MockDirectoryWrapper.java:1038)
   [junit4]    > 	at org.apache.lucene.codecs.lucene80.Lucene80NormsProducer.<init>(Lucene80NormsProducer.java:59)
   [junit4]    > 	at org.apache.lucene.codecs.lucene80.Lucene80NormsFormat.normsProducer(Lucene80NormsFormat.java:90)
   [junit4]    > 	at org.apache.lucene.codecs.asserting.AssertingNormsFormat.normsProducer(AssertingNormsFormat.java:51)
   [junit4]    > 	at org.apache.lucene.index.SegmentCoreReaders.<init>(SegmentCoreReaders.java:121)
   [junit4]    > 	at org.apache.lucene.index.SegmentReader.<init>(SegmentReader.java:84)
   [junit4]    > 	at org.apache.lucene.index.ReadersAndUpdates.getReader(ReadersAndUpdates.java:177)
   [junit4]    > 	at org.apache.lucene.index.ReadersAndUpdates.getReadOnlyClone(ReadersAndUpdates.java:219)
   [junit4]    > 	at org.apache.lucene.index.StandardDirectoryReader.open(StandardDirectoryReader.java:109)
   [junit4]    > 	at org.apache.lucene.index.IndexWriter.getReader(IndexWriter.java:526)
   [junit4]    > 	at org.apache.lucene.index.DirectoryReader.open(DirectoryReader.java:116)
   [junit4]    > 	at org.apache.lucene.search.SearcherManager.<init>(SearcherManager.java:108)
   [junit4]    > 	at org.apache.lucene.search.SearcherManager.<init>(SearcherManager.java:76)
   [junit4]    > 	at org.apache.lucene.search.TestSearcherManager$11.run(TestSearcherManager.java:665)
   [junit4]   2> NOTE: test params are: codec=Asserting(Lucene80): {date=PostingsFormat(name=MockRandom), extra13=PostingsFormat(name=LuceneFixedGap), extra35=PostingsFormat(name=LuceneFixedGap), extra14=PostingsFormat(name=LuceneVarGapFixedInterval), extra36=PostingsFormat(name=LuceneVarGapFixedInterval), extra33=PostingsFormat(name=MockRandom), extra8=PostingsFormat(name=MockRandom), extra11=PostingsFormat(name=MockRandom), extra9=Lucene50(blocksize=128), extra34=Lucene50(blocksize=128), extra12=Lucene50(blocksize=128), extra17=PostingsFormat(name=LuceneFixedGap), extra39=PostingsFormat(name=LuceneFixedGap), extra18=PostingsFormat(name=LuceneVarGapFixedInterval), extra37=PostingsFormat(name=MockRandom), extra15=PostingsFormat(name=MockRandom), extra16=Lucene50(blocksize=128), extra38=Lucene50(blocksize=128), extra2=PostingsFormat(name=LuceneFixedGap), extra3=PostingsFormat(name=LuceneVarGapFixedInterval), titleTokenized=Lucene50(blocksize=128), extra0=PostingsFormat(name=MockRandom), extra19=PostingsFormat(name=MockRandom), extra1=Lucene50(blocksize=128), body=PostingsFormat(name=MockRandom), title=PostingsFormat(name=LuceneFixedGap), extra6=PostingsFormat(name=LuceneFixedGap), extra7=PostingsFormat(name=LuceneVarGapFixedInterval), extra4=PostingsFormat(name=MockRandom), extra5=Lucene50(blocksize=128), packID=PostingsFormat(name=MockRandom), extra31=PostingsFormat(name=LuceneFixedGap), extra10=PostingsFormat(name=LuceneVarGapFixedInterval), extra32=PostingsFormat(name=LuceneVarGapFixedInterval), extra30=Lucene50(blocksize=128), extra24=PostingsFormat(name=LuceneFixedGap), extra25=PostingsFormat(name=LuceneVarGapFixedInterval), extra22=PostingsFormat(name=MockRandom), extra23=Lucene50(blocksize=128), extra28=PostingsFormat(name=LuceneFixedGap), extra29=PostingsFormat(name=LuceneVarGapFixedInterval), docid=PostingsFormat(name=LuceneVarGapFixedInterval), extra26=PostingsFormat(name=MockRandom), extra27=Lucene50(blocksize=128), 
extra20=PostingsFormat(name=LuceneFixedGap), extra21=PostingsFormat(name=LuceneVarGapFixedInterval)}, docValues:{date=DocValuesFormat(name=Lucene80), extra13=DocValuesFormat(name=Lucene80), extra14=DocValuesFormat(name=Lucene80), extra8=DocValuesFormat(name=Lucene80), extra11=DocValuesFormat(name=Asserting), extra9=DocValuesFormat(name=Asserting), extra12=DocValuesFormat(name=Lucene80), extra17=DocValuesFormat(name=Lucene80), extra18=DocValuesFormat(name=Asserting), extra15=DocValuesFormat(name=Lucene80), extra16=DocValuesFormat(name=Lucene80), extra2=DocValuesFormat(name=Lucene80), extra3=DocValuesFormat(name=Asserting), titleTokenized=DocValuesFormat(name=Lucene80), extra19=DocValuesFormat(name=Lucene80), extra0=DocValuesFormat(name=Lucene80), extra1=DocValuesFormat(name=Lucene80), body=DocValuesFormat(name=Asserting), extra6=DocValuesFormat(name=Asserting), extra7=DocValuesFormat(name=Lucene80), extra4=DocValuesFormat(name=Lucene80), extra5=DocValuesFormat(name=Asserting), extra10=DocValuesFormat(name=Asserting), extra35=DocValuesFormat(name=Asserting), extra36=DocValuesFormat(name=Lucene80), docid_int=DocValuesFormat(name=Lucene80), extra33=DocValuesFormat(name=Lucene80), extra34=DocValuesFormat(name=Asserting), extra39=DocValuesFormat(name=Lucene80), extra37=DocValuesFormat(name=Asserting), extra38=DocValuesFormat(name=Lucene80), title=DocValuesFormat(name=Asserting), packID=DocValuesFormat(name=Asserting), docid_intDV=DocValuesFormat(name=Asserting), extra31=DocValuesFormat(name=Lucene80), extra32=DocValuesFormat(name=Lucene80), extra30=DocValuesFormat(name=Lucene80), extra24=DocValuesFormat(name=Lucene80), extra25=DocValuesFormat(name=Asserting), extra22=DocValuesFormat(name=Lucene80), extra23=DocValuesFormat(name=Lucene80), extra28=DocValuesFormat(name=Asserting), extra29=DocValuesFormat(name=Lucene80), docid=DocValuesFormat(name=Lucene80), extra26=DocValuesFormat(name=Lucene80), extra27=DocValuesFormat(name=Asserting), 
extra20=DocValuesFormat(name=Asserting), extra21=DocValuesFormat(name=Lucene80), titleDV=DocValuesFormat(name=Asserting)}, maxPointsInLeafNode=120, maxMBSortInHeap=6.190694760649883, sim=Asserting(org.apache.lucene.search.similarities.AssertingSimilarity@5e37008f), locale=ar-DZ, timezone=Africa/Casablanca
   [junit4]   2> NOTE: Linux 4.4.0-112-generic amd64/Oracle Corporation 11.0.1 (64-bit)/cpus=4,threads=1,free=462823136,total=536870912
   [junit4]   2> NOTE: All tests run in this JVM: [TestStressDeletes, TestThreadedForceMerge, TestOmitPositions, TestFilterLeafReader, TestTermVectorsReader, Test2BPagedBytes, TestTerm, TestPostingsOffsets, TestWildcardRandom, TestField, TestStressNRT, TestBytesRefAttImpl, TestLucene50LiveDocsFormat, TestLucene80DocValuesFormat, TestOrdinalMap, TestMatchesIterator, TestPagedBytes, TestBooleanScorer, TestTimSorter, TestDocsWithFieldSet, TestCharacterUtils, TestPerSegmentDeletes, TestExceedMaxTermLength, TestFieldsReader, TestIndexInput, TestLockFactory, TestPrefixRandom, TestLMJelinekMercerSimilarity, TestBasics, TestTwoPhaseCommitTool, TestReaderClosed, TestIntRangeFieldQueries, TestNIOFSDirectory, TestReqExclBulkScorer, TestBasicModelIne, TestBinaryDocument, TestBytesRefHash, TestFuzzyQuery, TestSimpleSearchEquivalence, TestSpanNotQuery, TestDocValuesFieldUpdates, TestManyFields, TestIndexWriterOnJRECrash, Test2BDocs, Test2BPositions, TestDirectory, TestSegmentTermDocs, Test2BSortedDocValuesFixedSorted, TestFilterIterator, TestCompetitiveFreqNormAccumulator, TestSPIClassIterator, TestLatLonPointQueries, TestDocIDMerger, TestParallelReaderEmptyIndex, TestSimpleAttributeImpl, TestPositiveScoresOnlyCollector, TestNorms, TestDirectPacked, TestIndexReaderClose, TestSubScorerFreqs, TestNormsFieldExistsQuery, TestFieldInvertState, TestIntroSorter, TestIndexWriterDelete, TestFlushByRamOrCountsPolicy, TestSizeBoundedForceMerge, TestTransactionRollback, TestMultiDocValues, TestCompiledAutomaton, TestFieldValueQuery, TestUsageTrackingFilterCachingPolicy, TestSearchForDuplicates, TestIndexWriterLockRelease, TestFixedLengthBytesRefArray, TestLatLonPointDistanceFeatureQuery, TestSloppyMath, TestComplexExplanations, TestDocValuesRewriteMethod, TestNoDeletionPolicy, TestIndexWriterMerging, TestOperations, TestSearchWithThreads, TestShardSearching, TestByteBlockPool, TestExternalCodecs, TestAllFilesCheckIndexHeader, TestAllFilesDetectTruncation, TestAllFilesHaveChecksumFooter, 
TestAllFilesHaveCodecHeader, TestAtomicUpdate, TestBagOfPositions, TestBagOfPostings, TestBinaryDocValuesUpdates, TestDirectoryReader, TestDirectoryReaderReopen, TestDocInverterPerFieldErrorInfo, TestDocumentWriter, TestDocumentsWriterDeleteQueue, TestDocumentsWriterStallControl, TestIndexTooManyDocs, TestIndexWriter, TestIndexWriterExceptions2, TestIndexWriterOutOfFileDescriptors, TestIndexWriterReader, TestIndexWriterThreadsToSegments, TestIndexWriterUnicode, TestIndexWriterWithThreads, TestIndexableField, TestIndexingSequenceNumbers, TestInfoStream, TestIsCurrent, TestLazyProxSkipping, TestMaxTermFrequency, TestMergeRateLimiter, TestMultiFields, TestMultiLevelSkipList, TestMultiTermsEnum, TestNRTReaderCleanup, TestNRTReaderWithThreads, TestNRTThreads, TestPayloads, TestPayloadsOnVectors, TestPrefixCodedTerms, TestReaderPool, TestReaderWrapperDVTypeCheck, TestRollback, TestSameTokenSamePosition, TestSegmentInfos, TestSegmentMerger, TestSnapshotDeletionPolicy, TestSoftDeletesDirectoryReaderWrapper, TestSoftDeletesRetentionMergePolicy, TestStressIndexing, TestSumDocFreq, TestTermVectorsWriter, TestTermdocPerf, TestTerms, TestTieredMergePolicy, TestAutomatonQuery, TestAutomatonQueryUnicode, TestBlendedTermQuery, TestBlockMaxConjunction, TestBoolean2, TestCustomSearcherSort, TestDateSort, TestDisjunctionMaxQuery, TestDocIdSetIterator, TestDoubleRangeFieldQueries, TestLongValuesSource, TestMinShouldMatch2, TestMultiCollector, TestMultiPhraseQuery, TestMultiTermConstantScore, TestMultiTermQueryRewrites, TestMultiThreadTermVectors, TestMultiset, TestNot, TestPhrasePrefixQuery, TestPhraseQuery, TestPointQueries, TestQueryRescorer, TestQueryVisitor, TestRegexpQuery, TestRegexpRandom, TestSameScoresWithThreads, TestScoreCachingWrappingScorer, TestScorerPerf, TestSearcherManager]
   [junit4] Completed [407/517 (1!)] on J2 in 541.16s, 11 tests, 1 error <<< FAILURES!

[...truncated 11387 lines...]
   [junit4] Suite: org.apache.solr.update.UpdateLogTest
   [junit4]   2> 284884 INFO  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
   [junit4]   2> Creating dataDir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.update.UpdateLogTest_A84462B0258F4DFC-001/init-core-data-001
   [junit4]   2> 284884 WARN  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=3 numCloses=3
   [junit4]   2> 284885 INFO  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.SolrTestCaseJ4 Using TrieFields (NUMERIC_POINTS_SYSPROP=false) w/NUMERIC_DOCVALUES_SYSPROP=false
   [junit4]   2> 284886 INFO  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.util.RandomizeSSL(reason="", ssl=0.0/0.0, value=0.0/0.0, clientAuth=0.0/0.0)
   [junit4]   2> 284887 INFO  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.SolrTestCaseJ4 ####initCore
   [junit4]   2> 284888 INFO  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.c.SolrResourceLoader [null] Added 2 libs to classloader, from paths: [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/lib, /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/lib/classes]
   [junit4]   2> 284903 INFO  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 284919 INFO  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.s.IndexSchema [null] Schema name=inplace-updates
   [junit4]   2> 284923 INFO  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.s.IndexSchema Loaded schema inplace-updates/1.6 with uniqueid field id
   [junit4]   2> 285160 INFO  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 285185 WARN  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@53d7e83b[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 285229 WARN  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@6837365d[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 285262 INFO  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 transient cores
   [junit4]   2> 285262 INFO  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 285290 INFO  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@3ec37ff0
   [junit4]   2> 285306 INFO  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@3ec37ff0
   [junit4]   2> 285307 INFO  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@3ec37ff0
   [junit4]   2> 285324 INFO  (coreLoadExecutor-331-thread-1) [    x:collection1] o.a.s.c.SolrResourceLoader [null] Added 2 libs to classloader, from paths: [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/lib, /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/lib/classes]
   [junit4]   2> 285360 INFO  (coreLoadExecutor-331-thread-1) [    x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 285450 INFO  (coreLoadExecutor-331-thread-1) [    x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=inplace-updates
   [junit4]   2> 285471 INFO  (coreLoadExecutor-331-thread-1) [    x:collection1] o.a.s.s.IndexSchema Loaded schema inplace-updates/1.6 with uniqueid field id
   [junit4]   2> 285471 INFO  (coreLoadExecutor-331-thread-1) [    x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from instancedir /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1, trusted=true
   [junit4]   2> 285495 INFO  (coreLoadExecutor-331-thread-1) [    x:collection1] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.collection1' (registry 'solr.core.collection1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@3ec37ff0
   [junit4]   2> 285496 INFO  (coreLoadExecutor-331-thread-1) [    x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.update.UpdateLogTest_A84462B0258F4DFC-001/init-core-data-001/]
   [junit4]   2> 285528 INFO  (coreLoadExecutor-331-thread-1) [    x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: minMergeSize=1677721, mergeFactor=28, maxMergeSize=2147483648, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=false, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0]
   [junit4]   2> 285576 WARN  (coreLoadExecutor-331-thread-1) [    x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 285846 INFO  (coreLoadExecutor-331-thread-1) [    x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 285846 INFO  (coreLoadExecutor-331-thread-1) [    x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 285859 INFO  (coreLoadExecutor-331-thread-1) [    x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 285859 INFO  (coreLoadExecutor-331-thread-1) [    x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 285860 INFO  (coreLoadExecutor-331-thread-1) [    x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.MockRandomMergePolicy: org.apache.lucene.index.MockRandomMergePolicy@1233d504
   [junit4]   2> 285884 INFO  (coreLoadExecutor-331-thread-1) [    x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@6d29d648[collection1] main]
   [junit4]   2> 285884 WARN  (coreLoadExecutor-331-thread-1) [    x:collection1] o.a.s.r.ManagedResourceStorage Cannot write to config directory /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf; switching to use InMemory storage instead.
   [junit4]   2> 285885 INFO  (coreLoadExecutor-331-thread-1) [    x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 285885 INFO  (coreLoadExecutor-331-thread-1) [    x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1634724143553314816
   [junit4]   2> 285887 INFO  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.SolrTestCaseJ4 ####initCore end
   [junit4]   2> 285894 INFO  (searcherExecutor-332-thread-1-processing-x:collection1) [    x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@6d29d648[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 285897 INFO  (TEST-UpdateLogTest.testApplyPartialUpdatesOnMultipleInPlaceUpdatesInSequence-seed#[A84462B0258F4DFC]) [    ] o.a.s.SolrTestCaseJ4 ###Starting testApplyPartialUpdatesOnMultipleInPlaceUpdatesInSequence
   [junit4]   2> 285905 INFO  (TEST-UpdateLogTest.testApplyPartialUpdatesOnMultipleInPlaceUpdatesInSequence-seed#[A84462B0258F4DFC]) [    ] o.a.s.SolrTestCaseJ4 ###Ending testApplyPartialUpdatesOnMultipleInPlaceUpdatesInSequence
   [junit4]   2> NOTE: download the large Jenkins line-docs file by running 'ant get-jenkins-line-docs' in the lucene directory.
   [junit4]   2> NOTE: reproduce with: ant test  -Dtestcase=UpdateLogTest -Dtests.method=testApplyPartialUpdatesOnMultipleInPlaceUpdatesInSequence -Dtests.seed=A84462B0258F4DFC -Dtests.multiplier=2 -Dtests.nightly=true -Dtests.slow=true -Dtests.linedocsfile=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/test-data/enwiki.random.lines.txt -Dtests.locale=mk -Dtests.timezone=Pacific/Pago_Pago -Dtests.asserts=true -Dtests.file.encoding=US-ASCII
   [junit4] ERROR   0.02s J2 | UpdateLogTest.testApplyPartialUpdatesOnMultipleInPlaceUpdatesInSequence <<<
   [junit4]    > Throwable #1: java.lang.ClassCastException: class java.lang.Integer cannot be cast to class org.apache.lucene.document.NumericDocValuesField (java.lang.Integer is in module java.base of loader 'bootstrap'; org.apache.lucene.document.NumericDocValuesField is in unnamed module of loader 'app')
   [junit4]    > 	at __randomizedtesting.SeedInfo.seed([A84462B0258F4DFC:666394D9DC1F9194]:0)
   [junit4]    > 	at org.apache.solr.update.UpdateLogTest.testApplyPartialUpdatesOnMultipleInPlaceUpdatesInSequence(UpdateLogTest.java:77)
   [junit4]    > 	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
   [junit4]    > 	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
   [junit4]    > 	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
   [junit4]    > 	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
   [junit4]    > 	at java.base/java.lang.Thread.run(Thread.java:834)
   [junit4]   2> 285936 INFO  (TEST-UpdateLogTest.testApplyPartialUpdatesDependingOnNonAddShouldThrowException-seed#[A84462B0258F4DFC]) [    ] o.a.s.SolrTestCaseJ4 ###Starting testApplyPartialUpdatesDependingOnNonAddShouldThrowException
   [junit4]   2> 285938 INFO  (TEST-UpdateLogTest.testApplyPartialUpdatesDependingOnNonAddShouldThrowException-seed#[A84462B0258F4DFC]) [    ] o.a.s.SolrTestCaseJ4 ###Ending testApplyPartialUpdatesDependingOnNonAddShouldThrowException
   [junit4]   2> NOTE: download the large Jenkins line-docs file by running 'ant get-jenkins-line-docs' in the lucene directory.
   [junit4]   2> NOTE: reproduce with: ant test  -Dtestcase=UpdateLogTest -Dtests.method=testApplyPartialUpdatesDependingOnNonAddShouldThrowException -Dtests.seed=A84462B0258F4DFC -Dtests.multiplier=2 -Dtests.nightly=true -Dtests.slow=true -Dtests.linedocsfile=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/test-data/enwiki.random.lines.txt -Dtests.locale=mk -Dtests.timezone=Pacific/Pago_Pago -Dtests.asserts=true -Dtests.file.encoding=US-ASCII
   [junit4] ERROR   0.03s J2 | UpdateLogTest.testApplyPartialUpdatesDependingOnNonAddShouldThrowException <<<
   [junit4]    > Throwable #1: java.lang.ClassCastException: class java.lang.Integer cannot be cast to class org.apache.lucene.document.NumericDocValuesField (java.lang.Integer is in module java.base of loader 'bootstrap'; org.apache.lucene.document.NumericDocValuesField is in unnamed module of loader 'app')
   [junit4]    > 	at __randomizedtesting.SeedInfo.seed([A84462B0258F4DFC:FF2BEAEFE7A31293]:0)
   [junit4]    > 	at org.apache.solr.update.UpdateLogTest.testApplyPartialUpdatesDependingOnNonAddShouldThrowException(UpdateLogTest.java:148)
   [junit4]    > 	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
   [junit4]    > 	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
   [junit4]    > 	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
   [junit4]    > 	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
   [junit4]    > 	at java.base/java.lang.Thread.run(Thread.java:834)
   [junit4]   2> 285954 INFO  (TEST-UpdateLogTest.testApplyPartialUpdatesWithDelete-seed#[A84462B0258F4DFC]) [    ] o.a.s.SolrTestCaseJ4 ###Starting testApplyPartialUpdatesWithDelete
   [junit4]   2> 285967 INFO  (TEST-UpdateLogTest.testApplyPartialUpdatesWithDelete-seed#[A84462B0258F4DFC]) [    ] o.a.s.c.S.Request [collection1]  webapp=null path=null params={qt=/get&id=1&wt=xml} status=0 QTime=3
   [junit4]   2> 285976 INFO  (TEST-UpdateLogTest.testApplyPartialUpdatesWithDelete-seed#[A84462B0258F4DFC]) [    ] o.a.s.SolrTestCaseJ4 ###Ending testApplyPartialUpdatesWithDelete
   [junit4]   2> 286004 INFO  (TEST-UpdateLogTest.testApplyPartialUpdatesAfterMultipleCommits-seed#[A84462B0258F4DFC]) [    ] o.a.s.SolrTestCaseJ4 ###Starting testApplyPartialUpdatesAfterMultipleCommits
   [junit4]   2> NOTE: download the large Jenkins line-docs file by running 'ant get-jenkins-line-docs' in the lucene directory.
   [junit4]   2> 286006 INFO  (TEST-UpdateLogTest.testApplyPartialUpdatesAfterMultipleCommits-seed#[A84462B0258F4DFC]) [    ] o.a.s.SolrTestCaseJ4 ###Ending testApplyPartialUpdatesAfterMultipleCommits
   [junit4]   2> NOTE: reproduce with: ant test  -Dtestcase=UpdateLogTest -Dtests.method=testApplyPartialUpdatesAfterMultipleCommits -Dtests.seed=A84462B0258F4DFC -Dtests.multiplier=2 -Dtests.nightly=true -Dtests.slow=true -Dtests.linedocsfile=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/test-data/enwiki.random.lines.txt -Dtests.locale=mk -Dtests.timezone=Pacific/Pago_Pago -Dtests.asserts=true -Dtests.file.encoding=US-ASCII
   [junit4] ERROR   0.03s J2 | UpdateLogTest.testApplyPartialUpdatesAfterMultipleCommits <<<
   [junit4]    > Throwable #1: java.lang.ClassCastException: class java.lang.Integer cannot be cast to class org.apache.lucene.document.NumericDocValuesField (java.lang.Integer is in module java.base of loader 'bootstrap'; org.apache.lucene.document.NumericDocValuesField is in unnamed module of loader 'app')
   [junit4]    > 	at __randomizedtesting.SeedInfo.seed([A84462B0258F4DFC:953333A11F4613A0]:0)
   [junit4]    > 	at org.apache.solr.update.UpdateLogTest.testApplyPartialUpdatesAfterMultipleCommits(UpdateLogTest.java:128)
   [junit4]    > 	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
   [junit4]    > 	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
   [junit4]    > 	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
   [junit4]    > 	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
   [junit4]    > 	at java.base/java.lang.Thread.run(Thread.java:834)
   [junit4]   2> 286007 INFO  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.SolrTestCaseJ4 ###deleteCore
   [junit4]   2> 286007 INFO  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.c.CoreContainer Shutting down CoreContainer instance=200636748
   [junit4]   2> 286042 INFO  (coreCloseExecutor-337-thread-1) [    x:collection1] o.a.s.c.SolrCore [collection1]  CLOSING SolrCore org.apache.solr.core.SolrCore@35350b9d
   [junit4]   2> 286042 INFO  (coreCloseExecutor-337-thread-1) [    x:collection1] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.core.collection1, tag=35350b9d
   [junit4]   2> 286042 INFO  (coreCloseExecutor-337-thread-1) [    x:collection1] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@6275df25: rootName = null, domain = solr.core.collection1, service url = null, agent id = null] for registry solr.core.collection1 / com.codahale.metrics.MetricRegistry@29dd94dd
   [junit4]   2> 286068 INFO  (coreCloseExecutor-337-thread-1) [    x:collection1] o.a.s.u.DirectUpdateHandler2 Committing on IndexWriter close.
   [junit4]   2> 286068 INFO  (coreCloseExecutor-337-thread-1) [    x:collection1] o.a.s.u.SolrIndexWriter Calling setCommitData with IW:org.apache.solr.update.SolrIndexWriter@2325ffad commitCommandVersion:0
   [junit4]   2> 286112 INFO  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.node, tag=null
   [junit4]   2> 286116 INFO  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@2286e18b: rootName = null, domain = solr.node, service url = null, agent id = null] for registry solr.node / com.codahale.metrics.MetricRegistry@37d96f72
   [junit4]   2> 286129 INFO  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.jvm, tag=null
   [junit4]   2> 286130 INFO  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@22e37f31: rootName = null, domain = solr.jvm, service url = null, agent id = null] for registry solr.jvm / com.codahale.metrics.MetricRegistry@959fe2b
   [junit4]   2> 286132 INFO  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.jetty, tag=null
   [junit4]   2> 286132 INFO  (SUITE-UpdateLogTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@1a83cb62: rootName = null, domain = solr.jetty, service url = null, agent id = null] for registry solr.jetty / com.codahale.metrics.MetricRegistry@5cff144e
   [junit4]   2> NOTE: leaving temporary files on disk at: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.update.UpdateLogTest_A84462B0258F4DFC-001
   [junit4]   2> NOTE: test params are: codec=Asserting(Lucene80): {}, docValues:{}, maxPointsInLeafNode=61, maxMBSortInHeap=5.619275047359774, sim=Asserting(org.apache.lucene.search.similarities.AssertingSimilarity@38e4ab62), locale=mk, timezone=Pacific/Pago_Pago
   [junit4]   2> NOTE: Linux 4.4.0-112-generic amd64/Oracle Corporation 11.0.1 (64-bit)/cpus=4,threads=1,free=140591256,total=299892736
   [junit4]   2> NOTE: All tests run in this JVM: [FileUtilsTest, TestCloudManagedSchema, TestDefaultStatsCache, TestNoOpRegenerator, TestSolrCloudWithKerberosAlt, HdfsThreadLeakTest, XsltUpdateRequestHandlerTest, SmileWriterTest, ConnectionManagerTest, TestCollationFieldDocValues, TestCorePropertiesReload, UpdateLogTest]
   [junit4] Completed [22/858 (1!)] on J2 in 1.37s, 4 tests, 3 errors <<< FAILURES!

[...truncated 883 lines...]
   [junit4] Suite: org.apache.solr.cloud.api.collections.ShardSplitTest
   [junit4]   2> Creating dataDir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.api.collections.ShardSplitTest_A84462B0258F4DFC-001/init-core-data-001
   [junit4]   2> 2416963 INFO  (SUITE-ShardSplitTest-seed#[A84462B0258F4DFC]-worker) [    ] o.a.s.SolrTestCaseJ4 Using PointFields 

[...truncated too long message...]

y.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:765)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:683)
   [junit4]    >         at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)
   [junit4]    >    2) Thread[id=20271, name=qtp988075751-20271, state=RUNNABLE, group=TGRP-ShardSplitTest]
   [junit4]    >         at java.base@11.0.1/sun.nio.ch.EPoll.wait(Native Method)
   [junit4]    >         at java.base@11.0.1/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:120)
   [junit4]    >         at java.base@11.0.1/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:124)
   [junit4]    >         at java.base@11.0.1/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141)
   [junit4]    >         at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.select(ManagedSelector.java:423)
   [junit4]    >         at app//org.eclipse.jetty.io.ManagedSelector$SelectorProducer.produce(ManagedSelector.java:360)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produceTask(EatWhatYouKill.java:357)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:181)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:168)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.produce(EatWhatYouKill.java:132)
   [junit4]    >         at app//org.eclipse.jetty.io.ManagedSelector$$Lambda$172/0x0000000100352440.run(Unknown Source)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:765)
   [junit4]    >         at app//org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:683)
   [junit4]    >         at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)
   [junit4]    >    3) Thread[id=20402, name=Scheduler-1054557539, state=WAITING, group=TGRP-ShardSplitTest]
   [junit4]    >         at java.base@11.0.1/jdk.internal.misc.Unsafe.park(Native Method)
   [junit4]    >         at java.base@11.0.1/java.util.concurrent.locks.LockSupport.park(LockSupport.java:194)
   [junit4]    >         at java.base@11.0.1/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2081)
   [junit4]    >         at java.base@11.0.1/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1170)
   [junit4]    >         at java.base@11.0.1/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899)
   [junit4]    >         at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1054)
   [junit4]    >         at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1114)
   [junit4]    >         at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
   [junit4]    >         at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)
   [junit4]    >    4) Thread[id=20279, name=Scheduler-580240861, state=TIMED_WAITING, group=TGRP-ShardSplitTest]
   [junit4]    >         at java.base@11.0.1/jdk.internal.misc.Unsafe.park(Native Method)
   [junit4]    >         at java.base@11.0.1/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:234)
   [junit4]    >         at java.base@11.0.1/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2123)
   [junit4]    >         at java.base@11.0.1/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1182)
   [junit4]    >         at java.base@11.0.1/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899)
   [junit4]    >         at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1054)
   [junit4]    >         at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1114)
   [junit4]    >         at java.base@11.0.1/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
   [junit4]    >         at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)
   [junit4]    > 	at __randomizedtesting.SeedInfo.seed([A84462B0258F4DFC]:0)
   [junit4] Completed [264/858 (2!)] on J2 in 883.54s, 11 tests, 3 errors <<< FAILURES!

[...truncated 49136 lines...]
-ecj-javadoc-lint-tests:
    [mkdir] Created dir: /tmp/ecj1301885788
 [ecj-lint] Compiling 48 source files to /tmp/ecj1301885788
 [ecj-lint] invalid Class-Path header in manifest of jar file: /home/jenkins/.ivy2/cache/org.restlet.jee/org.restlet/jars/org.restlet-2.3.0.jar
 [ecj-lint] invalid Class-Path header in manifest of jar file: /home/jenkins/.ivy2/cache/org.restlet.jee/org.restlet.ext.servlet/jars/org.restlet.ext.servlet-2.3.0.jar
 [ecj-lint] ----------
 [ecj-lint] 1. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 23)
 [ecj-lint] 	import javax.naming.NamingException;
 [ecj-lint] 	       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] The type javax.naming.NamingException is not accessible
 [ecj-lint] ----------
 [ecj-lint] 2. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 28)
 [ecj-lint] 	public class MockInitialContextFactory implements InitialContextFactory {
 [ecj-lint] 	             ^^^^^^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] The type MockInitialContextFactory must implement the inherited abstract method InitialContextFactory.getInitialContext(Hashtable<?,?>)
 [ecj-lint] ----------
 [ecj-lint] 3. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 30)
 [ecj-lint] 	private final javax.naming.Context context;
 [ecj-lint] 	              ^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] The type javax.naming.Context is not accessible
 [ecj-lint] ----------
 [ecj-lint] 4. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 33)
 [ecj-lint] 	context = mock(javax.naming.Context.class);
 [ecj-lint] 	^^^^^^^
 [ecj-lint] context cannot be resolved to a variable
 [ecj-lint] ----------
 [ecj-lint] 5. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 33)
 [ecj-lint] 	context = mock(javax.naming.Context.class);
 [ecj-lint] 	               ^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] The type javax.naming.Context is not accessible
 [ecj-lint] ----------
 [ecj-lint] 6. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 36)
 [ecj-lint] 	when(context.lookup(anyString())).thenAnswer(invocation -> objects.get(invocation.getArgument(0)));
 [ecj-lint] 	     ^^^^^^^
 [ecj-lint] context cannot be resolved
 [ecj-lint] ----------
 [ecj-lint] 7. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 38)
 [ecj-lint] 	} catch (NamingException e) {
 [ecj-lint] 	         ^^^^^^^^^^^^^^^
 [ecj-lint] NamingException cannot be resolved to a type
 [ecj-lint] ----------
 [ecj-lint] 8. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 45)
 [ecj-lint] 	public javax.naming.Context getInitialContext(Hashtable env) {
 [ecj-lint] 	       ^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] The type javax.naming.Context is not accessible
 [ecj-lint] ----------
 [ecj-lint] 9. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockInitialContextFactory.java (at line 46)
 [ecj-lint] 	return context;
 [ecj-lint] 	       ^^^^^^^
 [ecj-lint] context cannot be resolved to a variable
 [ecj-lint] ----------
 [ecj-lint] 9 problems (9 errors)

BUILD FAILED
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/build.xml:652: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/build.xml:101: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build.xml:651: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/common-build.xml:479: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/common-build.xml:2015: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/common-build.xml:2048: Compile failed; see the compiler error output for details.

Total time: 569 minutes 2 seconds
Build step 'Invoke Ant' marked build as failure
Archiving artifacts
Recording test results
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any

[JENKINS] Lucene-Solr-NightlyTests-master - Build # 1855 - Still Unstable

Posted by Apache Jenkins Server <je...@builds.apache.org>.
Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-master/1855/

2 tests failed.
FAILED:  org.apache.solr.update.TestInPlaceUpdatesDistrib.test

Error Message:
Test abandoned because suite timeout was reached.

Stack Trace:
java.lang.Exception: Test abandoned because suite timeout was reached.
	at __randomizedtesting.SeedInfo.seed([F855A53E8FDDDA62]:0)


FAILED:  junit.framework.TestSuite.org.apache.solr.update.TestInPlaceUpdatesDistrib

Error Message:
Suite timeout exceeded (>= 7200000 msec).

Stack Trace:
java.lang.Exception: Suite timeout exceeded (>= 7200000 msec).
	at __randomizedtesting.SeedInfo.seed([F855A53E8FDDDA62]:0)




Build Log:
[...truncated 15928 lines...]
   [junit4] Suite: org.apache.solr.update.TestInPlaceUpdatesDistrib
   [junit4]   2> 4267322 INFO  (SUITE-TestInPlaceUpdatesDistrib-seed#[F855A53E8FDDDA62]-worker) [    ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
   [junit4]   2> Creating dataDir: /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/init-core-data-001
   [junit4]   2> 4267322 WARN  (SUITE-TestInPlaceUpdatesDistrib-seed#[F855A53E8FDDDA62]-worker) [    ] o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=81 numCloses=81
   [junit4]   2> 4267323 INFO  (SUITE-TestInPlaceUpdatesDistrib-seed#[F855A53E8FDDDA62]-worker) [    ] o.a.s.SolrTestCaseJ4 Using TrieFields (NUMERIC_POINTS_SYSPROP=false) w/NUMERIC_DOCVALUES_SYSPROP=true
   [junit4]   2> 4267324 INFO  (SUITE-TestInPlaceUpdatesDistrib-seed#[F855A53E8FDDDA62]-worker) [    ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (true) via: @org.apache.solr.util.RandomizeSSL(reason="", ssl=0.0/0.0, value=0.0/0.0, clientAuth=0.0/0.0)
   [junit4]   2> 4267324 INFO  (SUITE-TestInPlaceUpdatesDistrib-seed#[F855A53E8FDDDA62]-worker) [    ] o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /d_upd/y
   [junit4]   2> 4267324 INFO  (SUITE-TestInPlaceUpdatesDistrib-seed#[F855A53E8FDDDA62]-worker) [    ] o.a.s.SolrTestCaseJ4 updateLog impl=solr.UpdateLog
   [junit4]   2> 4267324 INFO  (SUITE-TestInPlaceUpdatesDistrib-seed#[F855A53E8FDDDA62]-worker) [    ] o.a.s.SolrTestCaseJ4 ####initCore
   [junit4]   2> 4267324 INFO  (SUITE-TestInPlaceUpdatesDistrib-seed#[F855A53E8FDDDA62]-worker) [    ] o.a.s.c.SolrResourceLoader [null] Added 2 libs to classloader, from paths: [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/lib, /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/lib/classes]
   [junit4]   2> 4267344 INFO  (SUITE-TestInPlaceUpdatesDistrib-seed#[F855A53E8FDDDA62]-worker) [    ] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 4267361 INFO  (SUITE-TestInPlaceUpdatesDistrib-seed#[F855A53E8FDDDA62]-worker) [    ] o.a.s.s.IndexSchema [null] Schema name=inplace-updates
   [junit4]   2> 4267365 INFO  (SUITE-TestInPlaceUpdatesDistrib-seed#[F855A53E8FDDDA62]-worker) [    ] o.a.s.s.IndexSchema Loaded schema inplace-updates/1.6 with uniqueid field id
   [junit4]   2> 4267475 INFO  (SUITE-TestInPlaceUpdatesDistrib-seed#[F855A53E8FDDDA62]-worker) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 4267501 WARN  (SUITE-TestInPlaceUpdatesDistrib-seed#[F855A53E8FDDDA62]-worker) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@6df13681[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 4267525 WARN  (SUITE-TestInPlaceUpdatesDistrib-seed#[F855A53E8FDDDA62]-worker) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@26e44555[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 4267548 INFO  (SUITE-TestInPlaceUpdatesDistrib-seed#[F855A53E8FDDDA62]-worker) [    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 transient cores
   [junit4]   2> 4267548 INFO  (SUITE-TestInPlaceUpdatesDistrib-seed#[F855A53E8FDDDA62]-worker) [    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 4267573 INFO  (SUITE-TestInPlaceUpdatesDistrib-seed#[F855A53E8FDDDA62]-worker) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@38437ddc
   [junit4]   2> 4267589 INFO  (SUITE-TestInPlaceUpdatesDistrib-seed#[F855A53E8FDDDA62]-worker) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@38437ddc
   [junit4]   2> 4267590 INFO  (SUITE-TestInPlaceUpdatesDistrib-seed#[F855A53E8FDDDA62]-worker) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@38437ddc
   [junit4]   2> 4267592 INFO  (coreLoadExecutor-15198-thread-1) [    x:collection1] o.a.s.c.SolrResourceLoader [null] Added 2 libs to classloader, from paths: [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/lib, /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/lib/classes]
   [junit4]   2> 4267614 INFO  (coreLoadExecutor-15198-thread-1) [    x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 4267631 INFO  (coreLoadExecutor-15198-thread-1) [    x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=inplace-updates
   [junit4]   2> 4267634 INFO  (coreLoadExecutor-15198-thread-1) [    x:collection1] o.a.s.s.IndexSchema Loaded schema inplace-updates/1.6 with uniqueid field id
   [junit4]   2> 4267634 INFO  (coreLoadExecutor-15198-thread-1) [    x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from instancedir /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1, trusted=true
   [junit4]   2> 4267635 INFO  (coreLoadExecutor-15198-thread-1) [    x:collection1] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.collection1' (registry 'solr.core.collection1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@38437ddc
   [junit4]   2> 4267635 INFO  (coreLoadExecutor-15198-thread-1) [    x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1], dataDir=[/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/init-core-data-001/]
   [junit4]   2> 4267642 WARN  (coreLoadExecutor-15198-thread-1) [    x:collection1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 4267711 INFO  (coreLoadExecutor-15198-thread-1) [    x:collection1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 4267711 INFO  (coreLoadExecutor-15198-thread-1) [    x:collection1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 4267713 INFO  (coreLoadExecutor-15198-thread-1) [    x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 4267713 INFO  (coreLoadExecutor-15198-thread-1) [    x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 4267714 INFO  (coreLoadExecutor-15198-thread-1) [    x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@655d364d[collection1] main]
   [junit4]   2> 4267715 WARN  (coreLoadExecutor-15198-thread-1) [    x:collection1] o.a.s.r.ManagedResourceStorage Cannot write to config directory /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf; switching to use InMemory storage instead.
   [junit4]   2> 4267715 INFO  (coreLoadExecutor-15198-thread-1) [    x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 4267716 INFO  (coreLoadExecutor-15198-thread-1) [    x:collection1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1634480045531594752
   [junit4]   2> 4267719 INFO  (SUITE-TestInPlaceUpdatesDistrib-seed#[F855A53E8FDDDA62]-worker) [    ] o.a.s.SolrTestCaseJ4 ####initCore end
   [junit4]   2> 4267739 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 4267750 INFO  (ZkTestServer Run Thread) [    ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 4267750 INFO  (ZkTestServer Run Thread) [    ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 4267751 INFO  (searcherExecutor-15199-thread-1-processing-x:collection1) [    x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@655d364d[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 4267850 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.ZkTestServer start zk server on port:46693
   [junit4]   2> 4267850 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:46693
   [junit4]   2> 4267850 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 46693
   [junit4]   2> 4267856 INFO  (zkConnectionManagerCallback-22573-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4267859 INFO  (zkConnectionManagerCallback-22575-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4267860 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.ZkTestServer put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml to /configs/conf1/solrconfig.xml
   [junit4]   2> 4267862 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.ZkTestServer put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/schema-inplace-updates.xml to /configs/conf1/schema.xml
   [junit4]   2> 4267864 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.ZkTestServer put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
   [junit4]   2> 4267865 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.ZkTestServer put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/stopwords.txt to /configs/conf1/stopwords.txt
   [junit4]   2> 4267866 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.ZkTestServer put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/protwords.txt to /configs/conf1/protwords.txt
   [junit4]   2> 4267868 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.ZkTestServer put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/currency.xml to /configs/conf1/currency.xml
   [junit4]   2> 4267869 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.ZkTestServer put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml to /configs/conf1/enumsConfig.xml
   [junit4]   2> 4267870 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.ZkTestServer put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json to /configs/conf1/open-exchange-rates.json
   [junit4]   2> 4267871 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.ZkTestServer put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt to /configs/conf1/mapping-ISOLatin1Accent.txt
   [junit4]   2> 4267873 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.ZkTestServer put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt to /configs/conf1/old_synonyms.txt
   [junit4]   2> 4267874 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.ZkTestServer put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/synonyms.txt to /configs/conf1/synonyms.txt
   [junit4]   2> 4267876 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.AbstractFullDistribZkTestBase Will use NRT replicas unless explicitly asked otherwise
   [junit4]   2> 4268056 WARN  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 4268057 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 4268057 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 4268057 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 4268058 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 4268058 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 4268058 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 4268059 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@745f4f14{/d_upd/y,null,AVAILABLE}
   [junit4]   2> 4268060 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@2d60136a{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:42715}
   [junit4]   2> 4268060 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.e.j.s.Server Started @4268119ms
   [junit4]   2> 4268060 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/d_upd/y, solr.data.dir=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/tempDir-001/control/data, hostPort=42715, coreRootDirectory=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/../../../../../../../../../../../x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/control-001/cores}
   [junit4]   2> 4268060 ERROR (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 4268060 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 4268061 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 9.0.0
   [junit4]   2> 4268061 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 4268061 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 4268061 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-05-25T05:20:40.447182Z
   [junit4]   2> 4268063 INFO  (zkConnectionManagerCallback-22577-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4268064 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 4268064 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/control-001/solr.xml
   [junit4]   2> 4268067 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 4268067 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 4268069 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@38437ddc, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 4268540 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 4268541 WARN  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@7864e6d4[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 4268544 WARN  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@4baecc99[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 4268546 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:46693/solr
   [junit4]   2> 4268547 INFO  (zkConnectionManagerCallback-22584-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4268550 INFO  (zkConnectionManagerCallback-22586-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4268648 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:42715_d_upd%2Fy
   [junit4]   2> 4268649 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.Overseer Overseer (id=73361523819937796-127.0.0.1:42715_d_upd%2Fy-n_0000000000) starting
   [junit4]   2> 4268656 INFO  (zkConnectionManagerCallback-22593-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4268659 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:46693/solr ready
   [junit4]   2> 4268660 INFO  (OverseerStateUpdate-73361523819937796-127.0.0.1:42715_d_upd%2Fy-n_0000000000) [n:127.0.0.1:42715_d_upd%2Fy    ] o.a.s.c.Overseer Starting to work on the main queue : 127.0.0.1:42715_d_upd%2Fy
   [junit4]   2> 4268661 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:42715_d_upd%2Fy
   [junit4]   2> 4268662 INFO  (OverseerStateUpdate-73361523819937796-127.0.0.1:42715_d_upd%2Fy-n_0000000000) [n:127.0.0.1:42715_d_upd%2Fy    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 4268663 INFO  (zkCallback-22592-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 4268682 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 4268717 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@38437ddc
   [junit4]   2> 4268741 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@38437ddc
   [junit4]   2> 4268741 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@38437ddc
   [junit4]   2> 4268744 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/../../../../../../../../../../../x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/control-001/cores
   [junit4]   2> 4268794 INFO  (zkConnectionManagerCallback-22599-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4268796 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 4268797 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:46693/solr ready
   [junit4]   2> 4268800 INFO  (qtp93907865-94140) [n:127.0.0.1:42715_d_upd%2Fy    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:42715_d_upd%252Fy&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 4268803 INFO  (OverseerThreadFactory-15213-thread-1) [    ] o.a.s.c.a.c.CreateCollectionCmd Create collection control_collection
   [junit4]   2> 4268913 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy    x:control_collection_shard1_replica_n1] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 4268913 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy    x:control_collection_shard1_replica_n1] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 4269928 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 4269947 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.s.IndexSchema [control_collection_shard1_replica_n1] Schema name=inplace-updates
   [junit4]   2> 4269951 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.s.IndexSchema Loaded schema inplace-updates/1.6 with uniqueid field id
   [junit4]   2> 4269951 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.CoreContainer Creating SolrCore 'control_collection_shard1_replica_n1' using configuration from collection control_collection, trusted=true
   [junit4]   2> 4269952 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.control_collection.shard1.replica_n1' (registry 'solr.core.control_collection.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@38437ddc
   [junit4]   2> 4269952 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SolrCore [[control_collection_shard1_replica_n1] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/control-001/cores/control_collection_shard1_replica_n1], dataDir=[/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/../../../../../../../../../../../x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/control-001/cores/control_collection_shard1_replica_n1/data/]
   [junit4]   2> 4269957 WARN  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 4270024 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 4270024 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 4270026 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 4270026 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 4270028 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.s.SolrIndexSearcher Opening [Searcher@5b85deeb[control_collection_shard1_replica_n1] main]
   [junit4]   2> 4270029 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 4270030 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 4270031 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 4270031 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1634480047959048192
   [junit4]   2> 4270036 INFO  (searcherExecutor-15218-thread-1-processing-n:127.0.0.1:42715_d_upd%2Fy x:control_collection_shard1_replica_n1 c:control_collection s:shard1) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SolrCore [control_collection_shard1_replica_n1] Registered new searcher Searcher@5b85deeb[control_collection_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 4270047 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ZkShardTerms Successful update of terms at /collections/control_collection/terms/shard1 to Terms{values={core_node2=0}, version=0}
   [junit4]   2> 4270047 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/control_collection/leaders/shard1
   [junit4]   2> 4270053 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 4270053 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 4270053 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:42715/d_upd/y/control_collection_shard1_replica_n1/
   [junit4]   2> 4270054 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 4270054 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SyncStrategy http://127.0.0.1:42715/d_upd/y/control_collection_shard1_replica_n1/ has no replicas
   [junit4]   2> 4270054 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContextBase Creating leader registration node /collections/control_collection/leaders/shard1/leader after winning as /collections/control_collection/leader_elect/shard1/election/73361523819937796-core_node2-n_0000000000
   [junit4]   2> 4270057 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:42715/d_upd/y/control_collection_shard1_replica_n1/ shard1
   [junit4]   2> 4270159 INFO  (zkCallback-22585-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 4270160 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 4270163 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT} status=0 QTime=1250
   [junit4]   2> 4270178 INFO  (zkCallback-22585-thread-2) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 4270187 INFO  (qtp93907865-94140) [n:127.0.0.1:42715_d_upd%2Fy    ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at most 45 seconds. Check all shard replicas
   [junit4]   2> 4270284 INFO  (zkCallback-22585-thread-2) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 4270284 INFO  (zkCallback-22585-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 4270285 INFO  (zkCallback-22585-thread-3) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 4270285 INFO  (qtp93907865-94140) [n:127.0.0.1:42715_d_upd%2Fy    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:42715_d_upd%252Fy&wt=javabin&version=2} status=0 QTime=1485
   [junit4]   2> 4270294 INFO  (zkConnectionManagerCallback-22605-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4270296 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 4270297 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:46693/solr ready
   [junit4]   2> 4270297 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection loss:false
   [junit4]   2> 4270299 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&name=collection1&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=&stateFormat=2&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 4270303 INFO  (OverseerCollectionConfigSetProcessor-73361523819937796-127.0.0.1:42715_d_upd%2Fy-n_0000000000) [    ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000000 doesn't exist.  Requestor may have disconnected from ZooKeeper
   [junit4]   2> 4270303 INFO  (OverseerThreadFactory-15213-thread-2) [    ] o.a.s.c.a.c.CreateCollectionCmd Create collection collection1
   [junit4]   2> 4270509 WARN  (OverseerThreadFactory-15213-thread-2) [    ] o.a.s.c.a.c.CreateCollectionCmd It is unusual to create a collection (collection1) without cores.
   [junit4]   2> 4270511 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy    ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at most 45 seconds. Check all shard replicas
   [junit4]   2> 4270512 INFO  (qtp93907865-94142) [n:127.0.0.1:42715_d_upd%2Fy    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={collection.configName=conf1&name=collection1&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=&stateFormat=2&wt=javabin&version=2} status=0 QTime=213
   [junit4]   2> 4270512 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.AbstractFullDistribZkTestBase Creating jetty instances pullReplicaCount=0 numOtherReplicas=3
   [junit4]   2> 4270685 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/shard-1-001 of type NRT
   [junit4]   2> 4270685 WARN  (closeThreadPool-22606-thread-1) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 4270686 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 4270686 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 4270686 INFO  (closeThreadPool-22606-thread-1) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 4270687 INFO  (closeThreadPool-22606-thread-1) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 4270688 INFO  (closeThreadPool-22606-thread-1) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 4270688 INFO  (closeThreadPool-22606-thread-1) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 4270688 INFO  (closeThreadPool-22606-thread-1) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@48f00a9f{/d_upd/y,null,AVAILABLE}
   [junit4]   2> 4270688 INFO  (closeThreadPool-22606-thread-1) [    ] o.e.j.s.AbstractConnector Started ServerConnector@3ab66e26{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:39567}
   [junit4]   2> 4270688 INFO  (closeThreadPool-22606-thread-1) [    ] o.e.j.s.Server Started @4270747ms
   [junit4]   2> 4270688 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/d_upd/y, solrconfig=solrconfig.xml, solr.data.dir=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/tempDir-001/jetty1, hostPort=39567, coreRootDirectory=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/shard-1-001/cores, replicaType=NRT}
   [junit4]   2> 4270689 ERROR (closeThreadPool-22606-thread-1) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 4270689 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 4270689 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 9.0.0
   [junit4]   2> 4270689 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 4270689 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 4270689 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-05-25T05:20:43.075443Z
   [junit4]   2> 4270692 INFO  (zkConnectionManagerCallback-22608-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4270693 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 4270693 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/shard-1-001/solr.xml
   [junit4]   2> 4270696 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 4270696 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 4270698 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@38437ddc, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 4270769 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 4270779 WARN  (closeThreadPool-22606-thread-1) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@2b02e89a[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 4270782 WARN  (closeThreadPool-22606-thread-1) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@e4772e7[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 4270784 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:46693/solr
   [junit4]   2> 4270785 INFO  (zkConnectionManagerCallback-22615-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4270794 INFO  (zkConnectionManagerCallback-22617-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4270801 INFO  (closeThreadPool-22606-thread-1) [n:127.0.0.1:39567_d_upd%2Fy    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 4270806 INFO  (closeThreadPool-22606-thread-1) [n:127.0.0.1:39567_d_upd%2Fy    ] o.a.s.c.ZkController Publish node=127.0.0.1:39567_d_upd%2Fy as DOWN
   [junit4]   2> 4270807 INFO  (closeThreadPool-22606-thread-1) [n:127.0.0.1:39567_d_upd%2Fy    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 4270808 INFO  (closeThreadPool-22606-thread-1) [n:127.0.0.1:39567_d_upd%2Fy    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:39567_d_upd%2Fy
   [junit4]   2> 4270810 INFO  (zkCallback-22592-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 4270810 INFO  (zkCallback-22585-thread-3) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 4270815 INFO  (zkCallback-22604-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 4270817 INFO  (zkCallback-22616-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 4270817 INFO  (zkConnectionManagerCallback-22624-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4270818 INFO  (closeThreadPool-22606-thread-1) [n:127.0.0.1:39567_d_upd%2Fy    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 4270820 INFO  (closeThreadPool-22606-thread-1) [n:127.0.0.1:39567_d_upd%2Fy    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:46693/solr ready
   [junit4]   2> 4270846 INFO  (closeThreadPool-22606-thread-1) [n:127.0.0.1:39567_d_upd%2Fy    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 4270897 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/shard-2-001 of type NRT
   [junit4]   2> 4270907 WARN  (closeThreadPool-22606-thread-2) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 4270907 INFO  (closeThreadPool-22606-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 4270907 INFO  (closeThreadPool-22606-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 4270907 INFO  (closeThreadPool-22606-thread-2) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 4270926 INFO  (closeThreadPool-22606-thread-1) [n:127.0.0.1:39567_d_upd%2Fy    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@38437ddc
   [junit4]   2> 4271056 INFO  (closeThreadPool-22606-thread-2) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 4271056 INFO  (closeThreadPool-22606-thread-2) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 4271056 INFO  (closeThreadPool-22606-thread-2) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 4271057 INFO  (closeThreadPool-22606-thread-2) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@6faaa699{/d_upd/y,null,AVAILABLE}
   [junit4]   2> 4271057 INFO  (closeThreadPool-22606-thread-2) [    ] o.e.j.s.AbstractConnector Started ServerConnector@56155c40{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:40680}
   [junit4]   2> 4271058 INFO  (closeThreadPool-22606-thread-2) [    ] o.e.j.s.Server Started @4271116ms
   [junit4]   2> 4271058 INFO  (closeThreadPool-22606-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/d_upd/y, solrconfig=solrconfig.xml, solr.data.dir=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/tempDir-001/jetty2, hostPort=40680, coreRootDirectory=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/shard-2-001/cores, replicaType=NRT}
   [junit4]   2> 4271058 ERROR (closeThreadPool-22606-thread-2) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 4271058 INFO  (closeThreadPool-22606-thread-2) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 4271058 INFO  (closeThreadPool-22606-thread-2) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr? version 9.0.0
   [junit4]   2> 4271058 INFO  (closeThreadPool-22606-thread-2) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 4271058 INFO  (closeThreadPool-22606-thread-2) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 4271058 INFO  (closeThreadPool-22606-thread-2) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-05-25T05:20:43.444718Z
   [junit4]   2> 4271061 INFO  (zkConnectionManagerCallback-22627-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4271062 INFO  (closeThreadPool-22606-thread-2) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 4271062 INFO  (closeThreadPool-22606-thread-2) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/shard-2-001/solr.xml
   [junit4]   2> 4271066 INFO  (closeThreadPool-22606-thread-2) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 4271066 INFO  (closeThreadPool-22606-thread-2) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 4271067 INFO  (closeThreadPool-22606-thread-2) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@38437ddc, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 4271073 INFO  (closeThreadPool-22606-thread-1) [n:127.0.0.1:39567_d_upd%2Fy    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@38437ddc
   [junit4]   2> 4271074 INFO  (closeThreadPool-22606-thread-1) [n:127.0.0.1:39567_d_upd%2Fy    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@38437ddc
   [junit4]   2> 4271077 INFO  (closeThreadPool-22606-thread-1) [n:127.0.0.1:39567_d_upd%2Fy    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/shard-1-001/cores
   [junit4]   2> 4271149 INFO  (closeThreadPool-22606-thread-2) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 4271150 WARN  (closeThreadPool-22606-thread-2) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@6a668f36[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 4271155 WARN  (closeThreadPool-22606-thread-2) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@4c51c8be[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 4271156 INFO  (closeThreadPool-22606-thread-2) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:46693/solr
   [junit4]   2> 4271158 INFO  (zkConnectionManagerCallback-22634-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4271178 INFO  (zkConnectionManagerCallback-22636-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4271183 INFO  (closeThreadPool-22606-thread-2) [n:127.0.0.1:40680_d_upd%2Fy    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 4271189 INFO  (closeThreadPool-22606-thread-2) [n:127.0.0.1:40680_d_upd%2Fy    ] o.a.s.c.ZkController Publish node=127.0.0.1:40680_d_upd%2Fy as DOWN
   [junit4]   2> 4271190 INFO  (closeThreadPool-22606-thread-2) [n:127.0.0.1:40680_d_upd%2Fy    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 4271190 INFO  (closeThreadPool-22606-thread-2) [n:127.0.0.1:40680_d_upd%2Fy    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:40680_d_upd%2Fy
   [junit4]   2> 4271191 INFO  (zkCallback-22616-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 4271191 INFO  (zkCallback-22604-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 4271192 INFO  (zkCallback-22592-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 4271192 INFO  (zkCallback-22585-thread-3) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 4271192 INFO  (zkCallback-22623-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 4271195 INFO  (zkConnectionManagerCallback-22643-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4271195 INFO  (zkCallback-22635-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 4271197 INFO  (closeThreadPool-22606-thread-2) [n:127.0.0.1:40680_d_upd%2Fy    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 4271199 INFO  (closeThreadPool-22606-thread-2) [n:127.0.0.1:40680_d_upd%2Fy    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:46693/solr ready
   [junit4]   2> 4271220 INFO  (closeThreadPool-22606-thread-2) [n:127.0.0.1:40680_d_upd%2Fy    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 4271253 INFO  (closeThreadPool-22606-thread-2) [n:127.0.0.1:40680_d_upd%2Fy    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@38437ddc
   [junit4]   2> 4271275 INFO  (closeThreadPool-22606-thread-2) [n:127.0.0.1:40680_d_upd%2Fy    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@38437ddc
   [junit4]   2> 4271276 INFO  (closeThreadPool-22606-thread-2) [n:127.0.0.1:40680_d_upd%2Fy    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@38437ddc
   [junit4]   2> 4271277 INFO  (TEST-TestInPlaceUpdatesDistrib.test-seed#[F855A53E8FDDDA62]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/shard-3-001 of type NRT
   [junit4]   2> 4271278 WARN  (closeThreadPool-22606-thread-1) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 4271278 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 4271278 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 4271278 INFO  (closeThreadPool-22606-thread-2) [n:127.0.0.1:40680_d_upd%2Fy    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/shard-2-001/cores
   [junit4]   2> 4271278 INFO  (closeThreadPool-22606-thread-1) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 4271280 INFO  (closeThreadPool-22606-thread-1) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 4271280 INFO  (closeThreadPool-22606-thread-1) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 4271280 INFO  (closeThreadPool-22606-thread-1) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 4271280 INFO  (closeThreadPool-22606-thread-1) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@c9454f4{/d_upd/y,null,AVAILABLE}
   [junit4]   2> 4271281 INFO  (closeThreadPool-22606-thread-1) [    ] o.e.j.s.AbstractConnector Started ServerConnector@68eee243{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:45171}
   [junit4]   2> 4271281 INFO  (closeThreadPool-22606-thread-1) [    ] o.e.j.s.Server Started @4271340ms
   [junit4]   2> 4271281 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/d_upd/y, solrconfig=solrconfig.xml, solr.data.dir=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/tempDir-001/jetty3, hostPort=45171, coreRootDirectory=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/shard-3-001/cores, replicaType=NRT}
   [junit4]   2> 4271281 ERROR (closeThreadPool-22606-thread-1) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 4271281 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 4271281 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr? version 9.0.0
   [junit4]   2> 4271281 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 4271282 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 4271282 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-05-25T05:20:43.668017Z
   [junit4]   2> 4271284 INFO  (zkConnectionManagerCallback-22646-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4271285 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 4271285 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/shard-3-001/solr.xml
   [junit4]   2> 4271288 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 4271288 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 4271290 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@38437ddc, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 4271491 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 4271492 WARN  (closeThreadPool-22606-thread-1) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@d2263d[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 4271495 WARN  (closeThreadPool-22606-thread-1) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@665aefc4[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 4271497 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:46693/solr
   [junit4]   2> 4271499 INFO  (zkConnectionManagerCallback-22653-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4271501 INFO  (zkConnectionManagerCallback-22655-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4271507 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 4271512 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.c.ZkController Publish node=127.0.0.1:45171_d_upd%2Fy as DOWN
   [junit4]   2> 4271513 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 4271513 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:45171_d_upd%2Fy
   [junit4]   2> 4271515 INFO  (zkCallback-22623-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 4271515 INFO  (zkCallback-22585-thread-3) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 4271515 INFO  (zkCallback-22604-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 4271515 INFO  (zkCallback-22616-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 4271515 INFO  (zkCallback-22592-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 4271515 INFO  (zkCallback-22635-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 4271515 INFO  (zkCallback-22642-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 4271516 INFO  (zkCallback-22654-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 4271518 INFO  (zkConnectionManagerCallback-22662-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 4271519 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (4)
   [junit4]   2> 4271521 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:46693/solr ready
   [junit4]   2> 4271556 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 4271594 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@38437ddc
   [junit4]   2> 4271626 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@38437ddc
   [junit4]   2> 4271627 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@38437ddc
   [junit4]   2> 4271668 INFO  (closeThreadPool-22606-thread-1) [    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/shard-3-001/cores
   [junit4]   2> 4271763 INFO  (qtp693706616-94200) [n:127.0.0.1:39567_d_upd%2Fy    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :addreplica with params node=127.0.0.1:39567_d_upd%252Fy&action=ADDREPLICA&collection=collection1&shard=shard1&type=NRT&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 4271763 INFO  (qtp693706616-94201) [n:127.0.0.1:39567_d_upd%2Fy    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :addreplica with params node=127.0.0.1:40680_d_upd%252Fy&action=ADDREPLICA&collection=collection1&shard=shard1&type=NRT&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 4271764 INFO  (qtp693706616-94198) [n:127.0.0.1:39567_d_upd%2Fy    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :addreplica with params node=127.0.0.1:45171_d_upd%252Fy&action=ADDREPLICA&collection=collection1&shard=shard1&type=NRT&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 4271775 INFO  (OverseerCollectionConfigSetProcessor-73361523819937796-127.0.0.1:42715_d_upd%2Fy-n_0000000000) [    ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000002 doesn't exist.  Requestor may have disconnected from ZooKeeper
   [junit4]   2> 4271777 INFO  (OverseerThreadFactory-15213-thread-3) [ c:collection1 s:shard1  ] o.a.s.c.a.c.AddReplicaCmd Node Identified 127.0.0.1:39567_d_upd%2Fy for creating new replica of shard shard1 for collection collection1
   [junit4]   2> 4271780 INFO  (OverseerThreadFactory-15213-thread-3) [ c:collection1 s:shard1  ] o.a.s.c.a.c.AddReplicaCmd Returning CreateReplica command.
   [junit4]   2> 4271783 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy    x:collection1_shard1_replica_n1] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&name=collection1_shard1_replica_n1&action=CREATE&collection=collection1&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 4272799 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 4272818 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.s.IndexSchema [collection1_shard1_replica_n1] Schema name=inplace-updates
   [junit4]   2> 4272821 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.s.IndexSchema Loaded schema inplace-updates/1.6 with uniqueid field id
   [junit4]   2> 4272822 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.CoreContainer Creating SolrCore 'collection1_shard1_replica_n1' using configuration from collection collection1, trusted=true
   [junit4]   2> 4272822 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.collection1.shard1.replica_n1' (registry 'solr.core.collection1.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@38437ddc
   [junit4]   2> 4272822 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.SolrCore [[collection1_shard1_replica_n1] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/shard-1-001/cores/collection1_shard1_replica_n1], dataDir=[/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/shard-1-001/cores/collection1_shard1_replica_n1/data/]
   [junit4]   2> 4272829 WARN  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 4272908 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 4272908 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 4272910 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 4272910 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 4272913 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.s.SolrIndexSearcher Opening [Searcher@4c0a1651[collection1_shard1_replica_n1] main]
   [junit4]   2> 4272915 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 4272915 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 4272916 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 4272916 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1634480050984189952
   [junit4]   2> 4272922 INFO  (searcherExecutor-15250-thread-1-processing-n:127.0.0.1:39567_d_upd%2Fy x:collection1_shard1_replica_n1 c:collection1 s:shard1) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.SolrCore [collection1_shard1_replica_n1] Registered new searcher Searcher@4c0a1651[collection1_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 4272926 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.ZkShardTerms Successful update of terms at /collections/collection1/terms/shard1 to Terms{values={core_node2=0}, version=0}
   [junit4]   2> 4272927 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/collection1/leaders/shard1
   [junit4]   2> 4272934 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 4272934 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 4272934 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:39567/d_upd/y/collection1_shard1_replica_n1/
   [junit4]   2> 4272934 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 4272935 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.SyncStrategy http://127.0.0.1:39567/d_upd/y/collection1_shard1_replica_n1/ has no replicas
   [junit4]   2> 4272935 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContextBase Creating leader registration node /collections/collection1/leaders/shard1/leader after winning as /collections/collection1/leader_elect/shard1/election/73361523819937802-core_node2-n_0000000000
   [junit4]   2> 4272937 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:39567/d_upd/y/collection1_shard1_replica_n1/ shard1
   [junit4]   2> 4273039 INFO  (zkCallback-22616-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 4273040 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 4273043 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&collection.configName=conf1&name=collection1_shard1_replica_n1&action=CREATE&collection=collection1&shard=shard1&wt=javabin&version=2&replicaType=NRT} status=0 QTime=1259
   [junit4]   2> 4273043 INFO  (zkCallback-22616-thread-2) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 4273046 INFO  (qtp693706616-94200) [n:127.0.0.1:39567_d_upd%2Fy c:collection1   ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={node=127.0.0.1:39567_d_upd%252Fy&action=ADDREPLICA&collection=collection1&shard=shard1&type=NRT&wt=javabin&version=2} status=0 QTime=1283
   [junit4]   2> 4273143 INFO  (zkCallback-22616-thread-2) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 4273143 INFO  (zkCallback-22616-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 4273145 INFO  (zkCallback-22616-thread-3) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 4273795 INFO  (OverseerCollectionConfigSetProcessor-73361523819937796-127.0.0.1:42715_d_upd%2Fy-n_0000000000) [    ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000004 doesn't exist.  Requestor may have disconnected from ZooKeeper
   [junit4]   2> 4273796 INFO  (OverseerThreadFactory-15213-thread-4) [ c:collection1 s:shard1  ] o.a.s.c.a.c.AddReplicaCmd Node Identified 127.0.0.1:40680_d_upd%2Fy for creating new replica of shard shard1 for collection collection1
   [junit4]   2> 4273797 INFO  (OverseerThreadFactory-15213-thread-4) [ c:collection1 s:shard1  ] o.a.s.c.a.c.AddReplicaCmd Returning CreateReplica command.
   [junit4]   2> 4273823 INFO  (qtp1864938593-94231) [n:127.0.0.1:40680_d_upd%2Fy    x:collection1_shard1_replica_n3] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&name=collection1_shard1_replica_n3&action=CREATE&collection=collection1&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 4273928 INFO  (zkCallback-22616-thread-3) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 4273928 INFO  (zkCallback-22616-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 4273928 INFO  (zkCallback-22616-thread-2) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 4274835 INFO  (qtp1864938593-94231) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 4274853 INFO  (qtp1864938593-94231) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.s.IndexSchema [collection1_shard1_replica_n3] Schema name=inplace-updates
   [junit4]   2> 4274856 INFO  (qtp1864938593-94231) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.s.IndexSchema Loaded schema inplace-updates/1.6 with uniqueid field id
   [junit4]   2> 4274856 INFO  (qtp1864938593-94231) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.CoreContainer Creating SolrCore 'collection1_shard1_replica_n3' using configuration from collection collection1, trusted=true
   [junit4]   2> 4274857 INFO  (qtp1864938593-94231) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.collection1.shard1.replica_n3' (registry 'solr.core.collection1.shard1.replica_n3') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@38437ddc
   [junit4]   2> 4274857 INFO  (qtp1864938593-94231) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.SolrCore [[collection1_shard1_replica_n3] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/shard-2-001/cores/collection1_shard1_replica_n3], dataDir=[/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.update.TestInPlaceUpdatesDistrib_F855A53E8FDDDA62-001/shard-2-001/cores/collection1_shard1_replica_n3/data/]
   [junit4]   2> 4274863 WARN  (qtp1864938593-94231) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 4274926 INFO  (qtp1864938593-94231) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 4274926 INFO  (qtp1864938593-94231) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 4274928 INFO  (qtp1864938593-94231) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 4274928 INFO  (qtp1864938593-94231) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 4274930 INFO  (qtp1864938593-94231) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.s.SolrIndexSearcher Opening [Searcher@3b4ea1e9[collection1_shard1_replica_n3] main]
   [junit4]   2> 4274932 INFO  (qtp1864938593-94231) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 4274932 INFO  (qtp1864938593-94231) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 4274933 INFO  (qtp1864938593-94231) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 4274933 INFO  (qtp1864938593-94231) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1634480053099167744
   [junit4]   2> 4274937 INFO  (searcherExecutor-15255-thread-1-processing-n:127.0.0.1:40680_d_upd%2Fy x:collection1_shard1_replica_n3 c:collection1 s:shard1) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.SolrCore [collection1_shard1_replica_n3] Registered new searcher Searcher@3b4ea1e9[collection1_shard1_replica_n3] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 4274940 INFO  (qtp1864938593-94231) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.ZkShardTerms Successful update of terms at /collections/collection1/terms/shard1 to Terms{values={core_node2=0, core_node4=0}, version=1}
   [junit4]   2> 4274940 INFO  (qtp1864938593-94231) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/collection1/leaders/shard1
   [junit4]   2> 4274943 INFO  (qtp1864938593-94231) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.ZkController Core needs to recover:collection1_shard1_replica_n3
   [junit4]   2> 4274943 INFO  (updateExecutor-22630-thread-1-processing-n:127.0.0.1:40680_d_upd%2Fy x:collection1_shard1_replica_n3 c:collection1 s:shard1) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1 r:core_node4 x:collection1_shard1_replica_n3] o.a.s.u.DefaultSolrCoreState Running recovery
   [junit4]   2> 4274944 INFO  (recoveryExecutor-22632-thread-1-processing-n:127.0.0.1:40680_d_upd%2Fy x:collection1_shard1_replica_n3 c:collection1 s:shard1 r:core_node4) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1 r:core_node4 x:collection1_shard1_replica_n3] o.a.s.c.RecoveryStrategy Starting recovery process. recoveringAfterStartup=true
   [junit4]   2> 4274944 INFO  (recoveryExecutor-22632-thread-1-processing-n:127.0.0.1:40680_d_upd%2Fy x:collection1_shard1_replica_n3 c:collection1 s:shard1 r:core_node4) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1 r:core_node4 x:collection1_shard1_replica_n3] o.a.s.c.RecoveryStrategy startupVersions is empty
   [junit4]   2> 4274945 INFO  (qtp1864938593-94231) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&collection.configName=conf1&name=collection1_shard1_replica_n3&action=CREATE&collection=collection1&shard=shard1&wt=javabin&version=2&replicaType=NRT} status=0 QTime=1122
   [junit4]   2> 4274946 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1 r:core_node2 x:collection1_shard1_replica_n1] o.a.s.c.S.Request [collection1_shard1_replica_n1]  webapp=/d_upd/y path=/admin/ping params={wt=javabin&version=2} hits=0 status=0 QTime=0
   [junit4]   2> 4274946 INFO  (qtp693706616-94199) [n:127.0.0.1:39567_d_upd%2Fy c:collection1 s:shard1 r:core_node2 x:collection1_shard1_replica_n1] o.a.s.c.S.Request [collection1_shard1_replica_n1]  webapp=/d_upd/y path=/admin/ping params={wt=javabin&version=2} status=0 QTime=0
   [junit4]   2> 4274947 INFO  (recoveryExecutor-22632-thread-1-processing-n:127.0.0.1:40680_d_upd%2Fy x:collection1_shard1_replica_n3 c:collection1 s:shard1 r:core_node4) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1 r:core_node4 x:collection1_shard1_replica_n3] o.a.s.c.RecoveryStrategy Begin buffering updates. core=[collection1_shard1_replica_n3]
   [junit4]   2> 4274947 INFO  (recoveryExecutor-22632-thread-1-processing-n:127.0.0.1:40680_d_upd%2Fy x:collection1_shard1_replica_n3 c:collection1 s:shard1 r:core_node4) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1 r:core_node4 x:collection1_shard1_replica_n3] o.a.s.u.UpdateLog Starting to buffer updates. FSUpdateLog{state=ACTIVE, tlog=null}
   [junit4]   2> 4274947 INFO  (recoveryExecutor-22632-thread-1-processing-n:127.0.0.1:40680_d_upd%2Fy x:collection1_shard1_replica_n3 c:collection1 s:shard1 r:core_node4) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1 r:core_node4 x:collection1_shard1_replica_n3] o.a.s.c.RecoveryStrategy Publishing state of core [collection1_shard1_replica_n3] as recovering, leader is [http://127.0.0.1:39567/d_upd/y/collection1_shard1_replica_n1/] and I am [http://127.0.0.1:40680/d_upd/y/collection1_shard1_replica_n3/]
   [junit4]   2> 4274949 INFO  (qtp693706616-94201) [n:127.0.0.1:39567_d_upd%2Fy c:collection1   ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={node=127.0.0.1:40680_d_upd%252Fy&action=ADDREPLICA&collection=collection1&shard=shard1&type=NRT&wt=javabin&version=2} status=0 QTime=3185
   [junit4]   2> 4274950 INFO  (recoveryExecutor-22632-thread-1-processing-n:127.0.0.1:40680_d_upd%2Fy x:collection1_shard1_replica_n3 c:collection1 s:shard1 r:core_node4) [n:127.0.0.1:40680_d_upd%2Fy c:collection1 s:shard1 r:core_node4 x:collection1_shard1_replica_n3] o.a.s.c.RecoveryStrategy Sending prep recovery command to [http://127.0.0.1:39567/d_upd/y]; [WaitForState: action=PREPRECOVERY&core=collection1_shard1_replica_n1&nodeName=127.0.0.1:40680_d_upd%252Fy&coreNodeName=core_node4&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
   [junit4]   2> 4274951 INFO  (qtp693706616-94200) [n:127.0.0.1:39567_d_upd%2Fy    x:collection1_shard1_replica_n1] o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node4, state: recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
   [junit4]   2> 4274951 INFO  (qtp693706616-94200) [n:127.0.0.1:39567_d_upd%2Fy    x:collection1_shard1_replica_n1] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, shard=shard1, thisCore=collection1_shard1_replica_n1, leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, currentState=down, localState=active, nodeName=127.0.0.1:40680_d_upd%2Fy, coreNodeName=core_node4, onlyIfActiveCheckResult=false, nodeProps: core_node4:{
   [junit4]   2>   "core":"collection1_shard1_replica_n3",
   [junit4]   2>   "base_url":"http://127.0.0.1:40680/d_upd/y",
   [junit4]   2>   "node_name":"127.0.0.1:40680_d_upd%2Fy",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "type":"NRT"}
   [junit4]   2> 4275051 INFO  (zkCallback-22616-thread-3) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 4275051 INFO  (zkCallback-22616-thread-2) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 4275051 INFO  (zkCallback-22635-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 4275051 INFO  (zkCallback-22616-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 4275051 INFO  (watches-22618-thread-2) [    ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, shard=shard1, thisCore=collection1_shard1_replica_n1, leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, currentState=recovering, localState=active, nodeName=127.0.0.1:40680_d_upd%2Fy, coreNodeName=core_node4, onlyIfActiveCheckResult=false, nodeProps: core_node4:{
   [junit4]   2>   "core":"collection1_shard1_replica_n3",
   [junit4]   2>   "base_url":"http://127.0.0.1:40680/d_upd/y",
   [junit4]   2>   "node_name":"127.0.0.1:40680_d_upd%2Fy",
   [junit4]   2>   "state":"recovering",
   [junit4]   2>   "type":"NRT"}
   [junit4]   2> 4275051 INFO  (qtp693706616-94200) [n:127.0.0.1:39567_d_upd%2Fy    x:collection1_shard1_replica_n1] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={nodeName=127.0.0.1:40680_d_upd%252Fy&onlyIfLeaderActive=true&core=collection1_shard1_replica_n1&coreNodeName=core_node4&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2} status=0 QTime=101
   [junit4]   2> 4275058 INFO  (zkCallback-22635-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 4275059 INFO  (zkCallback-22635-thread-2) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/collection1/state.json] for collection [collection1] has occurred - updating... (live nodes size: [4])
   [junit4]   2> 4275552 INFO  (recoveryExecutor-22632-thread-1-processing-n:127.0.0.1:40680_d_upd%2Fy x:collection1_shard1_replica_n3 c:collection1 s:shard1 r:core_node4) [n:127.0.0.1:40680_d_u

[...truncated too long message...]

:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

jar-checksums:
    [mkdir] Created dir: /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/null1329539178
     [copy] Copying 240 files to /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/null1329539178
   [delete] Deleting directory /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/null1329539178

check-working-copy:
[ivy:cachepath] :: resolving dependencies :: #;working@lucene1-us-west
[ivy:cachepath] 	confs: [default]
[ivy:cachepath] 	found org.eclipse.jgit#org.eclipse.jgit;5.3.0.201903130848-r in public
[ivy:cachepath] 	found com.jcraft#jsch;0.1.54 in public
[ivy:cachepath] 	found com.jcraft#jzlib;1.1.1 in public
[ivy:cachepath] 	found com.googlecode.javaewah#JavaEWAH;1.1.6 in public
[ivy:cachepath] 	found org.slf4j#slf4j-api;1.7.2 in public
[ivy:cachepath] 	found org.bouncycastle#bcpg-jdk15on;1.60 in public
[ivy:cachepath] 	found org.bouncycastle#bcprov-jdk15on;1.60 in public
[ivy:cachepath] 	found org.bouncycastle#bcpkix-jdk15on;1.60 in public
[ivy:cachepath] 	found org.slf4j#slf4j-nop;1.7.2 in public
[ivy:cachepath] :: resolution report :: resolve 53ms :: artifacts dl 9ms
	---------------------------------------------------------------------
	|                  |            modules            ||   artifacts   |
	|       conf       | number| search|dwnlded|evicted|| number|dwnlded|
	---------------------------------------------------------------------
	|      default     |   9   |   0   |   0   |   0   ||   9   |   0   |
	---------------------------------------------------------------------
[wc-checker] Initializing working copy...
[wc-checker] Checking working copy status...

-jenkins-base:

BUILD SUCCESSFUL
Total time: 432 minutes 22 seconds
Archiving artifacts
java.lang.InterruptedException: no matches found within 10000
	at hudson.FilePath$ValidateAntFileMask.hasMatch(FilePath.java:2847)
	at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2726)
	at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2707)
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3086)
Also:   hudson.remoting.Channel$CallSiteStackTrace: Remote call to lucene
		at hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1741)
		at hudson.remoting.UserRequest$ExceptionResponse.retrieve(UserRequest.java:357)
		at hudson.remoting.Channel.call(Channel.java:955)
		at hudson.FilePath.act(FilePath.java:1072)
		at hudson.FilePath.act(FilePath.java:1061)
		at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
		at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
		at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
		at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
		at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
		at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
		at hudson.model.Build$BuildExecution.post2(Build.java:186)
		at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
		at hudson.model.Run.execute(Run.java:1835)
		at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
		at hudson.model.ResourceController.execute(ResourceController.java:97)
		at hudson.model.Executor.run(Executor.java:429)
Caused: hudson.FilePath$TunneledInterruptedException
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3088)
	at hudson.remoting.UserRequest.perform(UserRequest.java:212)
	at hudson.remoting.UserRequest.perform(UserRequest.java:54)
	at hudson.remoting.Request$2.run(Request.java:369)
	at hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:748)
Caused: java.lang.InterruptedException: java.lang.InterruptedException: no matches found within 10000
	at hudson.FilePath.act(FilePath.java:1074)
	at hudson.FilePath.act(FilePath.java:1061)
	at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
	at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
	at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
	at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
	at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
	at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
	at hudson.model.Build$BuildExecution.post2(Build.java:186)
	at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
	at hudson.model.Run.execute(Run.java:1835)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:97)
	at hudson.model.Executor.run(Executor.java:429)
No artifacts found that match the file pattern "**/*.events,heapdumps/**,**/hs_err_pid*". Configuration error?
Recording test results
Build step 'Publish JUnit test result report' changed build result to UNSTABLE
Email was triggered for: Unstable (Test Failures)
Sending email for trigger: Unstable (Test Failures)

[JENKINS] Lucene-Solr-NightlyTests-master - Build # 1854 - Still Unstable

Posted by Apache Jenkins Server <je...@builds.apache.org>.
Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-master/1854/

2 tests failed.
FAILED:  org.apache.solr.cloud.hdfs.HdfsTlogReplayBufferedWhileIndexingTest.test

Error Message:
Test abandoned because suite timeout was reached.

Stack Trace:
java.lang.Exception: Test abandoned because suite timeout was reached.
	at __randomizedtesting.SeedInfo.seed([97127C10F1843A6]:0)


FAILED:  junit.framework.TestSuite.org.apache.solr.cloud.hdfs.HdfsTlogReplayBufferedWhileIndexingTest

Error Message:
Suite timeout exceeded (>= 7200000 msec).

Stack Trace:
java.lang.Exception: Suite timeout exceeded (>= 7200000 msec).
	at __randomizedtesting.SeedInfo.seed([97127C10F1843A6]:0)




Build Log:
[...truncated 15468 lines...]
   [junit4] Suite: org.apache.solr.cloud.hdfs.HdfsTlogReplayBufferedWhileIndexingTest
   [junit4]   2> Creating dataDir: /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.HdfsTlogReplayBufferedWhileIndexingTest_97127C10F1843A6-001/init-core-data-001
   [junit4]   2> 104742 WARN  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=2 numCloses=2
   [junit4]   2> 104743 INFO  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.a.s.SolrTestCaseJ4 Using TrieFields (NUMERIC_POINTS_SYSPROP=false) w/NUMERIC_DOCVALUES_SYSPROP=false
   [junit4]   2> 104745 INFO  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.SolrTestCaseJ4$SuppressSSL(bugUrl="None")
   [junit4]   2> 104745 INFO  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
   [junit4]   2> 104746 INFO  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /
   [junit4]   2> 105950 WARN  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.a.h.u.NativeCodeLoader Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
   [junit4]   1> Formatting using clusterid: testClusterID
   [junit4]   2> 107593 WARN  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.a.h.m.i.MetricsConfig Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
   [junit4]   2> 107879 WARN  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 107919 INFO  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 107922 INFO  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 107923 INFO  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 107923 INFO  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 107925 INFO  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@6186339{static,/static,jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 108359 INFO  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@9f2733f{hdfs,/,file:///x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/jetty-localhost-41042-hdfs-_-any-11080779452450029231.dir/webapp/,AVAILABLE}{/hdfs}
   [junit4]   2> 108362 INFO  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.e.j.s.AbstractConnector Started ServerConnector@1e9fa992{HTTP/1.1,[http/1.1]}{localhost:41042}
   [junit4]   2> 108362 INFO  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.e.j.s.Server Started @108437ms
   [junit4]   2> 109315 WARN  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 109320 INFO  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 109322 INFO  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 109322 INFO  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 109322 INFO  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 109322 INFO  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@62cb30e7{static,/static,jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 109494 INFO  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@6cf032b7{datanode,/,file:///x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/jetty-localhost-45003-datanode-_-any-13326213864534826221.dir/webapp/,AVAILABLE}{/datanode}
   [junit4]   2> 109495 INFO  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.e.j.s.AbstractConnector Started ServerConnector@76308492{HTTP/1.1,[http/1.1]}{localhost:45003}
   [junit4]   2> 109495 INFO  (SUITE-HdfsTlogReplayBufferedWhileIndexingTest-seed#[97127C10F1843A6]-worker) [    ] o.e.j.s.Server Started @109570ms
   [junit4]   2> 110914 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x20a675002646a432: Processing first storage report for DS-6b8187db-2db9-4b79-9322-f02cec5dca52 from datanode 149538cc-7e7c-4e20-9f38-30518e8aade0
   [junit4]   2> 110929 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x20a675002646a432: from storage DS-6b8187db-2db9-4b79-9322-f02cec5dca52 node DatanodeRegistration(127.0.0.1:36929, datanodeUuid=149538cc-7e7c-4e20-9f38-30518e8aade0, infoPort=44743, infoSecurePort=0, ipcPort=40991, storageInfo=lv=-57;cid=testClusterID;nsid=1802227240;c=1558656235061), blocks: 0, hasStaleStorage: true, processing time: 10 msecs, invalidatedBlocks: 0
   [junit4]   2> 110930 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x20a675002646a432: Processing first storage report for DS-8f1a5b79-19b1-4064-bbd6-7622567e6842 from datanode 149538cc-7e7c-4e20-9f38-30518e8aade0
   [junit4]   2> 110930 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x20a675002646a432: from storage DS-8f1a5b79-19b1-4064-bbd6-7622567e6842 node DatanodeRegistration(127.0.0.1:36929, datanodeUuid=149538cc-7e7c-4e20-9f38-30518e8aade0, infoPort=44743, infoSecurePort=0, ipcPort=40991, storageInfo=lv=-57;cid=testClusterID;nsid=1802227240;c=1558656235061), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 111014 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 111014 INFO  (ZkTestServer Run Thread) [    ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 111014 INFO  (ZkTestServer Run Thread) [    ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 111114 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.ZkTestServer start zk server on port:41112
   [junit4]   2> 111114 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:41112
   [junit4]   2> 111114 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 41112
   [junit4]   2> 111126 INFO  (zkConnectionManagerCallback-3331-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 111141 INFO  (zkConnectionManagerCallback-3333-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 111149 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.ZkTestServer put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml to /configs/conf1/solrconfig.xml
   [junit4]   2> 111152 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.ZkTestServer put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/schema15.xml to /configs/conf1/schema.xml
   [junit4]   2> 111154 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.ZkTestServer put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
   [junit4]   2> 111157 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.ZkTestServer put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/stopwords.txt to /configs/conf1/stopwords.txt
   [junit4]   2> 111159 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.ZkTestServer put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/protwords.txt to /configs/conf1/protwords.txt
   [junit4]   2> 111161 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.ZkTestServer put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/currency.xml to /configs/conf1/currency.xml
   [junit4]   2> 111166 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.ZkTestServer put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml to /configs/conf1/enumsConfig.xml
   [junit4]   2> 111168 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.ZkTestServer put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json to /configs/conf1/open-exchange-rates.json
   [junit4]   2> 111172 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.ZkTestServer put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt to /configs/conf1/mapping-ISOLatin1Accent.txt
   [junit4]   2> 111176 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.ZkTestServer put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt to /configs/conf1/old_synonyms.txt
   [junit4]   2> 111178 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.ZkTestServer put /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/synonyms.txt to /configs/conf1/synonyms.txt
   [junit4]   2> 111194 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.AbstractFullDistribZkTestBase Will use NRT replicas unless explicitly asked otherwise
   [junit4]   2> 111535 WARN  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 111535 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 111535 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 111535 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 111547 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 111547 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 111547 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 111548 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@38c8d24a{/,null,AVAILABLE}
   [junit4]   2> 111552 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@6f74d1ca{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:43064}
   [junit4]   2> 111552 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.e.j.s.Server Started @111626ms
   [junit4]   2> 111552 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/, solr.data.dir=hdfs://localhost:41099/hdfs__localhost_41099__x1_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-master_checkout_solr_build_solr-core_test_J0_temp_solr.cloud.hdfs.HdfsTlogReplayBufferedWhileIndexingTest_97127C10F1843A6-001_tempDir-002_control_data, hostPort=43064, coreRootDirectory=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/../../../../../../../../../../../x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.HdfsTlogReplayBufferedWhileIndexingTest_97127C10F1843A6-001/control-001/cores}
   [junit4]   2> 111553 ERROR (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 111553 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 111553 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr? version 9.0.0
   [junit4]   2> 111553 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 111553 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 111553 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-05-24T00:03:59.396453Z
   [junit4]   2> 111573 INFO  (zkConnectionManagerCallback-3335-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 111581 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 111581 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.HdfsTlogReplayBufferedWhileIndexingTest_97127C10F1843A6-001/control-001/solr.xml
   [junit4]   2> 111591 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 111591 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 111603 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@61b4ef27, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 111772 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 111786 WARN  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@79a4396f[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 111824 WARN  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@7066a532[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 111830 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:41112/solr
   [junit4]   2> 111851 INFO  (zkConnectionManagerCallback-3342-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 111856 INFO  (zkConnectionManagerCallback-3344-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 112064 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [n:127.0.0.1:43064_    ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:43064_
   [junit4]   2> 112065 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [n:127.0.0.1:43064_    ] o.a.s.c.Overseer Overseer (id=73354616241324036-127.0.0.1:43064_-n_0000000000) starting
   [junit4]   2> 112085 INFO  (zkConnectionManagerCallback-3351-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 112089 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [n:127.0.0.1:43064_    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:41112/solr ready
   [junit4]   2> 112090 INFO  (OverseerStateUpdate-73354616241324036-127.0.0.1:43064_-n_0000000000) [n:127.0.0.1:43064_    ] o.a.s.c.Overseer Starting to work on the main queue : 127.0.0.1:43064_
   [junit4]   2> 112091 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [n:127.0.0.1:43064_    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:43064_
   [junit4]   2> 112129 INFO  (zkCallback-3343-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 112130 INFO  (zkCallback-3350-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 112153 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [n:127.0.0.1:43064_    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 112210 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [n:127.0.0.1:43064_    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@61b4ef27
   [junit4]   2> 112260 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [n:127.0.0.1:43064_    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@61b4ef27
   [junit4]   2> 112261 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [n:127.0.0.1:43064_    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@61b4ef27
   [junit4]   2> 112264 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [n:127.0.0.1:43064_    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/../../../../../../../../../../../x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.HdfsTlogReplayBufferedWhileIndexingTest_97127C10F1843A6-001/control-001/cores
   [junit4]   2> 112341 INFO  (zkConnectionManagerCallback-3357-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 112343 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 112345 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:41112/solr ready
   [junit4]   2> 112357 INFO  (qtp163619927-3320) [n:127.0.0.1:43064_    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:43064_&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 112370 INFO  (OverseerThreadFactory-155-thread-1-processing-n:127.0.0.1:43064_) [n:127.0.0.1:43064_    ] o.a.s.c.a.c.CreateCollectionCmd Create collection control_collection
   [junit4]   2> 112499 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_    x:control_collection_shard1_replica_n1] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 112500 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_    x:control_collection_shard1_replica_n1] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 113542 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 113729 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.s.IndexSchema [control_collection_shard1_replica_n1] Schema name=test
   [junit4]   2> 113739 WARN  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SolrResourceLoader Solr loaded a deprecated plugin/analysis class [solr.TrieIntField]. Please consult documentation how to replace it accordingly.
   [junit4]   2> 113744 WARN  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SolrResourceLoader Solr loaded a deprecated plugin/analysis class [solr.TrieFloatField]. Please consult documentation how to replace it accordingly.
   [junit4]   2> 113746 WARN  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SolrResourceLoader Solr loaded a deprecated plugin/analysis class [solr.TrieLongField]. Please consult documentation how to replace it accordingly.
   [junit4]   2> 113748 WARN  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SolrResourceLoader Solr loaded a deprecated plugin/analysis class [solr.TrieDoubleField]. Please consult documentation how to replace it accordingly.
   [junit4]   2> 113783 WARN  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SolrResourceLoader Solr loaded a deprecated plugin/analysis class [solr.TrieDateField]. Please consult documentation how to replace it accordingly.
   [junit4]   2> 114048 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.s.IndexSchema Loaded schema test/1.6 with uniqueid field id
   [junit4]   2> 114218 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.CoreContainer Creating SolrCore 'control_collection_shard1_replica_n1' using configuration from collection control_collection, trusted=true
   [junit4]   2> 114219 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.control_collection.shard1.replica_n1' (registry 'solr.core.control_collection.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@61b4ef27
   [junit4]   2> 114227 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost:41099/solr_hdfs_home
   [junit4]   2> 114227 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 114228 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SolrCore [[control_collection_shard1_replica_n1] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.HdfsTlogReplayBufferedWhileIndexingTest_97127C10F1843A6-001/control-001/cores/control_collection_shard1_replica_n1], dataDir=[hdfs://localhost:41099/solr_hdfs_home/control_collection/core_node2/data/]
   [junit4]   2> 114230 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:41099/solr_hdfs_home/control_collection/core_node2/data/snapshot_metadata
   [junit4]   2> 114246 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 114246 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 114246 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 114560 WARN  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.s.h.HdfsDirectory The NameNode is in SafeMode - Solr will wait 5 seconds and try again.
   [junit4]   2> 119792 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 119804 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:41099/solr_hdfs_home/control_collection/core_node2/data
   [junit4]   2> 119879 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:41099/solr_hdfs_home/control_collection/core_node2/data/index
   [junit4]   2> 119887 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 119887 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 119887 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 119933 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 119934 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy: minMergeSize=1000, mergeFactor=8, maxMergeSize=9223372036854775807, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=false, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.7193527785811858]
   [junit4]   2> 121181 WARN  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 121345 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 121345 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 121345 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=2
   [junit4]   2> 121408 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.CommitTracker Hard AutoCommit: if uncommitted for 10000ms; 
   [junit4]   2> 121408 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.CommitTracker Soft AutoCommit: if uncommitted for 3000ms; 
   [junit4]   2> 121420 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy: minMergeSize=1000, mergeFactor=43, maxMergeSize=9223372036854775807, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=false, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0]
   [junit4]   2> 121632 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.s.SolrIndexSearcher Opening [Searcher@76eb5e4e[control_collection_shard1_replica_n1] main]
   [junit4]   2> 121657 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 121658 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 121672 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 121682 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1634369535501926400
   [junit4]   2> 121716 INFO  (searcherExecutor-160-thread-1-processing-n:127.0.0.1:43064_ x:control_collection_shard1_replica_n1 c:control_collection s:shard1) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SolrCore [control_collection_shard1_replica_n1] Registered new searcher Searcher@76eb5e4e[control_collection_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 121742 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ZkShardTerms Successful update of terms at /collections/control_collection/terms/shard1 to Terms{values={core_node2=0}, version=0}
   [junit4]   2> 121742 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/control_collection/leaders/shard1
   [junit4]   2> 121751 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 121751 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 121751 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:43064/control_collection_shard1_replica_n1/
   [junit4]   2> 121751 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 121752 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SyncStrategy http://127.0.0.1:43064/control_collection_shard1_replica_n1/ has no replicas
   [junit4]   2> 121752 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContextBase Creating leader registration node /collections/control_collection/leaders/shard1/leader after winning as /collections/control_collection/leader_elect/shard1/election/73354616241324036-core_node2-n_0000000000
   [junit4]   2> 121755 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:43064/control_collection_shard1_replica_n1/ shard1
   [junit4]   2> 121760 INFO  (zkCallback-3343-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 121763 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 121765 INFO  (zkCallback-3343-thread-2) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 121767 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_ c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT} status=0 QTime=9269
   [junit4]   2> 121793 INFO  (qtp163619927-3320) [n:127.0.0.1:43064_    ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at most 45 seconds. Check all shard replicas
   [junit4]   2> 121892 INFO  (zkCallback-3343-thread-2) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 121892 INFO  (zkCallback-3343-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 121894 INFO  (qtp163619927-3320) [n:127.0.0.1:43064_    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:43064_&wt=javabin&version=2} status=0 QTime=9537
   [junit4]   2> 121901 INFO  (zkCallback-3343-thread-3) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 121927 INFO  (zkConnectionManagerCallback-3363-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 121938 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 121940 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:41112/solr ready
   [junit4]   2> 121943 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection loss:false
   [junit4]   2> 121946 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&name=collection1&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=&stateFormat=1&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 121959 INFO  (OverseerCollectionConfigSetProcessor-73354616241324036-127.0.0.1:43064_-n_0000000000) [n:127.0.0.1:43064_    ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000000 doesn't exist.  Requestor may have disconnected from ZooKeeper
   [junit4]   2> 121963 INFO  (OverseerThreadFactory-155-thread-2-processing-n:127.0.0.1:43064_) [n:127.0.0.1:43064_    ] o.a.s.c.a.c.CreateCollectionCmd Create collection collection1
   [junit4]   2> 122184 WARN  (OverseerThreadFactory-155-thread-2-processing-n:127.0.0.1:43064_) [n:127.0.0.1:43064_    ] o.a.s.c.a.c.CreateCollectionCmd It is unusual to create a collection (collection1) without cores.
   [junit4]   2> 122195 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_    ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at most 45 seconds. Check all shard replicas
   [junit4]   2> 122196 INFO  (qtp163619927-3322) [n:127.0.0.1:43064_    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={collection.configName=conf1&name=collection1&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=&stateFormat=1&wt=javabin&version=2} status=0 QTime=250
   [junit4]   2> 122213 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.AbstractFullDistribZkTestBase Creating jetty instances pullReplicaCount=0 numOtherReplicas=2
   [junit4]   2> 122718 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.HdfsTlogReplayBufferedWhileIndexingTest_97127C10F1843A6-001/shard-1-001 of type NRT
   [junit4]   2> 122740 WARN  (closeThreadPool-3364-thread-1) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 122741 INFO  (closeThreadPool-3364-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 122741 INFO  (closeThreadPool-3364-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 122741 INFO  (closeThreadPool-3364-thread-1) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 122842 INFO  (closeThreadPool-3364-thread-1) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 122842 INFO  (closeThreadPool-3364-thread-1) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 122842 INFO  (closeThreadPool-3364-thread-1) [    ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 122876 INFO  (closeThreadPool-3364-thread-1) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@55ae1464{/,null,AVAILABLE}
   [junit4]   2> 122877 INFO  (closeThreadPool-3364-thread-1) [    ] o.e.j.s.AbstractConnector Started ServerConnector@7651cd84{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:34664}
   [junit4]   2> 122877 INFO  (closeThreadPool-3364-thread-1) [    ] o.e.j.s.Server Started @122951ms
   [junit4]   2> 122877 INFO  (closeThreadPool-3364-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/, solrconfig=solrconfig.xml, solr.data.dir=hdfs://localhost:41099/hdfs__localhost_41099__x1_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-master_checkout_solr_build_solr-core_test_J0_temp_solr.cloud.hdfs.HdfsTlogReplayBufferedWhileIndexingTest_97127C10F1843A6-001_tempDir-002_jetty1, hostPort=34664, coreRootDirectory=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/../../../../../../../../../../../x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.HdfsTlogReplayBufferedWhileIndexingTest_97127C10F1843A6-001/shard-1-001/cores}
   [junit4]   2> 122878 ERROR (closeThreadPool-3364-thread-1) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 122878 INFO  (closeThreadPool-3364-thread-1) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 122878 INFO  (closeThreadPool-3364-thread-1) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 9.0.0
   [junit4]   2> 122878 INFO  (closeThreadPool-3364-thread-1) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 122878 INFO  (closeThreadPool-3364-thread-1) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 122878 INFO  (closeThreadPool-3364-thread-1) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-05-24T00:04:10.721343Z
   [junit4]   2> 123285 INFO  (zkConnectionManagerCallback-3366-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 123287 INFO  (closeThreadPool-3364-thread-1) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 123287 INFO  (closeThreadPool-3364-thread-1) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.HdfsTlogReplayBufferedWhileIndexingTest_97127C10F1843A6-001/shard-1-001/solr.xml
   [junit4]   2> 123294 INFO  (closeThreadPool-3364-thread-1) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 123294 INFO  (closeThreadPool-3364-thread-1) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 123326 INFO  (closeThreadPool-3364-thread-1) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@61b4ef27, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 123615 INFO  (TEST-HdfsTlogReplayBufferedWhileIndexingTest.test-seed#[97127C10F1843A6]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.HdfsTlogReplayBufferedWhileIndexingTest_97127C10F1843A6-001/shard-2-001 of type NRT
   [junit4]   2> 123628 WARN  (closeThreadPool-3364-thread-2) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 123629 INFO  (closeThreadPool-3364-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 123629 INFO  (closeThreadPool-3364-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 123629 INFO  (closeThreadPool-3364-thread-2) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 123651 INFO  (closeThreadPool-3364-thread-2) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 123651 INFO  (closeThreadPool-3364-thread-2) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 123651 INFO  (closeThreadPool-3364-thread-2) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 123652 INFO  (closeThreadPool-3364-thread-2) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@18a03aa6{/,null,AVAILABLE}
   [junit4]   2> 123652 INFO  (closeThreadPool-3364-thread-2) [    ] o.e.j.s.AbstractConnector Started ServerConnector@3073c1a3{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:39421}
   [junit4]   2> 123653 INFO  (closeThreadPool-3364-thread-2) [    ] o.e.j.s.Server Started @123727ms
   [junit4]   2> 123653 INFO  (closeThreadPool-3364-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/, solrconfig=solrconfig.xml, solr.data.dir=hdfs://localhost:41099/hdfs__localhost_41099__x1_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-master_checkout_solr_build_solr-core_test_J0_temp_solr.cloud.hdfs.HdfsTlogReplayBufferedWhileIndexingTest_97127C10F1843A6-001_tempDir-002_jetty2, hostPort=39421, coreRootDirectory=/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/../../../../../../../../../../../x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.HdfsTlogReplayBufferedWhileIndexingTest_97127C10F1843A6-001/shard-2-001/cores}
   [junit4]   2> 123653 ERROR (closeThreadPool-3364-thread-2) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 123653 INFO  (closeThreadPool-3364-thread-2) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 123653 INFO  (closeThreadPool-3364-thread-2) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 9.0.0
   [junit4]   2> 123653 INFO  (closeThreadPool-3364-thread-2) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 123653 INFO  (closeThreadPool-3364-thread-2) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 123653 INFO  (closeThreadPool-3364-thread-2) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-05-24T00:04:11.496879Z
   [junit4]   2> 123696 INFO  (zkConnectionManagerCallback-3369-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 123698 INFO  (closeThreadPool-3364-thread-2) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 123698 INFO  (closeThreadPool-3364-thread-2) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.HdfsTlogReplayBufferedWhileIndexingTest_97127C10F1843A6-001/shard-2-001/solr.xml
   [junit4]   2> 123706 INFO  (closeThreadPool-3364-thread-2) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 123706 INFO  (closeThreadPool-3364-thread-2) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 123709 INFO  (closeThreadPool-3364-thread-2) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@61b4ef27, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 123968 INFO  (OverseerCollectionConfigSetProcessor-73354616241324036-127.0.0.1:43064_-n_0000000000) [n:127.0.0.1:43064_    ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000002 doesn't exist.  Requestor may have disconnected from ZooKeeper
   [junit4]   2> 123978 INFO  (closeThreadPool-3364-thread-2) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 123980 WARN  (closeThreadPool-3364-thread-2) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@22e2a2bc[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 123984 WARN  (closeThreadPool-3364-thread-2) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@544a73c1[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 123985 INFO  (closeThreadPool-3364-thread-1) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 123986 INFO  (closeThreadPool-3364-thread-2) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:41112/solr
   [junit4]   2> 123988 WARN  (closeThreadPool-3364-thread-1) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@14d481a9[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 124054 INFO  (zkConnectionManagerCallback-3377-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 124063 WARN  (closeThreadPool-3364-thread-1) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@36b0a2c3[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 124065 INFO  (closeThreadPool-3364-thread-1) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:41112/solr
   [junit4]   2> 124121 INFO  (zkConnectionManagerCallback-3381-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 124159 INFO  (zkConnectionManagerCallback-3384-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 124171 INFO  (closeThreadPool-3364-thread-2) [n:127.0.0.1:39421_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 124180 INFO  (closeThreadPool-3364-thread-2) [n:127.0.0.1:39421_    ] o.a.s.c.ZkController Publish node=127.0.0.1:39421_ as DOWN
   [junit4]   2> 124182 INFO  (closeThreadPool-3364-thread-2) [n:127.0.0.1:39421_    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 124182 INFO  (closeThreadPool-3364-thread-2) [n:127.0.0.1:39421_    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:39421_
   [junit4]   2> 124184 INFO  (zkCallback-3362-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 124184 INFO  (zkCallback-3350-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 124185 INFO  (zkCallback-3343-thread-3) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 124190 INFO  (zkConnectionManagerCallback-3388-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 124197 INFO  (closeThreadPool-3364-thread-1) [n:127.0.0.1:34664_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 124203 INFO  (closeThreadPool-3364-thread-1) [n:127.0.0.1:34664_    ] o.a.s.c.ZkController Publish node=127.0.0.1:34664_ as DOWN
   [junit4]   2> 124205 INFO  (closeThreadPool-3364-thread-1) [n:127.0.0.1:34664_    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 124205 INFO  (closeThreadPool-3364-thread-1) [n:127.0.0.1:34664_    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:34664_
   [junit4]   2> 124207 INFO  (zkCallback-3362-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 124207 INFO  (zkCallback-3343-thread-3) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 124208 INFO  (zkCallback-3350-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 124214 INFO  (zkCallback-3380-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (3)
   [junit4]   2> 124230 INFO  (zkCallback-3387-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 124233 INFO  (zkConnectionManagerCallback-3393-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 124236 INFO  (closeThreadPool-3364-thread-2) [n:127.0.0.1:39421_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 124238 INFO  (closeThreadPool-3364-thread-2) [n:127.0.0.1:39421_    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:41112/solr ready
   [junit4]   2> 124241 INFO  (zkConnectionManagerCallback-3400-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 124243 INFO  (closeThreadPool-3364-thread-1) [n:127.0.0.1:34664_    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 124244 INFO  (closeThreadPool-3364-thread-1) [n:127.0.0.1:34664_    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:41112/solr ready
   [junit4]   2> 124293 INFO  (closeThreadPool-3364-thread-1) [n:127.0.0.1:34664_    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 124329 INFO  (closeThreadPool-3364-thread-2) [n:127.0.0.1:39421_    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 124365 WARN  (closeThreadPool-3364-thread-1) [n:127.0.0.1:34664_    ] o.a.s.m.r.j.JmxMetricsReporter Unable to register meter
   [junit4]   2>           => javax.management.InstanceNotFoundException: solr:dom1=node,category=UPDATE,scope=updateShardHandler,name=threadPool.updateOnlyExecutor.completed
   [junit4]   2> 	at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1083)
   [junit4]   2> javax.management.InstanceNotFoundException: solr:dom1=node,category=UPDATE,scope=updateShardHandler,name=threadPool.updateOnlyExecutor.completed
   [junit4]   2> 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(DefaultMBeanServerInterceptor.java:1083) ~[?:?]
   [junit4]   2> 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.exclusiveUnregisterMBean(DefaultMBeanServerInterceptor.java:423) ~[?:?]
   [junit4]   2> 	at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.unregisterMBean(DefaultMBeanServerInterceptor.java:411) ~[?:?]
   [junit4]   2> 	at com.sun.jmx.mbeanserver.JmxMBeanServer.unregisterMBean(JmxMBeanServer.java:546) ~[?:?]
   [junit4]   2> 	at org.apache.solr.metrics.reporters.jmx.JmxMetricsReporter$JmxListener.registerMBean(JmxMetricsReporter.java:531) ~[java/:?]
   [junit4]   2> 	at org.apache.solr.metrics.reporters.jmx.JmxMetricsReporter$JmxListener.onMeterAdded(JmxMetricsReporter.java:648) ~[java/:?]
   [junit4]   2> 	at org.apache.solr.metrics.reporters.jmx.JmxMetricsReporter.lambda$start$0(JmxMetricsReporter.java:736) ~[java/:?]
   [junit4]   2> 	at java.util.HashMap.forEach(HashMap.java:1336) ~[?:?]
   [junit4]   2> 	at org.apache.solr.metrics.reporters.jmx.JmxMetricsReporter.start(JmxMetricsReporter.java:732) ~[java/:?]
   [junit4]   2> 	at org.apache.solr.metrics.reporters.SolrJmxReporter.doInit(SolrJmxReporter.java:109) ~[java/:?]
   [junit4]   2> 	at org.apache.solr.metrics.SolrMetricReporter.init(SolrMetricReporter.java:70) ~[java/:?]
   [junit4]   2> 	at org.apache.solr.metrics.SolrMetricManager.loadReporter(SolrMetricManager.java:916) ~[java/:?]
   [junit4]   2> 	at org.apache.solr.metrics.SolrMetricManager.loadReporters(SolrMetricManager.java:843) ~[java/:?]
   [junit4]   2> 	at org.apache.solr.core.CoreContainer.load(CoreContainer.java:654) ~[java/:?]
   [junit4]   2> 	at org.apache.solr.servlet.SolrDispatchFilter.createCoreContainer(SolrDispatchFilter.java:255) ~[java/:?]
   [junit4]   2> 	at org.apache.solr.servlet.SolrDispatchFilter.init(SolrDispatchFilter.java:175) ~[java/:?]
   [junit4]   2> 	at org.eclipse.jetty.servlet.FilterHolder.initialize(FilterHolder.java:136) ~[jetty-servlet-9.4.14.v20181114.jar:9.4.14.v20181114]
   [junit4]   2> 	at org.eclipse.jetty.servlet.ServletHandler.lambda$initialize$0(ServletHandler.java:750) ~[jetty-servlet-9.4.14.v20181114.jar:9.4.14.v20181114]
   [junit4]   2> 	at java.util.Spliterators$ArraySpliterator.forEachRemaining(Spliterators.java:948) ~[?:?]
   [junit4]   2> 	at java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:734) ~[?:?]
   [junit4]   2> 	at java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:734) ~[?:?]
   [junit4]   2> 	at java.util.stream.ReferencePipeline$Head.forEach(ReferencePipeline.java:658) ~[?:?]
   [junit4]   2> 	at org.eclipse.jetty.servlet.ServletHandler.initialize(ServletHandler.java:744) ~[jetty-servlet-9.4.14.v20181114.jar:9.4.14.v20181114]
   [junit4]   2> 	at org.eclipse.jetty.servlet.ServletHandler.updateMappings(ServletHandler.java:1449) ~[jetty-servlet-9.4.14.v20181114.jar:9.4.14.v20181114]
   [junit4]   2> 	at org.eclipse.jetty.servlet.ServletHandler.setFilterMappings(ServletHandler.java:1513) ~[jetty-servlet-9.4.14.v20181114.jar:9.4.14.v20181114]
   [junit4]   2> 	at org.eclipse.jetty.servlet.ServletHandler.addFilterMapping(ServletHandler.java:1158) ~[jetty-servlet-9.4.14.v20181114.jar:9.4.14.v20181114]
   [junit4]   2> 	at org.eclipse.jetty.servlet.ServletHandler.addFilterWithMapping(ServletHandler.java:995) ~[jetty-servlet-9.4.14.v20181114.jar:9.4.14.v20181114]
   [junit4]   2> 	at org.eclipse.jetty.servlet.ServletContextHandler.addFilter(ServletContextHandler.java:467) ~[jetty-servlet-9.4.14.v20181114.jar:9.4.14.v20181114]
   [junit4]   2> 	at org.apache.solr.client.solrj.embedded.JettySolrRunner$1.lifeCycleStarted(JettySolrRunner.java:384) ~[java/:?]
   [junit4]   2> 	at org.eclipse.jetty.util.component.AbstractLifeCycle.setStarted(AbstractLifeCycle.java:179) ~[jetty-util-9.4.14.v20181114.jar:9.4.14.v20181114]
   [junit4]   2> 	at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:69) ~[jetty-util-9.4.14.v20181114.jar:9.4.14.v20181114]
   [junit4]   2> 	at org.apache.solr.client.solrj.embedded.JettySolrRunner.retryOnPortBindFailure(JettySolrRunner.java:558) ~[java/:?]
   [junit4]   2> 	at org.apache.solr.client.solrj.embedded.JettySolrRunner.start(JettySolrRunner.java:497) ~[java/:?]
   [junit4]   2> 	at org.apache.solr.client.solrj.embedded.JettySolrRunner.start(JettySolrRunner.java:465) ~[java/:?]
   [junit4]   2> 	at org.apache.solr.cloud.AbstractFullDistribZkTestBase.lambda$createJettys$2(AbstractFullDistribZkTestBase.java:464) ~[java/:?]
   [junit4]   2> 	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) ~[?:?]
   [junit4]   2> 	at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?]
   [junit4]   2> 	at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:209) ~[java/:?]
   [junit4]   2> 	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) ~[?:?]
   [junit4]   2> 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) ~[?:?]
   [junit4]   2> 	at java.lang.Thread.run(Thread.java:834) [?:?]
   [junit4]   2> 124384 INFO  (closeThreadPool-3364-thread-1) [n:127.0.0.1:34664_    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@61b4ef27
   [junit4]   2> 124475 INFO  (closeThreadPool-3364-thread-2) [n:127.0.0.1:39421_    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@61b4ef27
   [junit4]   2> 124483 INFO  (closeThreadPool-3364-thread-1) [n:127.0.0.1:34664_    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@61b4ef27
   [junit4]   2> 124483 INFO  (closeThreadPool-3364-thread-1) [n:127.0.0.1:34664_    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@61b4ef27
   [junit4]   2> 124487 INFO  (closeThreadPool-3364-thread-1) [n:127.0.0.1:34664_    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/../../../../../../../../../../../x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.HdfsTlogReplayBufferedWhileIndexingTest_97127C10F1843A6-001/shard-1-001/cores
   [junit4]   2> 124540 INFO  (closeThreadPool-3364-thread-2) [n:127.0.0.1:39421_    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@61b4ef27
   [junit4]   2> 124540 INFO  (closeThreadPool-3364-thread-2) [n:127.0.0.1:39421_    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@61b4ef27
   [junit4]   2> 124546 INFO  (closeThreadPool-3364-thread-2) [n:127.0.0.1:39421_    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/../../../../../../../../../../../x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.HdfsTlogReplayBufferedWhileIndexingTest_97127C10F1843A6-001/shard-2-001/cores
   [junit4]   2> 125188 INFO  (qtp771633406-3392) [n:127.0.0.1:34664_    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :addreplica with params node=127.0.0.1:39421_&action=ADDREPLICA&collection=collection1&shard=shard1&type=NRT&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 125191 INFO  (qtp771633406-3394) [n:127.0.0.1:34664_    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :addreplica with params node=127.0.0.1:34664_&action=ADDREPLICA&collection=collection1&shard=shard1&type=NRT&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 125220 INFO  (OverseerThreadFactory-155-thread-3-processing-n:127.0.0.1:43064_) [n:127.0.0.1:43064_ c:collection1 s:shard1  ] o.a.s.c.a.c.AddReplicaCmd Node Identified 127.0.0.1:39421_ for creating new replica of shard shard1 for collection collection1
   [junit4]   2> 125227 INFO  (OverseerThreadFactory-155-thread-3-processing-n:127.0.0.1:43064_) [n:127.0.0.1:43064_ c:collection1 s:shard1  ] o.a.s.c.a.c.AddReplicaCmd Returning CreateReplica command.
   [junit4]   2> 125260 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_    x:collection1_shard1_replica_n1] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&name=collection1_shard1_replica_n1&action=CREATE&collection=collection1&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 126467 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 126571 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.s.IndexSchema [collection1_shard1_replica_n1] Schema name=test
   [junit4]   2> 126842 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.s.IndexSchema Loaded schema test/1.6 with uniqueid field id
   [junit4]   2> 126985 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.CoreContainer Creating SolrCore 'collection1_shard1_replica_n1' using configuration from collection collection1, trusted=true
   [junit4]   2> 126986 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.collection1.shard1.replica_n1' (registry 'solr.core.collection1.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@61b4ef27
   [junit4]   2> 126988 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost:41099/solr_hdfs_home
   [junit4]   2> 126988 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 126988 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.SolrCore [[collection1_shard1_replica_n1] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.HdfsTlogReplayBufferedWhileIndexingTest_97127C10F1843A6-001/shard-2-001/cores/collection1_shard1_replica_n1], dataDir=[hdfs://localhost:41099/solr_hdfs_home/collection1/core_node2/data/]
   [junit4]   2> 126991 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:41099/solr_hdfs_home/collection1/core_node2/data/snapshot_metadata
   [junit4]   2> 127014 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 127015 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 127015 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 127054 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 127058 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:41099/solr_hdfs_home/collection1/core_node2/data
   [junit4]   2> 127123 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:41099/solr_hdfs_home/collection1/core_node2/data/index
   [junit4]   2> 127138 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 127139 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 127139 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 127174 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 127175 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy: minMergeSize=1000, mergeFactor=8, maxMergeSize=9223372036854775807, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=false, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.7193527785811858]
   [junit4]   2> 127324 WARN  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 127562 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 127562 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 127562 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=2
   [junit4]   2> 127584 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.u.CommitTracker Hard AutoCommit: if uncommitted for 10000ms; 
   [junit4]   2> 127584 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.u.CommitTracker Soft AutoCommit: if uncommitted for 3000ms; 
   [junit4]   2> 127597 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy: minMergeSize=1000, mergeFactor=43, maxMergeSize=9223372036854775807, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=false, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0]
   [junit4]   2> 127655 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.s.SolrIndexSearcher Opening [Searcher@79a37a3c[collection1_shard1_replica_n1] main]
   [junit4]   2> 127657 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 127658 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 127659 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 127659 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1634369541769265152
   [junit4]   2> 127717 INFO  (searcherExecutor-183-thread-1-processing-n:127.0.0.1:39421_ x:collection1_shard1_replica_n1 c:collection1 s:shard1) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.SolrCore [collection1_shard1_replica_n1] Registered new searcher Searcher@79a37a3c[collection1_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 127720 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.ZkShardTerms Successful update of terms at /collections/collection1/terms/shard1 to Terms{values={core_node2=0}, version=0}
   [junit4]   2> 127720 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/collection1/leaders/shard1
   [junit4]   2> 128109 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 128109 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 128109 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:39421/collection1_shard1_replica_n1/
   [junit4]   2> 128109 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 128109 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.SyncStrategy http://127.0.0.1:39421/collection1_shard1_replica_n1/ has no replicas
   [junit4]   2> 128109 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContextBase Creating leader registration node /collections/collection1/leaders/shard1/leader after winning as /collections/collection1/leader_elect/shard1/election/73354616241324043-core_node2-n_0000000000
   [junit4]   2> 128111 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:39421/collection1_shard1_replica_n1/ shard1
   [junit4]   2> 128116 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 128119 INFO  (qtp955831380-3405) [n:127.0.0.1:39421_ c:collection1 s:shard1  x:collection1_shard1_replica_n1] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&collection.configName=conf1&name=collection1_shard1_replica_n1&action=CREATE&collection=collection1&shard=shard1&wt=javabin&version=2&replicaType=NRT} status=0 QTime=2859
   [junit4]   2> 128129 INFO  (qtp771633406-3392) [n:127.0.0.1:34664_ c:collection1   ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={node=127.0.0.1:39421_&action=ADDREPLICA&collection=collection1&shard=shard1&type=NRT&wt=javabin&version=2} status=0 QTime=2941
   [junit4]   2> 129215 INFO  (OverseerThreadFactory-155-thread-4-processing-n:127.0.0.1:43064_) [n:127.0.0.1:43064_ c:collection1 s:shard1  ] o.a.s.c.a.c.AddReplicaCmd Node Identified 127.0.0.1:34664_ for creating new replica of shard shard1 for collection collection1
   [junit4]   2> 129215 INFO  (OverseerCollectionConfigSetProcessor-73354616241324036-127.0.0.1:43064_-n_0000000000) [n:127.0.0.1:43064_    ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000004 doesn't exist.  Requestor may have disconnected from ZooKeeper
   [junit4]   2> 129218 INFO  (OverseerThreadFactory-155-thread-4-processing-n:127.0.0.1:43064_) [n:127.0.0.1:43064_ c:collection1 s:shard1  ] o.a.s.c.a.c.AddReplicaCmd Returning CreateReplica command.
   [junit4]   2> 129260 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_    x:collection1_shard1_replica_n3] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&name=collection1_shard1_replica_n3&action=CREATE&collection=collection1&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 130287 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 130380 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.s.IndexSchema [collection1_shard1_replica_n3] Schema name=test
   [junit4]   2> 130600 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.s.IndexSchema Loaded schema test/1.6 with uniqueid field id
   [junit4]   2> 130676 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.CoreContainer Creating SolrCore 'collection1_shard1_replica_n3' using configuration from collection collection1, trusted=true
   [junit4]   2> 130676 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.collection1.shard1.replica_n3' (registry 'solr.core.collection1.shard1.replica_n3') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@61b4ef27
   [junit4]   2> 130677 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost:41099/solr_hdfs_home
   [junit4]   2> 130677 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 130677 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.SolrCore [[collection1_shard1_replica_n3] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.hdfs.HdfsTlogReplayBufferedWhileIndexingTest_97127C10F1843A6-001/shard-1-001/cores/collection1_shard1_replica_n3], dataDir=[hdfs://localhost:41099/solr_hdfs_home/collection1/core_node4/data/]
   [junit4]   2> 130686 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:41099/solr_hdfs_home/collection1/core_node4/data/snapshot_metadata
   [junit4]   2> 130700 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 130700 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 130700 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 130719 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 130728 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:41099/solr_hdfs_home/collection1/core_node4/data
   [junit4]   2> 130779 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:41099/solr_hdfs_home/collection1/core_node4/data/index
   [junit4]   2> 130791 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 130791 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 130791 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 130810 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 130811 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy: minMergeSize=1000, mergeFactor=8, maxMergeSize=9223372036854775807, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=false, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.7193527785811858]
   [junit4]   2> 130926 WARN  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 131100 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 131100 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 131100 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=2
   [junit4]   2> 131121 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.u.CommitTracker Hard AutoCommit: if uncommitted for 10000ms; 
   [junit4]   2> 131121 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.u.CommitTracker Soft AutoCommit: if uncommitted for 3000ms; 
   [junit4]   2> 131131 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy: minMergeSize=1000, mergeFactor=43, maxMergeSize=9223372036854775807, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=false, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0]
   [junit4]   2> 131160 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.s.SolrIndexSearcher Opening [Searcher@28eefedd[collection1_shard1_replica_n3] main]
   [junit4]   2> 131164 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 131165 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 131166 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 131167 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1634369545447669760
   [junit4]   2> 131172 INFO  (searcherExecutor-188-thread-1-processing-n:127.0.0.1:34664_ x:collection1_shard1_replica_n3 c:collection1 s:shard1) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.SolrCore [collection1_shard1_replica_n3] Registered new searcher Searcher@28eefedd[collection1_shard1_replica_n3] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 131191 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.ZkShardTerms Successful update of terms at /collections/collection1/terms/shard1 to Terms{values={core_node2=0, core_node4=0}, version=1}
   [junit4]   2> 131191 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/collection1/leaders/shard1
   [junit4]   2> 131197 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.c.ZkController Core needs to recover:collection1_shard1_replica_n3
   [junit4]   2> 131210 INFO  (updateExecutor-3378-thread-1-processing-n:127.0.0.1:34664_ x:collection1_shard1_replica_n3 c:collection1 s:shard1) [n:127.0.0.1:34664_ c:collection1 s:shard1 r:core_node4 x:collection1_shard1_replica_n3] o.a.s.u.DefaultSolrCoreState Running recovery
   [junit4]   2> 131241 INFO  (qtp771633406-3390) [n:127.0.0.1:34664_ c:collection1 s:shard1  x:collection1_shard1_replica_n3] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&collection.configName=conf1&name=collection1_shard1_replica_n3&action=CREATE&collection=collection1&shard=shard1&wt=javabin&version=2&replicaType=NRT} status=0 QTime=1980
   [junit4]   2> 131255 INFO  (qtp771633406-3394) [n:127.0.0.1:34664_ c:collection1   ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={node=127.0.0.1:34664_&act

[...truncated too long message...]

ail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

jar-checksums:
    [mkdir] Created dir: /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/null645439230
     [copy] Copying 240 files to /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/null645439230
   [delete] Deleting directory /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/null645439230

check-working-copy:
[ivy:cachepath] :: resolving dependencies :: #;working@lucene1-us-west
[ivy:cachepath] 	confs: [default]
[ivy:cachepath] 	found org.eclipse.jgit#org.eclipse.jgit;5.3.0.201903130848-r in public
[ivy:cachepath] 	found com.jcraft#jsch;0.1.54 in public
[ivy:cachepath] 	found com.jcraft#jzlib;1.1.1 in public
[ivy:cachepath] 	found com.googlecode.javaewah#JavaEWAH;1.1.6 in public
[ivy:cachepath] 	found org.slf4j#slf4j-api;1.7.2 in public
[ivy:cachepath] 	found org.bouncycastle#bcpg-jdk15on;1.60 in public
[ivy:cachepath] 	found org.bouncycastle#bcprov-jdk15on;1.60 in public
[ivy:cachepath] 	found org.bouncycastle#bcpkix-jdk15on;1.60 in public
[ivy:cachepath] 	found org.slf4j#slf4j-nop;1.7.2 in public
[ivy:cachepath] :: resolution report :: resolve 57ms :: artifacts dl 6ms
	---------------------------------------------------------------------
	|                  |            modules            ||   artifacts   |
	|       conf       | number| search|dwnlded|evicted|| number|dwnlded|
	---------------------------------------------------------------------
	|      default     |   9   |   0   |   0   |   0   ||   9   |   0   |
	---------------------------------------------------------------------
[wc-checker] Initializing working copy...
[wc-checker] Checking working copy status...

-jenkins-base:

BUILD SUCCESSFUL
Total time: 388 minutes 26 seconds
Archiving artifacts
java.lang.InterruptedException: no matches found within 10000
	at hudson.FilePath$ValidateAntFileMask.hasMatch(FilePath.java:2847)
	at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2726)
	at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2707)
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3086)
Also:   hudson.remoting.Channel$CallSiteStackTrace: Remote call to lucene
		at hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1741)
		at hudson.remoting.UserRequest$ExceptionResponse.retrieve(UserRequest.java:357)
		at hudson.remoting.Channel.call(Channel.java:955)
		at hudson.FilePath.act(FilePath.java:1072)
		at hudson.FilePath.act(FilePath.java:1061)
		at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
		at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
		at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
		at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
		at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
		at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
		at hudson.model.Build$BuildExecution.post2(Build.java:186)
		at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
		at hudson.model.Run.execute(Run.java:1835)
		at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
		at hudson.model.ResourceController.execute(ResourceController.java:97)
		at hudson.model.Executor.run(Executor.java:429)
Caused: hudson.FilePath$TunneledInterruptedException
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3088)
	at hudson.remoting.UserRequest.perform(UserRequest.java:212)
	at hudson.remoting.UserRequest.perform(UserRequest.java:54)
	at hudson.remoting.Request$2.run(Request.java:369)
	at hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:748)
Caused: java.lang.InterruptedException: java.lang.InterruptedException: no matches found within 10000
	at hudson.FilePath.act(FilePath.java:1074)
	at hudson.FilePath.act(FilePath.java:1061)
	at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
	at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
	at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
	at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
	at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
	at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
	at hudson.model.Build$BuildExecution.post2(Build.java:186)
	at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
	at hudson.model.Run.execute(Run.java:1835)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:97)
	at hudson.model.Executor.run(Executor.java:429)
No artifacts found that match the file pattern "**/*.events,heapdumps/**,**/hs_err_pid*". Configuration error?
Recording test results
Build step 'Publish JUnit test result report' changed build result to UNSTABLE
Email was triggered for: Unstable (Test Failures)
Sending email for trigger: Unstable (Test Failures)

[JENKINS] Lucene-Solr-NightlyTests-master - Build # 1853 - Still unstable

Posted by Apache Jenkins Server <je...@builds.apache.org>.
Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-master/1853/

1 tests failed.
FAILED:  org.apache.solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest.testSimple

Error Message:
Waiting for collection testSimple2 Timeout waiting to see state for collection=testSimple2 :DocCollection(testSimple2//collections/testSimple2/state.json/23)={   "pullReplicas":"0",   "replicationFactor":"2",   "shards":{     "shard1":{       "range":"80000000-ffffffff",       "state":"active",       "replicas":{         "core_node2":{           "dataDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node2/data/",           "base_url":"http://127.0.0.1:34363/solr",           "node_name":"127.0.0.1:34363_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node2/data/tlog",           "core":"testSimple2_shard1_replica_n1",           "shared_storage":"true",           "state":"down"},         "core_node5":{           "dataDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node5/data/",           "base_url":"http://127.0.0.1:34343/solr",           "node_name":"127.0.0.1:34343_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node5/data/tlog",           "core":"testSimple2_shard1_replica_n3",           "shared_storage":"true",           "state":"active",           "leader":"true"}}},     "shard2":{       "range":"0-7fffffff",       "state":"active",       "replicas":{         "core_node7":{           "dataDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node7/data/",           "base_url":"http://127.0.0.1:34363/solr",           "node_name":"127.0.0.1:34363_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node7/data/tlog",           "core":"testSimple2_shard2_replica_n4",           "shared_storage":"true",           "state":"down"},         "core_node8":{          
 "dataDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node8/data/",           "base_url":"http://127.0.0.1:34343/solr",           "node_name":"127.0.0.1:34343_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node8/data/tlog",           "core":"testSimple2_shard2_replica_n6",           "shared_storage":"true",           "state":"active",           "leader":"true"}}}},   "router":{"name":"compositeId"},   "maxShardsPerNode":"2",   "autoAddReplicas":"true",   "nrtReplicas":"2",   "tlogReplicas":"0"} Live Nodes: [127.0.0.1:34343_solr, 127.0.0.1:36798_solr] Last available state: DocCollection(testSimple2//collections/testSimple2/state.json/23)={   "pullReplicas":"0",   "replicationFactor":"2",   "shards":{     "shard1":{       "range":"80000000-ffffffff",       "state":"active",       "replicas":{         "core_node2":{           "dataDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node2/data/",           "base_url":"http://127.0.0.1:34363/solr",           "node_name":"127.0.0.1:34363_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node2/data/tlog",           "core":"testSimple2_shard1_replica_n1",           "shared_storage":"true",           "state":"down"},         "core_node5":{           "dataDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node5/data/",           "base_url":"http://127.0.0.1:34343/solr",           "node_name":"127.0.0.1:34343_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node5/data/tlog",           "core":"testSimple2_shard1_replica_n3",           "shared_storage":"true",           "state":"active",           
"leader":"true"}}},     "shard2":{       "range":"0-7fffffff",       "state":"active",       "replicas":{         "core_node7":{           "dataDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node7/data/",           "base_url":"http://127.0.0.1:34363/solr",           "node_name":"127.0.0.1:34363_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node7/data/tlog",           "core":"testSimple2_shard2_replica_n4",           "shared_storage":"true",           "state":"down"},         "core_node8":{           "dataDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node8/data/",           "base_url":"http://127.0.0.1:34343/solr",           "node_name":"127.0.0.1:34343_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node8/data/tlog",           "core":"testSimple2_shard2_replica_n6",           "shared_storage":"true",           "state":"active",           "leader":"true"}}}},   "router":{"name":"compositeId"},   "maxShardsPerNode":"2",   "autoAddReplicas":"true",   "nrtReplicas":"2",   "tlogReplicas":"0"}

Stack Trace:
java.lang.AssertionError: Waiting for collection testSimple2
Timeout waiting to see state for collection=testSimple2 :DocCollection(testSimple2//collections/testSimple2/state.json/23)={
  "pullReplicas":"0",
  "replicationFactor":"2",
  "shards":{
    "shard1":{
      "range":"80000000-ffffffff",
      "state":"active",
      "replicas":{
        "core_node2":{
          "dataDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node2/data/",
          "base_url":"http://127.0.0.1:34363/solr",
          "node_name":"127.0.0.1:34363_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node2/data/tlog",
          "core":"testSimple2_shard1_replica_n1",
          "shared_storage":"true",
          "state":"down"},
        "core_node5":{
          "dataDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node5/data/",
          "base_url":"http://127.0.0.1:34343/solr",
          "node_name":"127.0.0.1:34343_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node5/data/tlog",
          "core":"testSimple2_shard1_replica_n3",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"}}},
    "shard2":{
      "range":"0-7fffffff",
      "state":"active",
      "replicas":{
        "core_node7":{
          "dataDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node7/data/",
          "base_url":"http://127.0.0.1:34363/solr",
          "node_name":"127.0.0.1:34363_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node7/data/tlog",
          "core":"testSimple2_shard2_replica_n4",
          "shared_storage":"true",
          "state":"down"},
        "core_node8":{
          "dataDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node8/data/",
          "base_url":"http://127.0.0.1:34343/solr",
          "node_name":"127.0.0.1:34343_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node8/data/tlog",
          "core":"testSimple2_shard2_replica_n6",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"}}}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"2",
  "autoAddReplicas":"true",
  "nrtReplicas":"2",
  "tlogReplicas":"0"}
Live Nodes: [127.0.0.1:34343_solr, 127.0.0.1:36798_solr]
Last available state: DocCollection(testSimple2//collections/testSimple2/state.json/23)={
  "pullReplicas":"0",
  "replicationFactor":"2",
  "shards":{
    "shard1":{
      "range":"80000000-ffffffff",
      "state":"active",
      "replicas":{
        "core_node2":{
          "dataDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node2/data/",
          "base_url":"http://127.0.0.1:34363/solr",
          "node_name":"127.0.0.1:34363_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node2/data/tlog",
          "core":"testSimple2_shard1_replica_n1",
          "shared_storage":"true",
          "state":"down"},
        "core_node5":{
          "dataDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node5/data/",
          "base_url":"http://127.0.0.1:34343/solr",
          "node_name":"127.0.0.1:34343_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node5/data/tlog",
          "core":"testSimple2_shard1_replica_n3",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"}}},
    "shard2":{
      "range":"0-7fffffff",
      "state":"active",
      "replicas":{
        "core_node7":{
          "dataDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node7/data/",
          "base_url":"http://127.0.0.1:34363/solr",
          "node_name":"127.0.0.1:34363_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node7/data/tlog",
          "core":"testSimple2_shard2_replica_n4",
          "shared_storage":"true",
          "state":"down"},
        "core_node8":{
          "dataDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node8/data/",
          "base_url":"http://127.0.0.1:34343/solr",
          "node_name":"127.0.0.1:34343_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple2/core_node8/data/tlog",
          "core":"testSimple2_shard2_replica_n6",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"}}}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"2",
  "autoAddReplicas":"true",
  "nrtReplicas":"2",
  "tlogReplicas":"0"}
	at __randomizedtesting.SeedInfo.seed([F4A76428D7CC3F:3847839A0F2418EE]:0)
	at org.junit.Assert.fail(Assert.java:88)
	at org.apache.solr.cloud.SolrCloudTestCase.waitForState(SolrCloudTestCase.java:310)
	at org.apache.solr.cloud.autoscaling.AutoAddReplicasIntegrationTest.testSimple(AutoAddReplicasIntegrationTest.java:169)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.base/java.lang.Thread.run(Thread.java:834)




Build Log:
[...truncated 14449 lines...]
   [junit4] Suite: org.apache.solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest
   [junit4]   2> Creating dataDir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_F4A76428D7CC3F-001/init-core-data-001
   [junit4]   1> Formatting using clusterid: testClusterID
   [junit4]   2> 3414289 WARN  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[F4A76428D7CC3F]-worker) [    ] o.a.h.m.i.MetricsConfig Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
   [junit4]   2> 3414453 WARN  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[F4A76428D7CC3F]-worker) [    ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 3414458 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[F4A76428D7CC3F]-worker) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 3414525 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[F4A76428D7CC3F]-worker) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 3414525 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[F4A76428D7CC3F]-worker) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 3414525 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[F4A76428D7CC3F]-worker) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 3414525 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[F4A76428D7CC3F]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@52f6695a{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 3416348 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[F4A76428D7CC3F]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@3c1b5005{hdfs,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/jetty-lucene2-us-west.apache.org-35043-hdfs-_-any-13743714256886829969.dir/webapp/,AVAILABLE}{/hdfs}
   [junit4]   2> 3416400 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[F4A76428D7CC3F]-worker) [    ] o.e.j.s.AbstractConnector Started ServerConnector@5bfc6cdf{HTTP/1.1,[http/1.1]}{lucene2-us-west.apache.org:35043}
   [junit4]   2> 3416400 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[F4A76428D7CC3F]-worker) [    ] o.e.j.s.Server Started @3416670ms
   [junit4]   2> 3417713 WARN  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[F4A76428D7CC3F]-worker) [    ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 3417714 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[F4A76428D7CC3F]-worker) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 3417766 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[F4A76428D7CC3F]-worker) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 3417766 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[F4A76428D7CC3F]-worker) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 3417766 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[F4A76428D7CC3F]-worker) [    ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 3417766 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[F4A76428D7CC3F]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@42289f06{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 3418672 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[F4A76428D7CC3F]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@4f2ae280{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/jetty-localhost-39429-datanode-_-any-9826393394435126603.dir/webapp/,AVAILABLE}{/datanode}
   [junit4]   2> 3418672 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[F4A76428D7CC3F]-worker) [    ] o.e.j.s.AbstractConnector Started ServerConnector@619baaed{HTTP/1.1,[http/1.1]}{localhost:39429}
   [junit4]   2> 3418672 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[F4A76428D7CC3F]-worker) [    ] o.e.j.s.Server Started @3418943ms
   [junit4]   2> 3420380 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x79d818c8536081a0: Processing first storage report for DS-32b31028-d557-41d3-b3fe-6c04e7802b0f from datanode e87e9286-3fc1-4813-876b-991a2456f3f6
   [junit4]   2> 3420411 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x79d818c8536081a0: from storage DS-32b31028-d557-41d3-b3fe-6c04e7802b0f node DatanodeRegistration(127.0.0.1:40194, datanodeUuid=e87e9286-3fc1-4813-876b-991a2456f3f6, infoPort=36674, infoSecurePort=0, ipcPort=43065, storageInfo=lv=-57;cid=testClusterID;nsid=1404170054;c=1558531777458), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 3420412 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x79d818c8536081a0: Processing first storage report for DS-d65a598c-dd2a-4a3b-a9cf-d42995954228 from datanode e87e9286-3fc1-4813-876b-991a2456f3f6
   [junit4]   2> 3420412 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x79d818c8536081a0: from storage DS-d65a598c-dd2a-4a3b-a9cf-d42995954228 node DatanodeRegistration(127.0.0.1:40194, datanodeUuid=e87e9286-3fc1-4813-876b-991a2456f3f6, infoPort=36674, infoSecurePort=0, ipcPort=43065, storageInfo=lv=-57;cid=testClusterID;nsid=1404170054;c=1558531777458), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 3420647 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[F4A76428D7CC3F]) [    ] o.a.s.c.MiniSolrCloudCluster Starting cluster of 3 servers in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_F4A76428D7CC3F-001/tempDir-002
   [junit4]   2> 3420647 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[F4A76428D7CC3F]) [    ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 3420667 INFO  (ZkTestServer Run Thread) [    ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 3420667 INFO  (ZkTestServer Run Thread) [    ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 3420792 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[F4A76428D7CC3F]) [    ] o.a.s.c.ZkTestServer start zk server on port:44879
   [junit4]   2> 3420792 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[F4A76428D7CC3F]) [    ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:44879
   [junit4]   2> 3420792 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[F4A76428D7CC3F]) [    ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 44879
   [junit4]   2> 3421047 INFO  (zkConnectionManagerCallback-59465-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3421412 INFO  (zkConnectionManagerCallback-59467-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3421436 INFO  (zkConnectionManagerCallback-59469-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3421473 WARN  (jetty-launcher-59470-thread-1) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 3421473 INFO  (jetty-launcher-59470-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 3421473 INFO  (jetty-launcher-59470-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 3421473 INFO  (jetty-launcher-59470-thread-1) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 3421489 WARN  (jetty-launcher-59470-thread-2) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 3421489 INFO  (jetty-launcher-59470-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 3421489 INFO  (jetty-launcher-59470-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 3421489 INFO  (jetty-launcher-59470-thread-2) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 3421501 WARN  (jetty-launcher-59470-thread-3) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 3421501 INFO  (jetty-launcher-59470-thread-3) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 3421501 INFO  (jetty-launcher-59470-thread-3) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 3421501 INFO  (jetty-launcher-59470-thread-3) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 3421582 INFO  (jetty-launcher-59470-thread-1) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 3421582 INFO  (jetty-launcher-59470-thread-1) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 3421582 INFO  (jetty-launcher-59470-thread-1) [    ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 3421598 INFO  (jetty-launcher-59470-thread-1) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@15055eb3{/solr,null,AVAILABLE}
   [junit4]   2> 3421610 INFO  (jetty-launcher-59470-thread-1) [    ] o.e.j.s.AbstractConnector Started ServerConnector@45935dc3{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:34363}
   [junit4]   2> 3421611 INFO  (jetty-launcher-59470-thread-1) [    ] o.e.j.s.Server Started @3421881ms
   [junit4]   2> 3421611 INFO  (jetty-launcher-59470-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=34363}
   [junit4]   2> 3421611 ERROR (jetty-launcher-59470-thread-1) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 3421611 INFO  (jetty-launcher-59470-thread-1) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 3421611 INFO  (jetty-launcher-59470-thread-1) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr? version 9.0.0
   [junit4]   2> 3421611 INFO  (jetty-launcher-59470-thread-1) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 3421611 INFO  (jetty-launcher-59470-thread-1) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 3421611 INFO  (jetty-launcher-59470-thread-1) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-05-22T13:29:44.910492Z
   [junit4]   2> 3421683 INFO  (jetty-launcher-59470-thread-2) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 3421683 INFO  (jetty-launcher-59470-thread-2) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 3421683 INFO  (jetty-launcher-59470-thread-2) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 3421684 INFO  (jetty-launcher-59470-thread-2) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@58cfbb54{/solr,null,AVAILABLE}
   [junit4]   2> 3421684 INFO  (jetty-launcher-59470-thread-2) [    ] o.e.j.s.AbstractConnector Started ServerConnector@4c2fa80a{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:36798}
   [junit4]   2> 3421684 INFO  (jetty-launcher-59470-thread-2) [    ] o.e.j.s.Server Started @3421954ms
   [junit4]   2> 3421684 INFO  (jetty-launcher-59470-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=36798}
   [junit4]   2> 3421684 ERROR (jetty-launcher-59470-thread-2) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 3421684 INFO  (jetty-launcher-59470-thread-2) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 3421684 INFO  (jetty-launcher-59470-thread-2) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr? version 9.0.0
   [junit4]   2> 3421684 INFO  (jetty-launcher-59470-thread-2) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 3421684 INFO  (jetty-launcher-59470-thread-2) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 3421684 INFO  (jetty-launcher-59470-thread-2) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-05-22T13:29:44.983891Z
   [junit4]   2> 3421744 INFO  (zkConnectionManagerCallback-59472-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3421761 INFO  (jetty-launcher-59470-thread-1) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 3421763 INFO  (jetty-launcher-59470-thread-1) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 3421763 INFO  (jetty-launcher-59470-thread-1) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 3421764 INFO  (jetty-launcher-59470-thread-3) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 3421764 INFO  (jetty-launcher-59470-thread-3) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 3421764 INFO  (jetty-launcher-59470-thread-3) [    ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 3421777 INFO  (jetty-launcher-59470-thread-1) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@338a23c7, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 3421781 INFO  (jetty-launcher-59470-thread-3) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@7d71a88c{/solr,null,AVAILABLE}
   [junit4]   2> 3421797 INFO  (jetty-launcher-59470-thread-3) [    ] o.e.j.s.AbstractConnector Started ServerConnector@300c4480{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:34343}
   [junit4]   2> 3421797 INFO  (jetty-launcher-59470-thread-3) [    ] o.e.j.s.Server Started @3422067ms
   [junit4]   2> 3421797 INFO  (jetty-launcher-59470-thread-3) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=34343}
   [junit4]   2> 3421797 ERROR (jetty-launcher-59470-thread-3) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 3421797 INFO  (jetty-launcher-59470-thread-3) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 3421797 INFO  (jetty-launcher-59470-thread-3) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr? version 9.0.0
   [junit4]   2> 3421797 INFO  (jetty-launcher-59470-thread-3) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 3421797 INFO  (jetty-launcher-59470-thread-3) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 3421797 INFO  (jetty-launcher-59470-thread-3) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-05-22T13:29:45.096509Z
   [junit4]   2> 3421825 INFO  (zkConnectionManagerCallback-59474-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3421825 INFO  (zkConnectionManagerCallback-59477-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3421833 INFO  (jetty-launcher-59470-thread-3) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 3421833 INFO  (jetty-launcher-59470-thread-2) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 3421836 INFO  (jetty-launcher-59470-thread-3) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 3421836 INFO  (jetty-launcher-59470-thread-3) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 3421852 INFO  (jetty-launcher-59470-thread-2) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 3421852 INFO  (jetty-launcher-59470-thread-2) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 3421853 INFO  (jetty-launcher-59470-thread-2) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@338a23c7, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 3421907 INFO  (jetty-launcher-59470-thread-3) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@338a23c7, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 3422324 INFO  (jetty-launcher-59470-thread-1) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 3422344 WARN  (jetty-launcher-59470-thread-1) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@103c9e0d[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 3422360 WARN  (jetty-launcher-59470-thread-1) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@1b001f2a[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 3422361 INFO  (jetty-launcher-59470-thread-1) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:44879/solr
   [junit4]   2> 3422379 INFO  (zkConnectionManagerCallback-59485-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3422432 INFO  (zkConnectionManagerCallback-59487-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3422601 INFO  (jetty-launcher-59470-thread-2) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 3422618 WARN  (jetty-launcher-59470-thread-2) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@79709eae[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 3422709 WARN  (jetty-launcher-59470-thread-2) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@7d75adc2[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 3422711 INFO  (jetty-launcher-59470-thread-2) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:44879/solr
   [junit4]   2> 3422780 INFO  (zkConnectionManagerCallback-59495-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3422906 INFO  (zkConnectionManagerCallback-59497-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3423534 INFO  (jetty-launcher-59470-thread-2) [n:127.0.0.1:36798_solr    ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:36798_solr
   [junit4]   2> 3423571 INFO  (jetty-launcher-59470-thread-2) [n:127.0.0.1:36798_solr    ] o.a.s.c.Overseer Overseer (id=74735996940779529-127.0.0.1:36798_solr-n_0000000000) starting
   [junit4]   2> 3424053 INFO  (zkConnectionManagerCallback-59504-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3424207 INFO  (jetty-launcher-59470-thread-2) [n:127.0.0.1:36798_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:44879/solr ready
   [junit4]   2> 3424243 INFO  (OverseerStateUpdate-74735996940779529-127.0.0.1:36798_solr-n_0000000000) [n:127.0.0.1:36798_solr    ] o.a.s.c.Overseer Starting to work on the main queue : 127.0.0.1:36798_solr
   [junit4]   2> 3424256 INFO  (jetty-launcher-59470-thread-2) [n:127.0.0.1:36798_solr    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:36798_solr
   [junit4]   2> 3424292 INFO  (OverseerStateUpdate-74735996940779529-127.0.0.1:36798_solr-n_0000000000) [n:127.0.0.1:36798_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 3424333 INFO  (zkCallback-59503-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 3424478 INFO  (jetty-launcher-59470-thread-1) [n:127.0.0.1:34363_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 3424512 INFO  (jetty-launcher-59470-thread-2) [n:127.0.0.1:36798_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 3424602 INFO  (jetty-launcher-59470-thread-2) [n:127.0.0.1:36798_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@338a23c7
   [junit4]   2> 3424640 INFO  (jetty-launcher-59470-thread-2) [n:127.0.0.1:36798_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@338a23c7
   [junit4]   2> 3424657 INFO  (jetty-launcher-59470-thread-2) [n:127.0.0.1:36798_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@338a23c7
   [junit4]   2> 3424658 INFO  (jetty-launcher-59470-thread-2) [n:127.0.0.1:36798_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_F4A76428D7CC3F-001/tempDir-002/node2/.
   [junit4]   2> 3424818 INFO  (jetty-launcher-59470-thread-1) [n:127.0.0.1:34363_solr    ] o.a.s.c.ZkController Publish node=127.0.0.1:34363_solr as DOWN
   [junit4]   2> 3424875 INFO  (jetty-launcher-59470-thread-1) [n:127.0.0.1:34363_solr    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 3424875 INFO  (jetty-launcher-59470-thread-1) [n:127.0.0.1:34363_solr    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:34363_solr
   [junit4]   2> 3424904 INFO  (zkCallback-59496-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 3424904 INFO  (zkCallback-59486-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 3424920 INFO  (zkCallback-59503-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 3424952 INFO  (zkConnectionManagerCallback-59510-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3424995 INFO  (jetty-launcher-59470-thread-3) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 3425009 WARN  (jetty-launcher-59470-thread-3) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@1d38e32a[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 3425038 WARN  (jetty-launcher-59470-thread-3) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@49fec504[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 3425039 INFO  (jetty-launcher-59470-thread-3) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:44879/solr
   [junit4]   2> 3425045 INFO  (jetty-launcher-59470-thread-1) [n:127.0.0.1:34363_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 3425073 INFO  (zkConnectionManagerCallback-59516-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3425119 INFO  (jetty-launcher-59470-thread-1) [n:127.0.0.1:34363_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:44879/solr ready
   [junit4]   2> 3425203 INFO  (zkConnectionManagerCallback-59518-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3425227 INFO  (jetty-launcher-59470-thread-1) [n:127.0.0.1:34363_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 3425265 INFO  (jetty-launcher-59470-thread-3) [n:127.0.0.1:34343_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 3425302 INFO  (jetty-launcher-59470-thread-3) [n:127.0.0.1:34343_solr    ] o.a.s.c.ZkController Publish node=127.0.0.1:34343_solr as DOWN
   [junit4]   2> 3425302 INFO  (jetty-launcher-59470-thread-3) [n:127.0.0.1:34343_solr    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 3425302 INFO  (jetty-launcher-59470-thread-3) [n:127.0.0.1:34343_solr    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:34343_solr
   [junit4]   2> 3425303 INFO  (zkCallback-59486-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 3425304 INFO  (zkCallback-59503-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 3425304 INFO  (zkCallback-59496-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 3425336 INFO  (zkCallback-59509-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 3425373 INFO  (zkCallback-59517-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 3425375 INFO  (jetty-launcher-59470-thread-1) [n:127.0.0.1:34363_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@338a23c7
   [junit4]   2> 3425421 INFO  (zkConnectionManagerCallback-59526-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3425520 INFO  (jetty-launcher-59470-thread-3) [n:127.0.0.1:34343_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 3425520 INFO  (jetty-launcher-59470-thread-1) [n:127.0.0.1:34363_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@338a23c7
   [junit4]   2> 3425520 INFO  (jetty-launcher-59470-thread-1) [n:127.0.0.1:34363_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@338a23c7
   [junit4]   2> 3425520 INFO  (jetty-launcher-59470-thread-3) [n:127.0.0.1:34343_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:44879/solr ready
   [junit4]   2> 3425521 INFO  (jetty-launcher-59470-thread-1) [n:127.0.0.1:34363_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_F4A76428D7CC3F-001/tempDir-002/node1/.
   [junit4]   2> 3425802 INFO  (jetty-launcher-59470-thread-3) [n:127.0.0.1:34343_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 3425933 INFO  (jetty-launcher-59470-thread-3) [n:127.0.0.1:34343_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@338a23c7
   [junit4]   2> 3426095 INFO  (jetty-launcher-59470-thread-3) [n:127.0.0.1:34343_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@338a23c7
   [junit4]   2> 3426095 INFO  (jetty-launcher-59470-thread-3) [n:127.0.0.1:34343_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@338a23c7
   [junit4]   2> 3426096 INFO  (jetty-launcher-59470-thread-3) [n:127.0.0.1:34343_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_F4A76428D7CC3F-001/tempDir-002/node3/.
   [junit4]   2> 3426674 INFO  (zkConnectionManagerCallback-59532-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 3426686 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[F4A76428D7CC3F]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 3426686 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[F4A76428D7CC3F]) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:44879/solr ready
   [junit4]   2> 3426722 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/cluster params={wt=javabin&version=2} status=0 QTime=4
   [junit4]   2> 3426725 INFO  (qtp1793648870-105799) [n:127.0.0.1:34363_solr    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf&maxShardsPerNode=2&autoAddReplicas=true&name=testSimple1&nrtReplicas=2&action=CREATE&numShards=2&createNodeSet=127.0.0.1:36798_solr,127.0.0.1:34363_solr&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 3426847 INFO  (OverseerThreadFactory-8944-thread-1-processing-n:127.0.0.1:36798_solr) [n:127.0.0.1:36798_solr    ] o.a.s.c.a.c.CreateCollectionCmd Create collection testSimple1
   [junit4]   2> 3427199 INFO  (OverseerStateUpdate-74735996940779529-127.0.0.1:36798_solr-n_0000000000) [n:127.0.0.1:36798_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"testSimple1",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"testSimple1_shard1_replica_n1",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:36798/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 3427272 INFO  (OverseerStateUpdate-74735996940779529-127.0.0.1:36798_solr-n_0000000000) [n:127.0.0.1:36798_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"testSimple1",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"testSimple1_shard1_replica_n2",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:34363/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 3427402 INFO  (OverseerStateUpdate-74735996940779529-127.0.0.1:36798_solr-n_0000000000) [n:127.0.0.1:36798_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"testSimple1",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"testSimple1_shard2_replica_n3",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:36798/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 3427475 INFO  (OverseerStateUpdate-74735996940779529-127.0.0.1:36798_solr-n_0000000000) [n:127.0.0.1:36798_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"testSimple1",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"testSimple1_shard2_replica_n4",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:34363/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 3427808 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr    x:testSimple1_shard1_replica_n2] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node6&collection.configName=conf&newCollection=true&name=testSimple1_shard1_replica_n2&action=CREATE&numShards=2&collection=testSimple1&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 3427809 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr    x:testSimple1_shard2_replica_n3] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node7&collection.configName=conf&newCollection=true&name=testSimple1_shard2_replica_n3&action=CREATE&numShards=2&collection=testSimple1&shard=shard2&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 3427809 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr    x:testSimple1_shard2_replica_n3] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 3427811 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr    x:testSimple1_shard2_replica_n4] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node8&collection.configName=conf&newCollection=true&name=testSimple1_shard2_replica_n4&action=CREATE&numShards=2&collection=testSimple1&shard=shard2&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 3427848 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr    x:testSimple1_shard1_replica_n1] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node5&collection.configName=conf&newCollection=true&name=testSimple1_shard1_replica_n1&action=CREATE&numShards=2&collection=testSimple1&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 3428858 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 3428895 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.s.IndexSchema [testSimple1_shard2_replica_n4] Schema name=minimal
   [junit4]   2> 3428912 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 3428912 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.CoreContainer Creating SolrCore 'testSimple1_shard2_replica_n4' using configuration from collection testSimple1, trusted=true
   [junit4]   2> 3428912 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.testSimple1.shard2.replica_n4' (registry 'solr.core.testSimple1.shard2.replica_n4') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@338a23c7
   [junit4]   2> 3429076 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 3429245 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 3429261 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home
   [junit4]   2> 3429261 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 3429268 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.s.IndexSchema [testSimple1_shard1_replica_n2] Schema name=minimal
   [junit4]   2> 3429268 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 3429277 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.SolrCore [[testSimple1_shard2_replica_n4] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_F4A76428D7CC3F-001/tempDir-002/node1/testSimple1_shard2_replica_n4], dataDir=[hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple1/core_node8/data/]
   [junit4]   2> 3429348 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple1/core_node8/data/snapshot_metadata
   [junit4]   2> 3429270 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 3429379 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.CoreContainer Creating SolrCore 'testSimple1_shard1_replica_n2' using configuration from collection testSimple1, trusted=true
   [junit4]   2> 3429380 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.testSimple1.shard1.replica_n2' (registry 'solr.core.testSimple1.shard1.replica_n2') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@338a23c7
   [junit4]   2> 3429380 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home
   [junit4]   2> 3429380 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 3429380 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.SolrCore [[testSimple1_shard1_replica_n2] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_F4A76428D7CC3F-001/tempDir-002/node1/testSimple1_shard1_replica_n2], dataDir=[hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple1/core_node6/data/]
   [junit4]   2> 3429381 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple1/core_node6/data/snapshot_metadata
   [junit4]   2> 3429486 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.s.IndexSchema [testSimple1_shard1_replica_n1] Schema name=minimal
   [junit4]   2> 3429486 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.s.IndexSchema [testSimple1_shard2_replica_n3] Schema name=minimal
   [junit4]   2> 3429488 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 3429488 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 3429488 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.c.CoreContainer Creating SolrCore 'testSimple1_shard1_replica_n1' using configuration from collection testSimple1, trusted=true
   [junit4]   2> 3429488 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.c.CoreContainer Creating SolrCore 'testSimple1_shard2_replica_n3' using configuration from collection testSimple1, trusted=true
   [junit4]   2> 3429521 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.testSimple1.shard1.replica_n1' (registry 'solr.core.testSimple1.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@338a23c7
   [junit4]   2> 3429521 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.testSimple1.shard2.replica_n3' (registry 'solr.core.testSimple1.shard2.replica_n3') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@338a23c7
   [junit4]   2> 3429522 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home
   [junit4]   2> 3429522 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 3429522 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.c.SolrCore [[testSimple1_shard1_replica_n1] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_F4A76428D7CC3F-001/tempDir-002/node2/testSimple1_shard1_replica_n1], dataDir=[hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple1/core_node5/data/]
   [junit4]   2> 3429522 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home
   [junit4]   2> 3429522 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 3429522 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.c.SolrCore [[testSimple1_shard2_replica_n3] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_F4A76428D7CC3F-001/tempDir-002/node2/testSimple1_shard2_replica_n3], dataDir=[hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple1/core_node7/data/]
   [junit4]   2> 3429523 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple1/core_node5/data/snapshot_metadata
   [junit4]   2> 3429523 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple1/core_node7/data/snapshot_metadata
   [junit4]   2> 3429528 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 3429528 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 3429528 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 3429528 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 3429528 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 3429528 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 3429575 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 3429575 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 3429575 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 3429595 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 3429595 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 3429595 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 3431349 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 3431350 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 3431385 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 3431410 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple1/core_node6/data
   [junit4]   2> 3431422 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple1/core_node7/data
   [junit4]   2> 3431424 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple1/core_node5/data
   [junit4]   2> 3431788 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple1/core_node6/data/index
   [junit4]   2> 3431802 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 3431826 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple1/core_node7/data/index
   [junit4]   2> 3431826 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple1/core_node5/data/index
   [junit4]   2> 3431847 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 3431847 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 3431847 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 3431859 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple1/core_node8/data
   [junit4]   2> 3431934 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 3431934 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 3431934 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 3431953 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 3431953 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 3431953 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 3431985 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 3432107 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 3432114 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 3432289 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:39387/solr_hdfs_home/testSimple1/core_node8/data/index
   [junit4]   2> 3432357 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 3432357 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 3432357 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 3432442 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 3435484 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 3435484 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 3435484 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 3435538 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 3435538 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 3435538 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 3435603 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 3435603 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 3435603 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 3435604 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 3435605 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 3435697 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 3435697 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 3435715 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 3435715 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 3435721 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 3435721 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 3435721 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 3435787 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 3435787 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 3436646 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.s.SolrIndexSearcher Opening [Searcher@1bd3cae8[testSimple1_shard1_replica_n1] main]
   [junit4]   2> 3436646 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.s.SolrIndexSearcher Opening [Searcher@5a4ae0f[testSimple1_shard2_replica_n4] main]
   [junit4]   2> 3436649 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 3436649 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 3436661 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.s.SolrIndexSearcher Opening [Searcher@67eb95e7[testSimple1_shard1_replica_n2] main]
   [junit4]   2> 3436663 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.s.SolrIndexSearcher Opening [Searcher@65318a3c[testSimple1_shard2_replica_n3] main]
   [junit4]   2> 3436666 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 3436666 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 3436666 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 3436666 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 3436679 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 3436680 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1634239040694779904
   [junit4]   2> 3436685 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 3436686 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 3436737 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1634239040754548736
   [junit4]   2> 3436739 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1634239040756645888
   [junit4]   2> 3436755 INFO  (searcherExecutor-8960-thread-1-processing-n:127.0.0.1:34363_solr x:testSimple1_shard2_replica_n4 c:testSimple1 s:shard2 r:core_node8) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.SolrCore [testSimple1_shard2_replica_n4] Registered new searcher Searcher@5a4ae0f[testSimple1_shard2_replica_n4] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 3436770 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.ZkShardTerms Successful update of terms at /collections/testSimple1/terms/shard2 to Terms{values={core_node8=0}, version=0}
   [junit4]   2> 3436770 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/testSimple1/leaders/shard2
   [junit4]   2> 3436774 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 3436775 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 3436775 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 3436776 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1634239040795443200
   [junit4]   2> 3436786 INFO  (searcherExecutor-8961-thread-1-processing-n:127.0.0.1:34363_solr x:testSimple1_shard1_replica_n2 c:testSimple1 s:shard1 r:core_node6) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.SolrCore [testSimple1_shard1_replica_n2] Registered new searcher Searcher@67eb95e7[testSimple1_shard1_replica_n2] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 3436786 INFO  (searcherExecutor-8962-thread-1-processing-n:127.0.0.1:36798_solr x:testSimple1_shard1_replica_n1 c:testSimple1 s:shard1 r:core_node5) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.c.SolrCore [testSimple1_shard1_replica_n1] Registered new searcher Searcher@1bd3cae8[testSimple1_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 3436791 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.ShardLeaderElectionContext Waiting until we see more replicas up for shard shard2: total=2 found=1 timeoutin=14996ms
   [junit4]   2> 3436792 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.ZkShardTerms Successful update of terms at /collections/testSimple1/terms/shard1 to Terms{values={core_node6=0}, version=0}
   [junit4]   2> 3436792 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/testSimple1/leaders/shard1
   [junit4]   2> 3436807 INFO  (searcherExecutor-8963-thread-1-processing-n:127.0.0.1:36798_solr x:testSimple1_shard2_replica_n3 c:testSimple1 s:shard2 r:core_node7) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.c.SolrCore [testSimple1_shard2_replica_n3] Registered new searcher Searcher@65318a3c[testSimple1_shard2_replica_n3] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 3436807 INFO  (zkCallback-59496-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimple1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 3436809 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.ShardLeaderElectionContext Waiting until we see more replicas up for shard shard1: total=2 found=1 timeoutin=14999ms
   [junit4]   2> 3436849 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.c.ZkShardTerms Successful update of terms at /collections/testSimple1/terms/shard1 to Terms{values={core_node6=0, core_node5=0}, version=1}
   [junit4]   2> 3436849 INFO  (qtp2100012586-105793) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/testSimple1/leaders/shard1
   [junit4]   2> 3436867 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.c.ZkShardTerms Successful update of terms at /collections/testSimple1/terms/shard2 to Terms{values={core_node7=0, core_node8=0}, version=1}
   [junit4]   2> 3436867 INFO  (qtp2100012586-105813) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/testSimple1/leaders/shard2
   [junit4]   2> 3437342 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 3437342 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 3437342 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:34363/solr/testSimple1_shard1_replica_n2/
   [junit4]   2> 3437342 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.u.PeerSync PeerSync: core=testSimple1_shard1_replica_n2 url=http://127.0.0.1:34363/solr START replicas=[http://127.0.0.1:36798/solr/testSimple1_shard1_replica_n1/] nUpdates=100
   [junit4]   2> 3437353 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 3437353 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 3437353 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:34363/solr/testSimple1_shard2_replica_n4/
   [junit4]   2> 3437357 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.u.PeerSync PeerSync: core=testSimple1_shard1_replica_n2 url=http://127.0.0.1:34363/solr DONE.  We have no versions.  sync failed.
   [junit4]   2> 3437370 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.u.PeerSync PeerSync: core=testSimple1_shard2_replica_n4 url=http://127.0.0.1:34363/solr START replicas=[http://127.0.0.1:36798/solr/testSimple1_shard2_replica_n3/] nUpdates=100
   [junit4]   2> 3437372 INFO  (qtp2100012586-105812) [n:127.0.0.1:36798_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n1] o.a.s.c.S.Request [testSimple1_shard1_replica_n1]  webapp=/solr path=/get params={distrib=false&qt=/get&fingerprint=false&getVersions=100&wt=javabin&version=2} status=0 QTime=1
   [junit4]   2> 3437372 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.u.PeerSync PeerSync: core=testSimple1_shard2_replica_n4 url=http://127.0.0.1:34363/solr DONE.  We have no versions.  sync failed.
   [junit4]   2> 3437378 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.SyncStrategy Leader's attempt to sync with shard failed, moving to the next candidate
   [junit4]   2> 3437378 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.ShardLeaderElectionContext We failed sync, but we have no versions - we can't sync in that case - we were active before, so become leader anyway
   [junit4]   2> 3437378 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.ShardLeaderElectionContextBase Creating leader registration node /collections/testSimple1/leaders/shard1/leader after winning as /collections/testSimple1/leader_elect/shard1/election/74735996940779527-core_node6-n_0000000000
   [junit4]   2> 3437379 INFO  (qtp1793648870-105801) [n:127.0.0.1:34363_solr c:testSimple1 s:shard1 r:core_node6 x:testSimple1_shard1_replica_n2] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:34363/solr/testSimple1_shard1_replica_n2/ shard1
   [junit4]   2> 3437386 INFO  (qtp2100012586-105814) [n:127.0.0.1:36798_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n3] o.a.s.c.S.Request [testSimple1_shard2_replica_n3]  webapp=/solr path=/get params={distrib=false&qt=/get&fingerprint=false&getVersions=100&wt=javabin&version=2} status=0 QTime=13
   [junit4]   2> 3437390 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.SyncStrategy Leader's attempt to sync with shard failed, moving to the next candidate
   [junit4]   2> 3437390 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.ShardLeaderElectionContext We failed sync, but we have no versions - we can't sync in that case - we were active before, so become leader anyway
   [junit4]   2> 3437390 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.ShardLeaderElectionContextBase Creating leader registration node /collections/testSimple1/leaders/shard2/leader after winning as /collections/testSimple1/leader_elect/shard2/election/74735996940779527-core_node8-n_0000000000
   [junit4]   2> 3437396 INFO  (qtp1793648870-105798) [n:127.0.0.1:34363_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n4] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:34363/solr/testSimple1_shard2_replica_n4/ shard2
   [junit4]   2> 3437528 INFO  (zkCallback-59486-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimpl

[...truncated too long message...]

ettings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

jar-checksums:
    [mkdir] Created dir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/null1330205002
     [copy] Copying 240 files to /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/null1330205002
   [delete] Deleting directory /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/null1330205002

check-working-copy:
[ivy:cachepath] :: resolving dependencies :: #;working@lucene2-us-west.apache.org
[ivy:cachepath] 	confs: [default]
[ivy:cachepath] 	found org.eclipse.jgit#org.eclipse.jgit;5.3.0.201903130848-r in public
[ivy:cachepath] 	found com.jcraft#jsch;0.1.54 in public
[ivy:cachepath] 	found com.jcraft#jzlib;1.1.1 in public
[ivy:cachepath] 	found com.googlecode.javaewah#JavaEWAH;1.1.6 in public
[ivy:cachepath] 	found org.slf4j#slf4j-api;1.7.2 in public
[ivy:cachepath] 	found org.bouncycastle#bcpg-jdk15on;1.60 in public
[ivy:cachepath] 	found org.bouncycastle#bcprov-jdk15on;1.60 in public
[ivy:cachepath] 	found org.bouncycastle#bcpkix-jdk15on;1.60 in public
[ivy:cachepath] 	found org.slf4j#slf4j-nop;1.7.2 in public
[ivy:cachepath] :: resolution report :: resolve 53ms :: artifacts dl 5ms
	---------------------------------------------------------------------
	|                  |            modules            ||   artifacts   |
	|       conf       | number| search|dwnlded|evicted|| number|dwnlded|
	---------------------------------------------------------------------
	|      default     |   9   |   0   |   0   |   0   ||   9   |   0   |
	---------------------------------------------------------------------
[wc-checker] Initializing working copy...
[wc-checker] Checking working copy status...

-jenkins-base:

BUILD SUCCESSFUL
Total time: 722 minutes 11 seconds
Archiving artifacts
java.lang.InterruptedException: no matches found within 10000
	at hudson.FilePath$ValidateAntFileMask.hasMatch(FilePath.java:2847)
	at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2726)
	at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2707)
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3086)
Also:   hudson.remoting.Channel$CallSiteStackTrace: Remote call to lucene2
		at hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1741)
		at hudson.remoting.UserRequest$ExceptionResponse.retrieve(UserRequest.java:357)
		at hudson.remoting.Channel.call(Channel.java:955)
		at hudson.FilePath.act(FilePath.java:1072)
		at hudson.FilePath.act(FilePath.java:1061)
		at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
		at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
		at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
		at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
		at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
		at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
		at hudson.model.Build$BuildExecution.post2(Build.java:186)
		at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
		at hudson.model.Run.execute(Run.java:1835)
		at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
		at hudson.model.ResourceController.execute(ResourceController.java:97)
		at hudson.model.Executor.run(Executor.java:429)
Caused: hudson.FilePath$TunneledInterruptedException
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3088)
	at hudson.remoting.UserRequest.perform(UserRequest.java:212)
	at hudson.remoting.UserRequest.perform(UserRequest.java:54)
	at hudson.remoting.Request$2.run(Request.java:369)
	at hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:748)
Caused: java.lang.InterruptedException: java.lang.InterruptedException: no matches found within 10000
	at hudson.FilePath.act(FilePath.java:1074)
	at hudson.FilePath.act(FilePath.java:1061)
	at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
	at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
	at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
	at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
	at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
	at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
	at hudson.model.Build$BuildExecution.post2(Build.java:186)
	at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
	at hudson.model.Run.execute(Run.java:1835)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:97)
	at hudson.model.Executor.run(Executor.java:429)
No artifacts found that match the file pattern "**/*.events,heapdumps/**,**/hs_err_pid*". Configuration error?
Recording test results
Build step 'Publish JUnit test result report' changed build result to UNSTABLE
Email was triggered for: Unstable (Test Failures)
Sending email for trigger: Unstable (Test Failures)

[JENKINS] Lucene-Solr-NightlyTests-master - Build # 1852 - Failure

Posted by Apache Jenkins Server <je...@builds.apache.org>.
Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-master/1852/

4 tests failed.
FAILED:  org.apache.lucene.codecs.simpletext.TestSimpleTextNormsFormat.testNCommonBig

Error Message:
Test abandoned because suite timeout was reached.

Stack Trace:
java.lang.Exception: Test abandoned because suite timeout was reached.
	at __randomizedtesting.SeedInfo.seed([246FB680C9859FD2]:0)


FAILED:  junit.framework.TestSuite.org.apache.lucene.codecs.simpletext.TestSimpleTextNormsFormat

Error Message:
Suite timeout exceeded (>= 7200000 msec).

Stack Trace:
java.lang.Exception: Suite timeout exceeded (>= 7200000 msec).
	at __randomizedtesting.SeedInfo.seed([246FB680C9859FD2]:0)


FAILED:  junit.framework.TestSuite.org.apache.solr.handler.TestReplicationHandler

Error Message:
ObjectTracker found 4 object(s) that were not released!!! [InternalHttpClient, SolrCore, MockDirectoryWrapper, MockDirectoryWrapper] org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.http.impl.client.InternalHttpClient  at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)  at org.apache.solr.client.solrj.impl.HttpClientUtil.createClient(HttpClientUtil.java:322)  at org.apache.solr.client.solrj.impl.HttpClientUtil.createClient(HttpClientUtil.java:331)  at org.apache.solr.handler.IndexFetcher.createHttpClient(IndexFetcher.java:230)  at org.apache.solr.handler.IndexFetcher.<init>(IndexFetcher.java:272)  at org.apache.solr.handler.ReplicationHandler.inform(ReplicationHandler.java:1222)  at org.apache.solr.core.SolrResourceLoader.inform(SolrResourceLoader.java:696)  at org.apache.solr.core.SolrCore.<init>(SolrCore.java:1005)  at org.apache.solr.core.SolrCore.<init>(SolrCore.java:879)  at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1227)  at org.apache.solr.core.CoreContainer.lambda$load$13(CoreContainer.java:749)  at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:202)  at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)  at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:209)  at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)  at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)  at java.base/java.lang.Thread.run(Thread.java:834)  org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.solr.core.SolrCore  at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)  at org.apache.solr.core.SolrCore.<init>(SolrCore.java:1059)  at org.apache.solr.core.SolrCore.<init>(SolrCore.java:879)  at 
org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1227)  at org.apache.solr.core.CoreContainer.lambda$load$13(CoreContainer.java:749)  at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:202)  at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)  at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:209)  at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)  at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)  at java.base/java.lang.Thread.run(Thread.java:834)  org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.lucene.store.MockDirectoryWrapper  at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)  at org.apache.solr.core.CachingDirectoryFactory.get(CachingDirectoryFactory.java:348)  at org.apache.solr.update.SolrIndexWriter.create(SolrIndexWriter.java:99)  at org.apache.solr.core.SolrCore.initIndex(SolrCore.java:775)  at org.apache.solr.core.SolrCore.<init>(SolrCore.java:972)  at org.apache.solr.core.SolrCore.<init>(SolrCore.java:879)  at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1227)  at org.apache.solr.core.CoreContainer.lambda$load$13(CoreContainer.java:749)  at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:202)  at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)  at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:209)  at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)  at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)  at java.base/java.lang.Thread.run(Thread.java:834)  
org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.lucene.store.MockDirectoryWrapper  at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)  at org.apache.solr.core.CachingDirectoryFactory.get(CachingDirectoryFactory.java:348)  at org.apache.solr.core.SolrCore.initSnapshotMetaDataManager(SolrCore.java:513)  at org.apache.solr.core.SolrCore.<init>(SolrCore.java:964)  at org.apache.solr.core.SolrCore.<init>(SolrCore.java:879)  at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1227)  at org.apache.solr.core.CoreContainer.lambda$load$13(CoreContainer.java:749)  at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:202)  at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)  at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:209)  at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)  at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)  at java.base/java.lang.Thread.run(Thread.java:834)   expected null, but was:<ObjectTracker found 4 object(s) that were not released!!! 
[InternalHttpClient, SolrCore, MockDirectoryWrapper, MockDirectoryWrapper] org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.http.impl.client.InternalHttpClient  at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)  at org.apache.solr.client.solrj.impl.HttpClientUtil.createClient(HttpClientUtil.java:322)  at org.apache.solr.client.solrj.impl.HttpClientUtil.createClient(HttpClientUtil.java:331)  at org.apache.solr.handler.IndexFetcher.createHttpClient(IndexFetcher.java:230)  at org.apache.solr.handler.IndexFetcher.<init>(IndexFetcher.java:272)  at org.apache.solr.handler.ReplicationHandler.inform(ReplicationHandler.java:1222)  at org.apache.solr.core.SolrResourceLoader.inform(SolrResourceLoader.java:696)  at org.apache.solr.core.SolrCore.<init>(SolrCore.java:1005)  at org.apache.solr.core.SolrCore.<init>(SolrCore.java:879)  at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1227)  at org.apache.solr.core.CoreContainer.lambda$load$13(CoreContainer.java:749)  at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:202)  at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)  at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:209)  at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)  at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)  at java.base/java.lang.Thread.run(Thread.java:834)  org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.solr.core.SolrCore  at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)  at org.apache.solr.core.SolrCore.<init>(SolrCore.java:1059)  at org.apache.solr.core.SolrCore.<init>(SolrCore.java:879)  at 
org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1227)  at org.apache.solr.core.CoreContainer.lambda$load$13(CoreContainer.java:749)  at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:202)  at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)  at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:209)  at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)  at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)  at java.base/java.lang.Thread.run(Thread.java:834)  org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.lucene.store.MockDirectoryWrapper  at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)  at org.apache.solr.core.CachingDirectoryFactory.get(CachingDirectoryFactory.java:348)  at org.apache.solr.update.SolrIndexWriter.create(SolrIndexWriter.java:99)  at org.apache.solr.core.SolrCore.initIndex(SolrCore.java:775)  at org.apache.solr.core.SolrCore.<init>(SolrCore.java:972)  at org.apache.solr.core.SolrCore.<init>(SolrCore.java:879)  at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1227)  at org.apache.solr.core.CoreContainer.lambda$load$13(CoreContainer.java:749)  at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:202)  at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)  at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:209)  at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)  at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)  at java.base/java.lang.Thread.run(Thread.java:834)  
org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.lucene.store.MockDirectoryWrapper  at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)  at org.apache.solr.core.CachingDirectoryFactory.get(CachingDirectoryFactory.java:348)  at org.apache.solr.core.SolrCore.initSnapshotMetaDataManager(SolrCore.java:513)  at org.apache.solr.core.SolrCore.<init>(SolrCore.java:964)  at org.apache.solr.core.SolrCore.<init>(SolrCore.java:879)  at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1227)  at org.apache.solr.core.CoreContainer.lambda$load$13(CoreContainer.java:749)  at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:202)  at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)  at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:209)  at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)  at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)  at java.base/java.lang.Thread.run(Thread.java:834)  >

Stack Trace:
java.lang.AssertionError: ObjectTracker found 4 object(s) that were not released!!! [InternalHttpClient, SolrCore, MockDirectoryWrapper, MockDirectoryWrapper]
org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.http.impl.client.InternalHttpClient
	at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)
	at org.apache.solr.client.solrj.impl.HttpClientUtil.createClient(HttpClientUtil.java:322)
	at org.apache.solr.client.solrj.impl.HttpClientUtil.createClient(HttpClientUtil.java:331)
	at org.apache.solr.handler.IndexFetcher.createHttpClient(IndexFetcher.java:230)
	at org.apache.solr.handler.IndexFetcher.<init>(IndexFetcher.java:272)
	at org.apache.solr.handler.ReplicationHandler.inform(ReplicationHandler.java:1222)
	at org.apache.solr.core.SolrResourceLoader.inform(SolrResourceLoader.java:696)
	at org.apache.solr.core.SolrCore.<init>(SolrCore.java:1005)
	at org.apache.solr.core.SolrCore.<init>(SolrCore.java:879)
	at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1227)
	at org.apache.solr.core.CoreContainer.lambda$load$13(CoreContainer.java:749)
	at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:202)
	at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
	at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:209)
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
	at java.base/java.lang.Thread.run(Thread.java:834)

org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.solr.core.SolrCore
	at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)
	at org.apache.solr.core.SolrCore.<init>(SolrCore.java:1059)
	at org.apache.solr.core.SolrCore.<init>(SolrCore.java:879)
	at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1227)
	at org.apache.solr.core.CoreContainer.lambda$load$13(CoreContainer.java:749)
	at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:202)
	at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
	at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:209)
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
	at java.base/java.lang.Thread.run(Thread.java:834)

org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.lucene.store.MockDirectoryWrapper
	at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)
	at org.apache.solr.core.CachingDirectoryFactory.get(CachingDirectoryFactory.java:348)
	at org.apache.solr.update.SolrIndexWriter.create(SolrIndexWriter.java:99)
	at org.apache.solr.core.SolrCore.initIndex(SolrCore.java:775)
	at org.apache.solr.core.SolrCore.<init>(SolrCore.java:972)
	at org.apache.solr.core.SolrCore.<init>(SolrCore.java:879)
	at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1227)
	at org.apache.solr.core.CoreContainer.lambda$load$13(CoreContainer.java:749)
	at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:202)
	at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
	at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:209)
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
	at java.base/java.lang.Thread.run(Thread.java:834)

org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.lucene.store.MockDirectoryWrapper
	at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)
	at org.apache.solr.core.CachingDirectoryFactory.get(CachingDirectoryFactory.java:348)
	at org.apache.solr.core.SolrCore.initSnapshotMetaDataManager(SolrCore.java:513)
	at org.apache.solr.core.SolrCore.<init>(SolrCore.java:964)
	at org.apache.solr.core.SolrCore.<init>(SolrCore.java:879)
	at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1227)
	at org.apache.solr.core.CoreContainer.lambda$load$13(CoreContainer.java:749)
	at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:202)
	at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
	at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:209)
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
	at java.base/java.lang.Thread.run(Thread.java:834)

 expected null, but was:<ObjectTracker found 4 object(s) that were not released!!! [InternalHttpClient, SolrCore, MockDirectoryWrapper, MockDirectoryWrapper]
org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.http.impl.client.InternalHttpClient
	at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)
	at org.apache.solr.client.solrj.impl.HttpClientUtil.createClient(HttpClientUtil.java:322)
	at org.apache.solr.client.solrj.impl.HttpClientUtil.createClient(HttpClientUtil.java:331)
	at org.apache.solr.handler.IndexFetcher.createHttpClient(IndexFetcher.java:230)
	at org.apache.solr.handler.IndexFetcher.<init>(IndexFetcher.java:272)
	at org.apache.solr.handler.ReplicationHandler.inform(ReplicationHandler.java:1222)
	at org.apache.solr.core.SolrResourceLoader.inform(SolrResourceLoader.java:696)
	at org.apache.solr.core.SolrCore.<init>(SolrCore.java:1005)
	at org.apache.solr.core.SolrCore.<init>(SolrCore.java:879)
	at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1227)
	at org.apache.solr.core.CoreContainer.lambda$load$13(CoreContainer.java:749)
	at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:202)
	at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
	at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:209)
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
	at java.base/java.lang.Thread.run(Thread.java:834)

org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.solr.core.SolrCore
	at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)
	at org.apache.solr.core.SolrCore.<init>(SolrCore.java:1059)
	at org.apache.solr.core.SolrCore.<init>(SolrCore.java:879)
	at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1227)
	at org.apache.solr.core.CoreContainer.lambda$load$13(CoreContainer.java:749)
	at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:202)
	at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
	at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:209)
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
	at java.base/java.lang.Thread.run(Thread.java:834)

org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.lucene.store.MockDirectoryWrapper
	at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)
	at org.apache.solr.core.CachingDirectoryFactory.get(CachingDirectoryFactory.java:348)
	at org.apache.solr.update.SolrIndexWriter.create(SolrIndexWriter.java:99)
	at org.apache.solr.core.SolrCore.initIndex(SolrCore.java:775)
	at org.apache.solr.core.SolrCore.<init>(SolrCore.java:972)
	at org.apache.solr.core.SolrCore.<init>(SolrCore.java:879)
	at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1227)
	at org.apache.solr.core.CoreContainer.lambda$load$13(CoreContainer.java:749)
	at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:202)
	at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
	at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:209)
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
	at java.base/java.lang.Thread.run(Thread.java:834)

org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.lucene.store.MockDirectoryWrapper
	at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)
	at org.apache.solr.core.CachingDirectoryFactory.get(CachingDirectoryFactory.java:348)
	at org.apache.solr.core.SolrCore.initSnapshotMetaDataManager(SolrCore.java:513)
	at org.apache.solr.core.SolrCore.<init>(SolrCore.java:964)
	at org.apache.solr.core.SolrCore.<init>(SolrCore.java:879)
	at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1227)
	at org.apache.solr.core.CoreContainer.lambda$load$13(CoreContainer.java:749)
	at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:202)
	at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
	at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:209)
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
	at java.base/java.lang.Thread.run(Thread.java:834)

>
	at __randomizedtesting.SeedInfo.seed([241E1B59C49615CB]:0)
	at org.junit.Assert.fail(Assert.java:88)
	at org.junit.Assert.failNotNull(Assert.java:755)
	at org.junit.Assert.assertNull(Assert.java:737)
	at org.apache.solr.SolrTestCaseJ4.teardownTestCases(SolrTestCaseJ4.java:333)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:901)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.base/java.lang.Thread.run(Thread.java:834)


FAILED:  junit.framework.TestSuite.org.apache.solr.handler.TestReplicationHandler

Error Message:
file handle leaks: [FileChannel(/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/index-SimpleFSDirectory-015/write.lock)]

Stack Trace:
java.lang.RuntimeException: file handle leaks: [FileChannel(/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/index-SimpleFSDirectory-015/write.lock)]
	at org.apache.lucene.mockfile.LeakFS.onClose(LeakFS.java:63)
	at org.apache.lucene.mockfile.FilterFileSystem.close(FilterFileSystem.java:77)
	at org.apache.lucene.mockfile.FilterFileSystem.close(FilterFileSystem.java:78)
	at org.apache.lucene.util.TestRuleTemporaryFilesCleanup.afterAlways(TestRuleTemporaryFilesCleanup.java:228)
	at com.carrotsearch.randomizedtesting.rules.TestRuleAdapter$1.afterAlways(TestRuleAdapter.java:31)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:43)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.base/java.lang.Thread.run(Thread.java:834)
Caused by: java.lang.Exception
	at org.apache.lucene.mockfile.LeakFS.onOpen(LeakFS.java:46)
	at org.apache.lucene.mockfile.HandleTrackingFS.callOpenHook(HandleTrackingFS.java:81)
	at org.apache.lucene.mockfile.HandleTrackingFS.newFileChannel(HandleTrackingFS.java:197)
	at org.apache.lucene.mockfile.HandleTrackingFS.newFileChannel(HandleTrackingFS.java:166)
	at java.base/java.nio.channels.FileChannel.open(FileChannel.java:292)
	at java.base/java.nio.channels.FileChannel.open(FileChannel.java:345)
	at org.apache.lucene.store.NativeFSLockFactory.obtainFSLock(NativeFSLockFactory.java:125)
	at org.apache.lucene.store.FSLockFactory.obtainLock(FSLockFactory.java:41)
	at org.apache.lucene.store.BaseDirectory.obtainLock(BaseDirectory.java:45)
	at org.apache.lucene.store.FilterDirectory.obtainLock(FilterDirectory.java:105)
	at org.apache.lucene.store.MockDirectoryWrapper.obtainLock(MockDirectoryWrapper.java:1015)
	at org.apache.lucene.index.IndexWriter.<init>(IndexWriter.java:729)
	at org.apache.solr.update.SolrIndexWriter.<init>(SolrIndexWriter.java:128)
	at org.apache.solr.update.SolrIndexWriter.create(SolrIndexWriter.java:101)
	at org.apache.solr.update.DefaultSolrCoreState.createMainIndexWriter(DefaultSolrCoreState.java:260)
	at org.apache.solr.update.DefaultSolrCoreState.getIndexWriter(DefaultSolrCoreState.java:134)
	at org.apache.solr.handler.IndexFetcher.fetchLatestIndex(IndexFetcher.java:455)
	at org.apache.solr.handler.IndexFetcher.fetchLatestIndex(IndexFetcher.java:351)
	at org.apache.solr.handler.ReplicationHandler.doFetch(ReplicationHandler.java:422)
	at org.apache.solr.handler.ReplicationHandler.lambda$setupPolling$13(ReplicationHandler.java:1191)
	at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
	at java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)
	at java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
	... 1 more




Build Log:
[...truncated 8031 lines...]
   [junit4] Suite: org.apache.lucene.codecs.simpletext.TestSimpleTextNormsFormat
   [junit4]   2> May 21, 2019 7:45:24 AM com.carrotsearch.randomizedtesting.ThreadLeakControl$2 evaluate
   [junit4]   2> WARNING: Suite execution timed out: org.apache.lucene.codecs.simpletext.TestSimpleTextNormsFormat
   [junit4]   2>    1) Thread[id=148, name=TEST-TestSimpleTextNormsFormat.testNCommonBig-seed#[246FB680C9859FD2], state=TIMED_WAITING, group=TGRP-TestSimpleTextNormsFormat]
   [junit4]   2>         at java.base@11.0.1/java.lang.Object.wait(Native Method)
   [junit4]   2>         at app//org.apache.lucene.index.IndexWriter.doWait(IndexWriter.java:4688)
   [junit4]   2>         at app//org.apache.lucene.index.IndexWriter.forceMerge(IndexWriter.java:2013)
   [junit4]   2>         at app//org.apache.lucene.index.IndexWriter.forceMerge(IndexWriter.java:1939)
   [junit4]   2>         at app//org.apache.lucene.index.RandomIndexWriter.forceMerge(RandomIndexWriter.java:500)
   [junit4]   2>         at app//org.apache.lucene.index.BaseNormsFormatTestCase.doTestNormsVersusDocValues(BaseNormsFormatTestCase.java:499)
   [junit4]   2>         at app//org.apache.lucene.index.BaseNormsFormatTestCase.testNCommonBig(BaseNormsFormatTestCase.java:391)
   [junit4]   2>         at java.base@11.0.1/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
   [junit4]   2>         at java.base@11.0.1/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
   [junit4]   2>         at java.base@11.0.1/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
   [junit4]   2>         at java.base@11.0.1/java.lang.reflect.Method.invoke(Method.java:566)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
   [junit4]   2>         at app//org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
   [junit4]   2>         at app//org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
   [junit4]   2>         at app//org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
   [junit4]   2>         at app//org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
   [junit4]   2>         at app//org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
   [junit4]   2>         at app//org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
   [junit4]   2>         at app//org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
   [junit4]   2>         at app//org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
   [junit4]   2>         at app//org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
   [junit4]   2>         at app//org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
   [junit4]   2>         at app//org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
   [junit4]   2>         at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)
   [junit4]   2>    2) Thread[id=147, name=SUITE-TestSimpleTextNormsFormat-seed#[246FB680C9859FD2], state=RUNNABLE, group=TGRP-TestSimpleTextNormsFormat]
   [junit4]   2>         at java.base/java.lang.Thread.getStackTrace(Thread.java:1606)
   [junit4]   2>         at com.carrotsearch.randomizedtesting.ThreadLeakControl$4.run(ThreadLeakControl.java:696)
   [junit4]   2>         at com.carrotsearch.randomizedtesting.ThreadLeakControl$4.run(ThreadLeakControl.java:693)
   [junit4]   2>         at java.base/java.security.AccessController.doPrivileged(Native Method)
   [junit4]   2>         at com.carrotsearch.randomizedtesting.ThreadLeakControl.getStackTrace(ThreadLeakControl.java:693)
   [junit4]   2>         at com.carrotsearch.randomizedtesting.ThreadLeakControl.getThreadsWithTraces(ThreadLeakControl.java:709)
   [junit4]   2>         at com.carrotsearch.randomizedtesting.ThreadLeakControl.formatThreadStacksFull(ThreadLeakControl.java:689)
   [junit4]   2>         at com.carrotsearch.randomizedtesting.ThreadLeakControl.access$1000(ThreadLeakControl.java:65)
   [junit4]   2>         at com.carrotsearch.randomizedtesting.ThreadLeakControl$2.evaluate(ThreadLeakControl.java:415)
   [junit4]   2>         at com.carrotsearch.randomizedtesting.RandomizedRunner.runSuite(RandomizedRunner.java:708)
   [junit4]   2>         at com.carrotsearch.randomizedtesting.RandomizedRunner.access$200(RandomizedRunner.java:138)
   [junit4]   2>         at com.carrotsearch.randomizedtesting.RandomizedRunner$2.run(RandomizedRunner.java:629)
   [junit4]   2>    3) Thread[id=1, name=main, state=WAITING, group=main]
   [junit4]   2>         at java.base@11.0.1/java.lang.Object.wait(Native Method)
   [junit4]   2>         at java.base@11.0.1/java.lang.Thread.join(Thread.java:1305)
   [junit4]   2>         at java.base@11.0.1/java.lang.Thread.join(Thread.java:1379)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.RandomizedRunner.runSuite(RandomizedRunner.java:639)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.RandomizedRunner.run(RandomizedRunner.java:496)
   [junit4]   2>         at app//com.carrotsearch.ant.tasks.junit4.slave.SlaveMain.execute(SlaveMain.java:269)
   [junit4]   2>         at app//com.carrotsearch.ant.tasks.junit4.slave.SlaveMain.main(SlaveMain.java:394)
   [junit4]   2>         at app//com.carrotsearch.ant.tasks.junit4.slave.SlaveMainSafe.main(SlaveMainSafe.java:13)
   [junit4]   2>    4) Thread[id=11, name=JUnit4-serializer-daemon, state=TIMED_WAITING, group=main]
   [junit4]   2>         at java.base@11.0.1/java.lang.Thread.sleep(Native Method)
   [junit4]   2>         at app//com.carrotsearch.ant.tasks.junit4.events.Serializer$1.run(Serializer.java:50)
   [junit4]   2> NOTE: download the large Jenkins line-docs file by running 'ant get-jenkins-line-docs' in the lucene directory.
   [junit4]   2> NOTE: reproduce with: ant test  -Dtestcase=TestSimpleTextNormsFormat -Dtests.method=testNCommonBig -Dtests.seed=246FB680C9859FD2 -Dtests.multiplier=2 -Dtests.nightly=true -Dtests.slow=true -Dtests.linedocsfile=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/test-data/enwiki.random.lines.txt -Dtests.locale=en-PH -Dtests.timezone=Indian/Maldives -Dtests.asserts=true -Dtests.file.encoding=UTF-8
   [junit4] ERROR   6908s J1 | TestSimpleTextNormsFormat.testNCommonBig <<<
   [junit4]    > Throwable #1: java.lang.Exception: Test abandoned because suite timeout was reached.
   [junit4]    > 	at __randomizedtesting.SeedInfo.seed([246FB680C9859FD2]:0)
   [junit4]   2> May 21, 2019 7:45:24 AM com.carrotsearch.randomizedtesting.ThreadLeakControl checkThreadLeaks
   [junit4]   2> WARNING: Will linger awaiting termination of 1 leaked thread(s).
   [junit4]   2> May 21, 2019 7:45:44 AM com.carrotsearch.randomizedtesting.ThreadLeakControl checkThreadLeaks
   [junit4]   2> SEVERE: 1 thread leaked from SUITE scope at org.apache.lucene.codecs.simpletext.TestSimpleTextNormsFormat: 
   [junit4]   2>    1) Thread[id=148, name=TEST-TestSimpleTextNormsFormat.testNCommonBig-seed#[246FB680C9859FD2], state=TIMED_WAITING, group=TGRP-TestSimpleTextNormsFormat]
   [junit4]   2>         at java.base@11.0.1/java.lang.Object.wait(Native Method)
   [junit4]   2>         at app//org.apache.lucene.index.IndexWriter.doWait(IndexWriter.java:4688)
   [junit4]   2>         at app//org.apache.lucene.index.IndexWriter.forceMerge(IndexWriter.java:2013)
   [junit4]   2>         at app//org.apache.lucene.index.IndexWriter.forceMerge(IndexWriter.java:1939)
   [junit4]   2>         at app//org.apache.lucene.index.RandomIndexWriter.forceMerge(RandomIndexWriter.java:500)
   [junit4]   2>         at app//org.apache.lucene.index.BaseNormsFormatTestCase.doTestNormsVersusDocValues(BaseNormsFormatTestCase.java:499)
   [junit4]   2>         at app//org.apache.lucene.index.BaseNormsFormatTestCase.testNCommonBig(BaseNormsFormatTestCase.java:391)
   [junit4]   2>         at java.base@11.0.1/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
   [junit4]   2>         at java.base@11.0.1/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
   [junit4]   2>         at java.base@11.0.1/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
   [junit4]   2>         at java.base@11.0.1/java.lang.reflect.Method.invoke(Method.java:566)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
   [junit4]   2>         at app//org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
   [junit4]   2>         at app//org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
   [junit4]   2>         at app//org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
   [junit4]   2>         at app//org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
   [junit4]   2>         at app//org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
   [junit4]   2>         at app//org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
   [junit4]   2>         at app//org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
   [junit4]   2>         at app//org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
   [junit4]   2>         at app//org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
   [junit4]   2>         at app//org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
   [junit4]   2>         at app//org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
   [junit4]   2>         at app//com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
   [junit4]   2>         at java.base@11.0.1/java.lang.Thread.run(Thread.java:834)
   [junit4]   2> May 21, 2019 7:45:44 AM com.carrotsearch.randomizedtesting.ThreadLeakControl tryToInterruptAll
   [junit4]   2> INFO: Starting to interrupt leaked threads:
   [junit4]   2>    1) Thread[id=148, name=TEST-TestSimpleTextNormsFormat.testNCommonBig-seed#[246FB680C9859FD2], state=TIMED_WAITING, group=TGRP-TestSimpleTextNormsFormat]
   [junit4]   2> NOTE: leaving temporary files on disk at: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/build/codecs/test/J1/temp/lucene.codecs.simpletext.TestSimpleTextNormsFormat_246FB680C9859FD2-001
   [junit4]   2> May 21, 2019 2:45:45 AM com.carrotsearch.randomizedtesting.ThreadLeakControl tryToInterruptAll
   [junit4]   2> INFO: All leaked threads terminated.
   [junit4]   2> NOTE: test params are: codec=Asserting(Lucene80), sim=Asserting(org.apache.lucene.search.similarities.AssertingSimilarity@f948554), locale=en-PH, timezone=Indian/Maldives
   [junit4]   2> NOTE: Linux 4.4.0-112-generic amd64/Oracle Corporation 11.0.1 (64-bit)/cpus=4,threads=1,free=208798208,total=312475648
   [junit4]   2> NOTE: All tests run in this JVM: [TestBloomPostingsFormat, TestSimpleTextSegmentInfoFormat, TestSimpleTextNormsFormat]
   [junit4]   2> NOTE: download the large Jenkins line-docs file by running 'ant get-jenkins-line-docs' in the lucene directory.
   [junit4]   2> NOTE: reproduce with: ant test  -Dtestcase=TestSimpleTextNormsFormat -Dtests.seed=246FB680C9859FD2 -Dtests.multiplier=2 -Dtests.nightly=true -Dtests.slow=true -Dtests.linedocsfile=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/test-data/enwiki.random.lines.txt -Dtests.locale=en-PH -Dtests.timezone=Indian/Maldives -Dtests.asserts=true -Dtests.file.encoding=UTF-8
   [junit4] ERROR   0.00s J1 | TestSimpleTextNormsFormat (suite) <<<
   [junit4]    > Throwable #1: java.lang.Exception: Suite timeout exceeded (>= 7200000 msec).
   [junit4]    > 	at __randomizedtesting.SeedInfo.seed([246FB680C9859FD2]:0)
   [junit4] Completed [19/19 (1!)] on J1 in 7220.40s, 21 tests, 2 errors <<< FAILURES!

[...truncated 6128 lines...]
   [junit4] Suite: org.apache.solr.handler.TestReplicationHandler
   [junit4]   2> Creating dataDir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/init-core-data-001
   [junit4]   2> 2306447 INFO  (SUITE-TestReplicationHandler-seed#[241E1B59C49615CB]-worker) [    ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
   [junit4]   2> 2306447 INFO  (SUITE-TestReplicationHandler-seed#[241E1B59C49615CB]-worker) [    ] o.a.s.SolrTestCaseJ4 Using TrieFields (NUMERIC_POINTS_SYSPROP=false) w/NUMERIC_DOCVALUES_SYSPROP=false
   [junit4]   2> 2306448 INFO  (SUITE-TestReplicationHandler-seed#[241E1B59C49615CB]-worker) [    ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.SolrTestCaseJ4$SuppressSSL(bugUrl="None")
   [junit4]   2> 2306474 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.SolrTestCaseJ4 ###Starting doTestIndexFetchWithMasterUrl
   [junit4]   2> 2306475 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/solr-instance-001/collection1
   [junit4]   2> 2306497 WARN  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 2306514 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 2306514 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 2306514 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 2306518 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 2306518 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 2306518 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 2306519 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@505c3afe{/solr,null,AVAILABLE}
   [junit4]   2> 2306521 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@1641ae3{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:40341}
   [junit4]   2> 2306521 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.Server Started @2306594ms
   [junit4]   2> 2306521 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, solr.data.dir=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/solr-instance-001/collection1/data, hostPort=40341}
   [junit4]   2> 2306521 ERROR (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 2306521 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 2306521 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 9.0.0
   [junit4]   2> 2306521 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in standalone mode on port null
   [junit4]   2> 2306521 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2306521 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-05-21T04:53:50.414659Z
   [junit4]   2> 2306521 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/solr-instance-001/solr.xml
   [junit4]   2> 2306661 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 2306661 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 2306663 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@7b2e5068, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 2307073 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 2307078 WARN  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@3a2dd39e[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2307095 WARN  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@7fa798da[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2307186 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 2307186 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 2307260 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7b2e5068
   [junit4]   2> 2307299 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7b2e5068
   [junit4]   2> 2307299 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7b2e5068
   [junit4]   2> 2307318 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/solr-instance-001/.
   [junit4]   2> 2307318 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 2307364 INFO  (coreLoadExecutor-2910-thread-1) [    x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 2307403 INFO  (coreLoadExecutor-2910-thread-1) [    x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 2307404 INFO  (coreLoadExecutor-2910-thread-1) [    x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.2 with uniqueid field id
   [junit4]   2> 2307404 INFO  (coreLoadExecutor-2910-thread-1) [    x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from instancedir /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/solr-instance-001/./collection1, trusted=true
   [junit4]   2> 2307405 INFO  (coreLoadExecutor-2910-thread-1) [    x:collection1] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.collection1' (registry 'solr.core.collection1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7b2e5068
   [junit4]   2> 2307421 INFO  (coreLoadExecutor-2910-thread-1) [    x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/solr-instance-001/collection1], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/solr-instance-001/./collection1/data/]
   [junit4]   2> 2307424 INFO  (coreLoadExecutor-2910-thread-1) [    x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=45, maxMergeAtOnceExplicit=27, maxMergedSegmentMB=87.2216796875, floorSegmentMB=1.689453125, forceMergeDeletesPctAllowed=23.142391950989285, segmentsPerTier=22.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0, deletesPctAllowed=44.02722863588346
   [junit4]   2> 2307656 INFO  (coreLoadExecutor-2910-thread-1) [    x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 2307656 INFO  (coreLoadExecutor-2910-thread-1) [    x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 2307657 INFO  (coreLoadExecutor-2910-thread-1) [    x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy: minMergeSize=1000, mergeFactor=30, maxMergeSize=9223372036854775807, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=1.02734375, noCFSRatio=0.7451718786901139]
   [junit4]   2> 2307658 INFO  (coreLoadExecutor-2910-thread-1) [    x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@7c24cc6c[collection1] main]
   [junit4]   2> 2307659 INFO  (coreLoadExecutor-2910-thread-1) [    x:collection1] o.a.s.r.ManagedResourceStorage File-based storage initialized to use dir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/solr-instance-001/collection1/conf
   [junit4]   2> 2307660 INFO  (coreLoadExecutor-2910-thread-1) [    x:collection1] o.a.s.h.ReplicationHandler Replication enabled for following config files: schema.xml,xslt/dummy.xsl
   [junit4]   2> 2307660 INFO  (coreLoadExecutor-2910-thread-1) [    x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 2307679 INFO  (searcherExecutor-2911-thread-1-processing-x:collection1) [    x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@7c24cc6c[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2308162 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.SolrTestCaseJ4 Writing core.properties file to /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/solr-instance-002/collection1
   [junit4]   2> 2308163 WARN  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 2308164 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 2308164 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 2308164 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 2308165 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 2308165 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 2308165 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 2308165 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@56f74d0a{/solr,null,AVAILABLE}
   [junit4]   2> 2308165 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@583a0c68{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:36426}
   [junit4]   2> 2308165 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.Server Started @2308239ms
   [junit4]   2> 2308165 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, solr.data.dir=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/solr-instance-002/collection1/data, hostPort=36426}
   [junit4]   2> 2308166 ERROR (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 2308166 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 2308166 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 9.0.0
   [junit4]   2> 2308166 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in standalone mode on port null
   [junit4]   2> 2308166 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2308166 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-05-21T04:53:52.059651Z
   [junit4]   2> 2308166 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/solr-instance-002/solr.xml
   [junit4]   2> 2308169 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 2308169 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 2308170 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@7b2e5068, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 2308353 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 2308355 WARN  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@8602305[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2308370 WARN  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@27b3940d[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2308422 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 2308422 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 2308485 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7b2e5068
   [junit4]   2> 2308526 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7b2e5068
   [junit4]   2> 2308526 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7b2e5068
   [junit4]   2> 2308541 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/solr-instance-002/.
   [junit4]   2> 2308541 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 2308546 INFO  (coreLoadExecutor-2923-thread-1) [    x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 2308604 INFO  (coreLoadExecutor-2923-thread-1) [    x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 2308605 INFO  (coreLoadExecutor-2923-thread-1) [    x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.2 with uniqueid field id
   [junit4]   2> 2308606 INFO  (coreLoadExecutor-2923-thread-1) [    x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from instancedir /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/solr-instance-002/./collection1, trusted=true
   [junit4]   2> 2308606 INFO  (coreLoadExecutor-2923-thread-1) [    x:collection1] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.collection1' (registry 'solr.core.collection1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7b2e5068
   [junit4]   2> 2308606 INFO  (coreLoadExecutor-2923-thread-1) [    x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/solr-instance-002/collection1], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/solr-instance-002/./collection1/data/]
   [junit4]   2> 2308625 INFO  (coreLoadExecutor-2923-thread-1) [    x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=45, maxMergeAtOnceExplicit=27, maxMergedSegmentMB=87.2216796875, floorSegmentMB=1.689453125, forceMergeDeletesPctAllowed=23.142391950989285, segmentsPerTier=22.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0, deletesPctAllowed=44.02722863588346
   [junit4]   2> 2308804 INFO  (coreLoadExecutor-2923-thread-1) [    x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 2308804 INFO  (coreLoadExecutor-2923-thread-1) [    x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 2308805 INFO  (coreLoadExecutor-2923-thread-1) [    x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy: minMergeSize=1000, mergeFactor=30, maxMergeSize=9223372036854775807, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=1.02734375, noCFSRatio=0.7451718786901139]
   [junit4]   2> 2308818 INFO  (coreLoadExecutor-2923-thread-1) [    x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@460ac2cc[collection1] main]
   [junit4]   2> 2308819 INFO  (coreLoadExecutor-2923-thread-1) [    x:collection1] o.a.s.r.ManagedResourceStorage File-based storage initialized to use dir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/solr-instance-002/collection1/conf
   [junit4]   2> 2308821 INFO  (coreLoadExecutor-2923-thread-1) [    x:collection1] o.a.s.h.ReplicationHandler Poll scheduled at an interval of 1000ms
   [junit4]   2> 2308821 INFO  (coreLoadExecutor-2923-thread-1) [    x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 2308825 INFO  (searcherExecutor-2924-thread-1-processing-x:collection1) [    x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@460ac2cc[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2308828 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.CoreContainer Shutting down CoreContainer instance=611632474
   [junit4]   2> 2308840 INFO  (qtp1217841260-10685) [    x:collection1] o.a.s.c.S.Request [collection1]  webapp=/solr path=/replication params={qt=/replication&wt=javabin&version=2&command=indexversion} status=0 QTime=0
   [junit4]   2> 2308840 INFO  (indexFetcher-2928-thread-1) [    ] o.a.s.h.IndexFetcher Master's generation: 1
   [junit4]   2> 2308840 INFO  (indexFetcher-2928-thread-1) [    ] o.a.s.h.IndexFetcher Master's version: 0
   [junit4]   2> 2308840 INFO  (indexFetcher-2928-thread-1) [    ] o.a.s.h.IndexFetcher Slave's generation: 1
   [junit4]   2> 2308840 INFO  (indexFetcher-2928-thread-1) [    ] o.a.s.h.IndexFetcher Slave's version: 0
   [junit4]   2> 2308840 INFO  (indexFetcher-2928-thread-1) [    ] o.a.s.h.IndexFetcher New index in Master. Deleting mine...
   [junit4]   2> 2308853 INFO  (indexFetcher-2928-thread-1) [    ] o.a.s.s.SolrIndexSearcher Opening [Searcher@728148fd[collection1] main]
   [junit4]   2> 2308854 INFO  (searcherExecutor-2924-thread-1) [    ] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@728148fd[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2308854 INFO  (coreCloseExecutor-2929-thread-1) [    x:collection1] o.a.s.c.SolrCore [collection1]  CLOSING SolrCore org.apache.solr.core.SolrCore@223d3fb0
   [junit4]   2> 2308854 INFO  (coreCloseExecutor-2929-thread-1) [    x:collection1] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.core.collection1, tag=223d3fb0
   [junit4]   2> 2308854 INFO  (coreCloseExecutor-2929-thread-1) [    x:collection1] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@19224d98: rootName = null, domain = solr.core.collection1, service url = null, agent id = null] for registry solr.core.collection1 / com.codahale.metrics.MetricRegistry@181e9103
   [junit4]   2> 2308905 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.node, tag=null
   [junit4]   2> 2308905 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@4ca12e2d: rootName = null, domain = solr.node, service url = null, agent id = null] for registry solr.node / com.codahale.metrics.MetricRegistry@1005a5
   [junit4]   2> 2308910 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.jvm, tag=null
   [junit4]   2> 2308910 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@63bcf862: rootName = null, domain = solr.jvm, service url = null, agent id = null] for registry solr.jvm / com.codahale.metrics.MetricRegistry@47199e98
   [junit4]   2> 2308929 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.jetty, tag=null
   [junit4]   2> 2308929 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@19765683: rootName = null, domain = solr.jetty, service url = null, agent id = null] for registry solr.jetty / com.codahale.metrics.MetricRegistry@34720c0b
   [junit4]   2> 2308931 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.AbstractConnector Stopped ServerConnector@583a0c68{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:0}
   [junit4]   2> 2308931 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@56f74d0a{/solr,null,UNAVAILABLE}
   [junit4]   2> 2308931 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.session node0 Stopped scavenging
   [junit4]   2> 2308945 WARN  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 2308945 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 2308945 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 2308945 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 2308946 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 2308946 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 2308946 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 2308946 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@1736ae33{/solr,null,AVAILABLE}
   [junit4]   2> 2308947 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@7db3be6a{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:45384}
   [junit4]   2> 2308947 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.s.Server Started @2309020ms
   [junit4]   2> 2308947 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, solr.data.dir=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/solr-instance-002/collection1/data, hostPort=45384}
   [junit4]   2> 2308947 ERROR (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 2308947 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 2308947 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr? version 9.0.0
   [junit4]   2> 2308947 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in standalone mode on port null
   [junit4]   2> 2308947 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2308947 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-05-21T04:53:52.840813Z
   [junit4]   2> 2308948 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/solr-instance-002/solr.xml
   [junit4]   2> 2308962 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 2308962 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 2308963 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@7b2e5068, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 2309419 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 2309431 WARN  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@7e72e99f[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2309455 WARN  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@290122a1[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 2309803 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 2309803 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 2309931 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7b2e5068
   [junit4]   2> 2309988 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7b2e5068
   [junit4]   2> 2309988 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7b2e5068
   [junit4]   2> 2310003 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/solr-instance-002/.
   [junit4]   2> 2310003 INFO  (TEST-TestReplicationHandler.doTestIndexFetchWithMasterUrl-seed#[241E1B59C49615CB]) [    ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 2310103 INFO  (coreLoadExecutor-2939-thread-1) [    x:collection1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 2310136 INFO  (coreLoadExecutor-2939-thread-1) [    x:collection1] o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 2310137 INFO  (coreLoadExecutor-2939-thread-1) [    x:collection1] o.a.s.s.IndexSchema Loaded schema test/1.2 with uniqueid field id
   [junit4]   2> 2310138 INFO  (coreLoadExecutor-2939-thread-1) [    x:collection1] o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from instancedir /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/solr-instance-002/./collection1, trusted=true
   [junit4]   2> 2310138 INFO  (coreLoadExecutor-2939-thread-1) [    x:collection1] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.collection1' (registry 'solr.core.collection1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7b2e5068
   [junit4]   2> 2310138 INFO  (coreLoadExecutor-2939-thread-1) [    x:collection1] o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/solr-instance-002/collection1], dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/solr-instance-002/./collection1/data/]
   [junit4]   2> 2310154 INFO  (coreLoadExecutor-2939-thread-1) [    x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=45, maxMergeAtOnceExplicit=27, maxMergedSegmentMB=87.2216796875, floorSegmentMB=1.689453125, forceMergeDeletesPctAllowed=23.142391950989285, segmentsPerTier=22.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0, deletesPctAllowed=44.02722863588346
   [junit4]   2> 2310340 INFO  (coreLoadExecutor-2939-thread-1) [    x:collection1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 2310340 INFO  (coreLoadExecutor-2939-thread-1) [    x:collection1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 2310340 INFO  (coreLoadExecutor-2939-thread-1) [    x:collection1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy: minMergeSize=1000, mergeFactor=30, maxMergeSize=9223372036854775807, maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, maxMergeDocs=2147483647, maxCFSSegmentSizeMB=1.02734375, noCFSRatio=0.7451718786901139]
   [junit4]   2> 2310350 INFO  (coreLoadExecutor-2939-thread-1) [    x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@1a2db32[collection1] main]
   [junit4]   2> 2310350 INFO  (coreLoadExecutor-2939-thread-1) [    x:collection1] o.a.s.r.ManagedResourceStorage File-based storage initialized to use dir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.handler.TestReplicationHandler_241E1B59C49615CB-001/solr-instance-002/collection1/conf
   [junit4]   2> 2310351 INFO  (coreLoadExecutor-2939-thread-1) [    x:collection1] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 2310353 INFO  (searcherExecutor-2940-thread-1-processing-x:collection1) [    x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@1a2db32[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2310362 INFO  (qtp1217841260-10686) [    x:collection1] o.a.s.u.DirectUpdateHandler2 [collection1] REMOVING ALL DOCUMENTS FROM INDEX
   [junit4]   2> 2310363 INFO  (qtp1217841260-10686) [    x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/solr path=/update params={wt=javabin&version=2}{deleteByQuery=*:*} 0 1
   [junit4]   2> 2310364 INFO  (qtp345758909-10731) [    x:collection1] o.a.s.u.DirectUpdateHandler2 [collection1] REMOVING ALL DOCUMENTS FROM INDEX
   [junit4]   2> 2310365 INFO  (qtp345758909-10731) [    x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/solr path=/update params={wt=javabin&version=2}{deleteByQuery=*:*} 0 1
   [junit4]   2> 2310374 INFO  (qtp345758909-10732) [    x:collection1] o.a.s.u.DirectUpdateHandler2 start commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 2310375 INFO  (qtp345758909-10732) [    x:collection1] o.a.s.u.SolrIndexWriter Calling setCommitData with IW:org.apache.solr.update.SolrIndexWriter@5a4aea8c commitCommandVersion:0
   [junit4]   2> 2310377 INFO  (qtp345758909-10732) [    x:collection1] o.a.s.s.SolrIndexSearcher Opening [Searcher@759ffac1[collection1] main]
   [junit4]   2> 2310377 INFO  (qtp345758909-10732) [    x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 2310378 INFO  (searcherExecutor-2940-thread-1-processing-x:collection1) [    x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher Searcher@759ffac1[collection1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2310378 INFO  (qtp345758909-10732) [    x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/solr path=/update params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=} 0 4
   [junit4]   2> 2310380 INFO  (qtp1217841260-10685) [    x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/solr path=/update params={wt=javabin&version=2}{add=[0]} 0 1
   [junit4]   2> 2310380 INFO  (qtp1217841260-10681) [    x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/solr path=/update params={wt=javabin&version=2}{add=[1]} 0 0
   [junit4]   2> 2310381 INFO  (qtp1217841260-10682) [    x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/solr path=/update params={wt=javabin&version=2}{add=[2]} 0 0
   [junit4]   2> 2310382 INFO  (qtp1217841260-10683) [    x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/solr path=/update params={wt=javabin&version=2}{add=[3]} 0 0
   [junit4]   2> 2310395 INFO  (qtp1217841260-10686) [    x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/solr path=/update params={wt=javabin&version=2}{add=[4]} 0 0
   [junit4]   2> 2310395 INFO  (qtp1217841260-10685) [    x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/solr path=/update params={wt=javabin&version=2}{add=[5]} 0 0
   [junit4]   2> 2310396 INFO  (qtp1217841260-10681) [    x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/solr path=/update params={wt=javabin&version=2}{add=[6]} 0 0
   [junit4]   2> 2310396 INFO  (qtp1217841260-10682) [    x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/solr path=/update params={wt=javabin&version=2}{add=[7]} 0 0
   [junit4]   2> 2310397 INFO  (qtp1217841260-10683) [    x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/solr path=/update params={wt=javabin&version=2}{add=[8]} 0 0
   [junit4]   2> 2310397 INFO  (qtp1217841260-10686) [    x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/solr path=/update params={wt=javabin&version=2}{add=[9]} 0 0
   [junit4]   2> 2310398 INFO  (qtp1217841260-10685) [    x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/solr path=/update params={wt=javabin&version=2}{add=[10]} 0 0
   [junit4]   2> 2310411 INFO  (qtp1217841260-10681) [    x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/solr path=/update params={wt=javabin&version=2}{add=[11]} 0 0
   [junit4]   2> 2310411 INFO  (qtp1217841260-10682) [    x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/solr path=/update params={wt=javabin&version=2}{add=[12]} 0 0
   [junit4]   2> 2310412 INFO  (qtp1217841260-10683) [    x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/solr path=/update params={wt=javabin&version=2}{add=[13]} 0 0
   [junit4]   2> 2310412 INFO  (qtp1217841260-10686) [    x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/solr path=/update params={wt=javabin&version=2}{add=[14]} 0 0
   [junit4]   2> 2310412 INFO  (qtp1217841260-10685) [    x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/solr path=/update params={wt=javabin&version=2}{add=[15]} 0 0
   [junit4]   2> 2310413 INFO  (qtp1217841260-10681) [    x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/solr path=/update params={wt=javabin&version=2}{add=[16]} 0 0
   [junit4] 

[...truncated too long message...]

mport javax.naming.Context;
 [ecj-lint] 	       ^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] The type javax.naming.Context is not accessible
 [ecj-lint] ----------
 [ecj-lint] 5. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java (at line 20)
 [ecj-lint] 	import javax.naming.InitialContext;
 [ecj-lint] 	       ^^^^^^^^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] The type javax.naming.InitialContext is not accessible
 [ecj-lint] ----------
 [ecj-lint] 6. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java (at line 21)
 [ecj-lint] 	import javax.naming.NamingException;
 [ecj-lint] 	       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] The type javax.naming.NamingException is not accessible
 [ecj-lint] ----------
 [ecj-lint] 7. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java (at line 22)
 [ecj-lint] 	import javax.naming.NoInitialContextException;
 [ecj-lint] 	       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] The type javax.naming.NoInitialContextException is not accessible
 [ecj-lint] ----------
 [ecj-lint] 8. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java (at line 776)
 [ecj-lint] 	Context c = new InitialContext();
 [ecj-lint] 	^^^^^^^
 [ecj-lint] Context cannot be resolved to a type
 [ecj-lint] ----------
 [ecj-lint] 9. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java (at line 776)
 [ecj-lint] 	Context c = new InitialContext();
 [ecj-lint] 	                ^^^^^^^^^^^^^^
 [ecj-lint] InitialContext cannot be resolved to a type
 [ecj-lint] ----------
 [ecj-lint] 10. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java (at line 779)
 [ecj-lint] 	} catch (NoInitialContextException e) {
 [ecj-lint] 	         ^^^^^^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] NoInitialContextException cannot be resolved to a type
 [ecj-lint] ----------
 [ecj-lint] 11. ERROR in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java (at line 781)
 [ecj-lint] 	} catch (NamingException e) {
 [ecj-lint] 	         ^^^^^^^^^^^^^^^
 [ecj-lint] NamingException cannot be resolved to a type
 [ecj-lint] ----------
 [ecj-lint] ----------
 [ecj-lint] 12. WARNING in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/handler/admin/SegmentsInfoRequestHandler.java (at line 215)
 [ecj-lint] 	leafReader = ((FilterLeafReader)leafReader).getDelegate();
 [ecj-lint] 	^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] Resource leak: 'leafReader' is not closed at this location
 [ecj-lint] ----------
 [ecj-lint] ----------
 [ecj-lint] 13. WARNING in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java (at line 142)
 [ecj-lint] 	return new JavaBinCodec(null, stringCache).setReadStringAsCharSeq(true);
 [ecj-lint] 	       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] Resource leak: '<unassigned Closeable value>' is never closed
 [ecj-lint] ----------
 [ecj-lint] ----------
 [ecj-lint] 14. WARNING in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/handler/loader/JavabinLoader.java (at line 137)
 [ecj-lint] 	new JavaBinCodec() {
 [ecj-lint]       SolrParams params;
 [ecj-lint]       AddUpdateCommand addCmd = null;
 [ecj-lint] 
 [ecj-lint]       @Override
 [ecj-lint]       public List<Object> readIterator(DataInputInputStream fis) throws IOException {
 [ecj-lint]         while (true) {
 [ecj-lint]           Object o = readVal(fis);
 [ecj-lint]           if (o == END_OBJ) break;
 [ecj-lint]           if (o instanceof NamedList) {
 [ecj-lint]             params = ((NamedList) o).toSolrParams();
 [ecj-lint]           } else {
 [ecj-lint]             try {
 [ecj-lint]               if (o instanceof byte[]) {
 [ecj-lint]                 if (params != null) req.setParams(params);
 [ecj-lint]                 byte[] buf = (byte[]) o;
 [ecj-lint]                 contentStreamLoader.load(req, rsp, new ContentStreamBase.ByteArrayStream(buf, null), processor);
 [ecj-lint]               } else {
 [ecj-lint]                 throw new RuntimeException("unsupported type ");
 [ecj-lint]               }
 [ecj-lint]             } catch (Exception e) {
 [ecj-lint]               throw new RuntimeException(e);
 [ecj-lint]             } finally {
 [ecj-lint]               params = null;
 [ecj-lint]               req.setParams(old);
 [ecj-lint]             }
 [ecj-lint]           }
 [ecj-lint]         }
 [ecj-lint]         return Collections.emptyList();
 [ecj-lint]       }
 [ecj-lint] 
 [ecj-lint]     }.unmarshal(in);
 [ecj-lint] 	^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] Resource leak: '<unassigned Closeable value>' is never closed
 [ecj-lint] ----------
 [ecj-lint] ----------
 [ecj-lint] 15. INFO in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/logging/log4j2/Log4j2Watcher.java (at line 187)
 [ecj-lint] 	if (logger == root || root.equals(logger) || isRootLogger(name) || "".equals(name)) {
 [ecj-lint] 	                                  ^^^^^^
 [ecj-lint] Unlikely argument type for equals(): Map.Entry<String,LoggerConfig> seems to be unrelated to Logger
 [ecj-lint] ----------
 [ecj-lint] ----------
 [ecj-lint] 16. WARNING in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java (at line 258)
 [ecj-lint] 	HardlinkCopyDirectoryWrapper hardLinkedDir = new HardlinkCopyDirectoryWrapper(splitDir);
 [ecj-lint] 	                             ^^^^^^^^^^^^^
 [ecj-lint] Resource leak: 'hardLinkedDir' is never closed
 [ecj-lint] ----------
 [ecj-lint] ----------
 [ecj-lint] 17. WARNING in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/util/FileUtils.java (at line 50)
 [ecj-lint] 	in = new FileInputStream(src).getChannel();
 [ecj-lint] 	     ^^^^^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] Resource leak: '<unassigned Closeable value>' is never closed
 [ecj-lint] ----------
 [ecj-lint] 18. WARNING in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/util/FileUtils.java (at line 51)
 [ecj-lint] 	out = new FileOutputStream(destination).getChannel();
 [ecj-lint] 	      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] Resource leak: '<unassigned Closeable value>' is never closed
 [ecj-lint] ----------
 [ecj-lint] ----------
 [ecj-lint] 19. WARNING in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/util/SolrCLI.java (at line 1143)
 [ecj-lint] 	SnapshotCloudManager snapshotCloudManager = new SnapshotCloudManager(simCloudManager, config);
 [ecj-lint] 	                     ^^^^^^^^^^^^^^^^^^^^
 [ecj-lint] Resource leak: 'snapshotCloudManager' is never closed
 [ecj-lint] ----------
 [ecj-lint] ----------
 [ecj-lint] 20. WARNING in /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/java/org/apache/solr/util/TestInjection.java (at line 263)
 [ecj-lint] 	timers.remove(this);
 [ecj-lint] 	              ^^^^
 [ecj-lint] Unlikely argument type new TimerTask(){} for remove(Object) on a Collection<Timer>
 [ecj-lint] ----------
 [ecj-lint] 20 problems (8 errors, 11 warnings, 1 info)

BUILD FAILED
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/build.xml:652: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/build.xml:101: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build.xml:681: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/common-build.xml:2010: The following error occurred while executing this line:
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/common-build.xml:2049: Compile failed; see the compiler error output for details.

Total time: 707 minutes 26 seconds
Build step 'Invoke Ant' marked build as failure
Archiving artifacts
Recording test results
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any

[JENKINS] Lucene-Solr-NightlyTests-master - Build # 1851 - Still Unstable

Posted by Apache Jenkins Server <je...@builds.apache.org>.
Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-master/1851/

1 tests failed.
FAILED:  org.apache.solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest.testSimple

Error Message:
Waiting for collection testSimple2 Timeout waiting to see state for collection=testSimple2 :DocCollection(testSimple2//collections/testSimple2/state.json/23)={   "pullReplicas":"0",   "replicationFactor":"2",   "shards":{     "shard1":{       "range":"80000000-ffffffff",       "state":"active",       "replicas":{         "core_node3":{           "dataDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node3/data/",           "base_url":"http://127.0.0.1:34335/solr",           "node_name":"127.0.0.1:34335_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node3/data/tlog",           "core":"testSimple2_shard1_replica_n1",           "shared_storage":"true",           "state":"down"},         "core_node5":{           "dataDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node5/data/",           "base_url":"http://127.0.0.1:33057/solr",           "node_name":"127.0.0.1:33057_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node5/data/tlog",           "core":"testSimple2_shard1_replica_n2",           "shared_storage":"true",           "state":"active",           "leader":"true"}}},     "shard2":{       "range":"0-7fffffff",       "state":"active",       "replicas":{         "core_node7":{           "dataDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node7/data/",           "base_url":"http://127.0.0.1:34335/solr",           "node_name":"127.0.0.1:34335_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node7/data/tlog",           "core":"testSimple2_shard2_replica_n4",           "shared_storage":"true",           "state":"down"},         "core_node8":{           "dataDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node8/data/",           
"base_url":"http://127.0.0.1:33057/solr",           "node_name":"127.0.0.1:33057_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node8/data/tlog",           "core":"testSimple2_shard2_replica_n6",           "shared_storage":"true",           "state":"active",           "leader":"true"}}}},   "router":{"name":"compositeId"},   "maxShardsPerNode":"2",   "autoAddReplicas":"true",   "nrtReplicas":"2",   "tlogReplicas":"0"} Live Nodes: [127.0.0.1:33057_solr, 127.0.0.1:40740_solr] Last available state: DocCollection(testSimple2//collections/testSimple2/state.json/23)={   "pullReplicas":"0",   "replicationFactor":"2",   "shards":{     "shard1":{       "range":"80000000-ffffffff",       "state":"active",       "replicas":{         "core_node3":{           "dataDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node3/data/",           "base_url":"http://127.0.0.1:34335/solr",           "node_name":"127.0.0.1:34335_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node3/data/tlog",           "core":"testSimple2_shard1_replica_n1",           "shared_storage":"true",           "state":"down"},         "core_node5":{           "dataDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node5/data/",           "base_url":"http://127.0.0.1:33057/solr",           "node_name":"127.0.0.1:33057_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node5/data/tlog",           "core":"testSimple2_shard1_replica_n2",           "shared_storage":"true",           "state":"active",           "leader":"true"}}},     "shard2":{       "range":"0-7fffffff",       "state":"active",       "replicas":{         "core_node7":{           
"dataDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node7/data/",           "base_url":"http://127.0.0.1:34335/solr",           "node_name":"127.0.0.1:34335_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node7/data/tlog",           "core":"testSimple2_shard2_replica_n4",           "shared_storage":"true",           "state":"down"},         "core_node8":{           "dataDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node8/data/",           "base_url":"http://127.0.0.1:33057/solr",           "node_name":"127.0.0.1:33057_solr",           "type":"NRT",           "force_set_state":"false",           "ulogDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node8/data/tlog",           "core":"testSimple2_shard2_replica_n6",           "shared_storage":"true",           "state":"active",           "leader":"true"}}}},   "router":{"name":"compositeId"},   "maxShardsPerNode":"2",   "autoAddReplicas":"true",   "nrtReplicas":"2",   "tlogReplicas":"0"}

Stack Trace:
java.lang.AssertionError: Waiting for collection testSimple2
Timeout waiting to see state for collection=testSimple2 :DocCollection(testSimple2//collections/testSimple2/state.json/23)={
  "pullReplicas":"0",
  "replicationFactor":"2",
  "shards":{
    "shard1":{
      "range":"80000000-ffffffff",
      "state":"active",
      "replicas":{
        "core_node3":{
          "dataDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node3/data/",
          "base_url":"http://127.0.0.1:34335/solr",
          "node_name":"127.0.0.1:34335_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node3/data/tlog",
          "core":"testSimple2_shard1_replica_n1",
          "shared_storage":"true",
          "state":"down"},
        "core_node5":{
          "dataDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node5/data/",
          "base_url":"http://127.0.0.1:33057/solr",
          "node_name":"127.0.0.1:33057_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node5/data/tlog",
          "core":"testSimple2_shard1_replica_n2",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"}}},
    "shard2":{
      "range":"0-7fffffff",
      "state":"active",
      "replicas":{
        "core_node7":{
          "dataDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node7/data/",
          "base_url":"http://127.0.0.1:34335/solr",
          "node_name":"127.0.0.1:34335_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node7/data/tlog",
          "core":"testSimple2_shard2_replica_n4",
          "shared_storage":"true",
          "state":"down"},
        "core_node8":{
          "dataDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node8/data/",
          "base_url":"http://127.0.0.1:33057/solr",
          "node_name":"127.0.0.1:33057_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node8/data/tlog",
          "core":"testSimple2_shard2_replica_n6",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"}}}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"2",
  "autoAddReplicas":"true",
  "nrtReplicas":"2",
  "tlogReplicas":"0"}
Live Nodes: [127.0.0.1:33057_solr, 127.0.0.1:40740_solr]
Last available state: DocCollection(testSimple2//collections/testSimple2/state.json/23)={
  "pullReplicas":"0",
  "replicationFactor":"2",
  "shards":{
    "shard1":{
      "range":"80000000-ffffffff",
      "state":"active",
      "replicas":{
        "core_node3":{
          "dataDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node3/data/",
          "base_url":"http://127.0.0.1:34335/solr",
          "node_name":"127.0.0.1:34335_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node3/data/tlog",
          "core":"testSimple2_shard1_replica_n1",
          "shared_storage":"true",
          "state":"down"},
        "core_node5":{
          "dataDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node5/data/",
          "base_url":"http://127.0.0.1:33057/solr",
          "node_name":"127.0.0.1:33057_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node5/data/tlog",
          "core":"testSimple2_shard1_replica_n2",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"}}},
    "shard2":{
      "range":"0-7fffffff",
      "state":"active",
      "replicas":{
        "core_node7":{
          "dataDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node7/data/",
          "base_url":"http://127.0.0.1:34335/solr",
          "node_name":"127.0.0.1:34335_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node7/data/tlog",
          "core":"testSimple2_shard2_replica_n4",
          "shared_storage":"true",
          "state":"down"},
        "core_node8":{
          "dataDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node8/data/",
          "base_url":"http://127.0.0.1:33057/solr",
          "node_name":"127.0.0.1:33057_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:39447/solr_hdfs_home/testSimple2/core_node8/data/tlog",
          "core":"testSimple2_shard2_replica_n6",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"}}}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"2",
  "autoAddReplicas":"true",
  "nrtReplicas":"2",
  "tlogReplicas":"0"}
	at __randomizedtesting.SeedInfo.seed([530D9F6E0766DA77:6BBEBB9020950EA6]:0)
	at org.junit.Assert.fail(Assert.java:88)
	at org.apache.solr.cloud.SolrCloudTestCase.waitForState(SolrCloudTestCase.java:310)
	at org.apache.solr.cloud.autoscaling.AutoAddReplicasIntegrationTest.testSimple(AutoAddReplicasIntegrationTest.java:169)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.base/java.lang.Thread.run(Thread.java:834)




Build Log:
[...truncated 14009 lines...]
   [junit4] Suite: org.apache.solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest
   [junit4]   2> 1831863 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
   [junit4]   2> Creating dataDir: /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_530D9F6E0766DA77-001/init-core-data-001
   [junit4]   2> 1831864 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP=false
   [junit4]   2> 1831865 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.util.RandomizeSSL(reason="", ssl=0.0/0.0, value=0.0/0.0, clientAuth=0.0/0.0)
   [junit4]   1> Formatting using clusterid: testClusterID
   [junit4]   2> 1831916 WARN  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.a.h.m.i.MetricsConfig Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
   [junit4]   2> 1831930 WARN  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 1831932 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 1831934 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 1831934 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 1831934 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 1831935 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@6d52d673{static,/static,jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 1832172 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@1a0b833e{hdfs,/,file:///x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/jetty-localhost-39550-hdfs-_-any-2659414019615033126.dir/webapp/,AVAILABLE}{/hdfs}
   [junit4]   2> 1832173 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.e.j.s.AbstractConnector Started ServerConnector@45f3d3b4{HTTP/1.1,[http/1.1]}{localhost:39550}
   [junit4]   2> 1832173 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.e.j.s.Server Started @1832230ms
   [junit4]   2> 1832295 WARN  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 1832297 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 1832307 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 1832307 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 1832307 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 1832308 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@61ba33db{static,/static,jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 1832477 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@6cad89e5{datanode,/,file:///x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/jetty-localhost-42020-datanode-_-any-15961907549034839740.dir/webapp/,AVAILABLE}{/datanode}
   [junit4]   2> 1832477 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.e.j.s.AbstractConnector Started ServerConnector@cfa355a{HTTP/1.1,[http/1.1]}{localhost:42020}
   [junit4]   2> 1832477 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.e.j.s.Server Started @1832535ms
   [junit4]   2> 1832578 WARN  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 1832579 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 1832588 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 1832588 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 1832588 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 1832589 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@74ba4ae6{static,/static,jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 1832684 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0xc2db9189b4ba019e: Processing first storage report for DS-c38c8dd1-696f-4d7d-ae88-969066c7a1fb from datanode 94779f12-6234-4f1f-a4c8-b97143872409
   [junit4]   2> 1832684 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0xc2db9189b4ba019e: from storage DS-c38c8dd1-696f-4d7d-ae88-969066c7a1fb node DatanodeRegistration(127.0.0.1:37210, datanodeUuid=94779f12-6234-4f1f-a4c8-b97143872409, infoPort=39961, infoSecurePort=0, ipcPort=35595, storageInfo=lv=-57;cid=testClusterID;nsid=552310082;c=1558248803758), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 1832685 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0xc2db9189b4ba019e: Processing first storage report for DS-3033e6a1-ed95-4c55-9298-289e1eba725e from datanode 94779f12-6234-4f1f-a4c8-b97143872409
   [junit4]   2> 1832685 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0xc2db9189b4ba019e: from storage DS-3033e6a1-ed95-4c55-9298-289e1eba725e node DatanodeRegistration(127.0.0.1:37210, datanodeUuid=94779f12-6234-4f1f-a4c8-b97143872409, infoPort=39961, infoSecurePort=0, ipcPort=35595, storageInfo=lv=-57;cid=testClusterID;nsid=552310082;c=1558248803758), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 1832793 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@386eb296{datanode,/,file:///x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/jetty-localhost-41421-datanode-_-any-13394820733772822495.dir/webapp/,AVAILABLE}{/datanode}
   [junit4]   2> 1832794 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.e.j.s.AbstractConnector Started ServerConnector@32dfa907{HTTP/1.1,[http/1.1]}{localhost:41421}
   [junit4]   2> 1832794 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[530D9F6E0766DA77]-worker) [    ] o.e.j.s.Server Started @1832851ms
   [junit4]   2> 1832932 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0xc3fc28357f4d730d: Processing first storage report for DS-b3e8ca79-2073-48b7-986c-2cb44755f445 from datanode 1d2a3af1-756e-4438-9820-e070bad7d09c
   [junit4]   2> 1832932 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0xc3fc28357f4d730d: from storage DS-b3e8ca79-2073-48b7-986c-2cb44755f445 node DatanodeRegistration(127.0.0.1:34242, datanodeUuid=1d2a3af1-756e-4438-9820-e070bad7d09c, infoPort=38833, infoSecurePort=0, ipcPort=33366, storageInfo=lv=-57;cid=testClusterID;nsid=552310082;c=1558248803758), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 1832932 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0xc3fc28357f4d730d: Processing first storage report for DS-d5a47a98-39f2-4f46-927e-9b48f5f0e25e from datanode 1d2a3af1-756e-4438-9820-e070bad7d09c
   [junit4]   2> 1832932 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0xc3fc28357f4d730d: from storage DS-d5a47a98-39f2-4f46-927e-9b48f5f0e25e node DatanodeRegistration(127.0.0.1:34242, datanodeUuid=1d2a3af1-756e-4438-9820-e070bad7d09c, infoPort=38833, infoSecurePort=0, ipcPort=33366, storageInfo=lv=-57;cid=testClusterID;nsid=552310082;c=1558248803758), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 1832986 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[530D9F6E0766DA77]) [    ] o.a.s.SolrTestCaseJ4 ###Starting testSimple
   [junit4]   2> 1832987 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[530D9F6E0766DA77]) [    ] o.a.s.c.MiniSolrCloudCluster Starting cluster of 3 servers in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_530D9F6E0766DA77-001/tempDir-002
   [junit4]   2> 1832988 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[530D9F6E0766DA77]) [    ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 1832988 INFO  (ZkTestServer Run Thread) [    ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 1832988 INFO  (ZkTestServer Run Thread) [    ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 1833088 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[530D9F6E0766DA77]) [    ] o.a.s.c.ZkTestServer start zk server on port:37256
   [junit4]   2> 1833088 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[530D9F6E0766DA77]) [    ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:37256
   [junit4]   2> 1833088 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[530D9F6E0766DA77]) [    ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 37256
   [junit4]   2> 1833097 INFO  (zkConnectionManagerCallback-6724-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1833101 INFO  (zkConnectionManagerCallback-6726-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1833103 INFO  (zkConnectionManagerCallback-6728-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1833106 WARN  (jetty-launcher-6729-thread-1) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 1833106 WARN  (jetty-launcher-6729-thread-2) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 1833106 WARN  (jetty-launcher-6729-thread-3) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 1833106 INFO  (jetty-launcher-6729-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 1833106 INFO  (jetty-launcher-6729-thread-3) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 1833106 INFO  (jetty-launcher-6729-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 1833106 INFO  (jetty-launcher-6729-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 1833106 INFO  (jetty-launcher-6729-thread-2) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 1833106 INFO  (jetty-launcher-6729-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 1833106 INFO  (jetty-launcher-6729-thread-1) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 1833106 INFO  (jetty-launcher-6729-thread-3) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 1833107 INFO  (jetty-launcher-6729-thread-3) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 1833109 INFO  (jetty-launcher-6729-thread-3) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 1833109 INFO  (jetty-launcher-6729-thread-3) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 1833109 INFO  (jetty-launcher-6729-thread-3) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 1833109 INFO  (jetty-launcher-6729-thread-2) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 1833109 INFO  (jetty-launcher-6729-thread-2) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 1833109 INFO  (jetty-launcher-6729-thread-3) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@55266a5d{/solr,null,AVAILABLE}
   [junit4]   2> 1833109 INFO  (jetty-launcher-6729-thread-2) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 1833110 INFO  (jetty-launcher-6729-thread-3) [    ] o.e.j.s.AbstractConnector Started ServerConnector@7475da8a{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:40740}
   [junit4]   2> 1833110 INFO  (jetty-launcher-6729-thread-2) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@78ca27b8{/solr,null,AVAILABLE}
   [junit4]   2> 1833110 INFO  (jetty-launcher-6729-thread-3) [    ] o.e.j.s.Server Started @1833167ms
   [junit4]   2> 1833110 INFO  (jetty-launcher-6729-thread-3) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=40740}
   [junit4]   2> 1833110 INFO  (jetty-launcher-6729-thread-2) [    ] o.e.j.s.AbstractConnector Started ServerConnector@13d3d0d9{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:34335}
   [junit4]   2> 1833110 ERROR (jetty-launcher-6729-thread-3) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 1833110 INFO  (jetty-launcher-6729-thread-3) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 1833110 INFO  (jetty-launcher-6729-thread-2) [    ] o.e.j.s.Server Started @1833167ms
   [junit4]   2> 1833110 INFO  (jetty-launcher-6729-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=34335}
   [junit4]   2> 1833110 INFO  (jetty-launcher-6729-thread-3) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr? version 9.0.0
   [junit4]   2> 1833110 INFO  (jetty-launcher-6729-thread-3) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1833110 INFO  (jetty-launcher-6729-thread-3) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1833110 INFO  (jetty-launcher-6729-thread-3) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-05-19T06:53:24.973893Z
   [junit4]   2> 1833110 ERROR (jetty-launcher-6729-thread-2) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 1833110 INFO  (jetty-launcher-6729-thread-2) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 1833111 INFO  (jetty-launcher-6729-thread-2) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr? version 9.0.0
   [junit4]   2> 1833111 INFO  (jetty-launcher-6729-thread-2) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1833111 INFO  (jetty-launcher-6729-thread-2) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1833111 INFO  (jetty-launcher-6729-thread-2) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-05-19T06:53:24.974111Z
   [junit4]   2> 1833111 INFO  (jetty-launcher-6729-thread-1) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 1833111 INFO  (jetty-launcher-6729-thread-1) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 1833111 INFO  (jetty-launcher-6729-thread-1) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 1833112 INFO  (jetty-launcher-6729-thread-1) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@4ca75aa9{/solr,null,AVAILABLE}
   [junit4]   2> 1833112 INFO  (jetty-launcher-6729-thread-1) [    ] o.e.j.s.AbstractConnector Started ServerConnector@4a7e7f38{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:33057}
   [junit4]   2> 1833112 INFO  (jetty-launcher-6729-thread-1) [    ] o.e.j.s.Server Started @1833169ms
   [junit4]   2> 1833112 INFO  (jetty-launcher-6729-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=33057}
   [junit4]   2> 1833112 INFO  (zkConnectionManagerCallback-6733-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1833113 ERROR (jetty-launcher-6729-thread-1) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 1833113 INFO  (jetty-launcher-6729-thread-1) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 1833113 INFO  (jetty-launcher-6729-thread-1) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr? version 9.0.0
   [junit4]   2> 1833113 INFO  (jetty-launcher-6729-thread-1) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1833113 INFO  (jetty-launcher-6729-thread-1) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1833113 INFO  (jetty-launcher-6729-thread-1) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-05-19T06:53:24.976198Z
   [junit4]   2> 1833113 INFO  (zkConnectionManagerCallback-6731-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1833113 INFO  (jetty-launcher-6729-thread-2) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 1833114 INFO  (jetty-launcher-6729-thread-3) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 1833115 INFO  (zkConnectionManagerCallback-6735-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1833116 INFO  (jetty-launcher-6729-thread-1) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 1833117 INFO  (jetty-launcher-6729-thread-3) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 1833117 INFO  (jetty-launcher-6729-thread-3) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 1833119 INFO  (jetty-launcher-6729-thread-3) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@d406836, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 1833119 INFO  (jetty-launcher-6729-thread-1) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 1833119 INFO  (jetty-launcher-6729-thread-1) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 1833121 INFO  (jetty-launcher-6729-thread-2) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 1833121 INFO  (jetty-launcher-6729-thread-2) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 1833121 INFO  (jetty-launcher-6729-thread-1) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@d406836, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 1833122 INFO  (jetty-launcher-6729-thread-2) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@d406836, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 1833183 INFO  (jetty-launcher-6729-thread-2) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 1833184 WARN  (jetty-launcher-6729-thread-2) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@10540d4d[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1833205 INFO  (jetty-launcher-6729-thread-3) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 1833207 WARN  (jetty-launcher-6729-thread-3) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@18ad9c78[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1833208 WARN  (jetty-launcher-6729-thread-2) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@56993359[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1833210 INFO  (jetty-launcher-6729-thread-2) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:37256/solr
   [junit4]   2> 1833211 WARN  (jetty-launcher-6729-thread-3) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@1567c240[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1833212 INFO  (jetty-launcher-6729-thread-3) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:37256/solr
   [junit4]   2> 1833212 INFO  (zkConnectionManagerCallback-6747-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1833218 INFO  (zkConnectionManagerCallback-6750-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1833219 INFO  (zkConnectionManagerCallback-6752-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1833224 INFO  (zkConnectionManagerCallback-6754-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1833364 INFO  (jetty-launcher-6729-thread-3) [n:127.0.0.1:40740_solr    ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:40740_solr
   [junit4]   2> 1833365 INFO  (jetty-launcher-6729-thread-3) [n:127.0.0.1:40740_solr    ] o.a.s.c.Overseer Overseer (id=73327914646306825-127.0.0.1:40740_solr-n_0000000000) starting
   [junit4]   2> 1833377 INFO  (zkConnectionManagerCallback-6763-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1833380 INFO  (jetty-launcher-6729-thread-3) [n:127.0.0.1:40740_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:37256/solr ready
   [junit4]   2> 1833381 INFO  (OverseerStateUpdate-73327914646306825-127.0.0.1:40740_solr-n_0000000000) [n:127.0.0.1:40740_solr    ] o.a.s.c.Overseer Starting to work on the main queue : 127.0.0.1:40740_solr
   [junit4]   2> 1833383 INFO  (jetty-launcher-6729-thread-3) [n:127.0.0.1:40740_solr    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:40740_solr
   [junit4]   2> 1833387 INFO  (zkCallback-6753-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1833392 INFO  (jetty-launcher-6729-thread-2) [n:127.0.0.1:34335_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1833397 INFO  (jetty-launcher-6729-thread-2) [n:127.0.0.1:34335_solr    ] o.a.s.c.ZkController Publish node=127.0.0.1:34335_solr as DOWN
   [junit4]   2> 1833398 INFO  (jetty-launcher-6729-thread-2) [n:127.0.0.1:34335_solr    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 1833398 INFO  (jetty-launcher-6729-thread-2) [n:127.0.0.1:34335_solr    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:34335_solr
   [junit4]   2> 1833404 INFO  (zkCallback-6753-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 1833406 INFO  (zkCallback-6762-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 1833411 INFO  (zkCallback-6751-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 1833413 INFO  (zkConnectionManagerCallback-6768-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1833415 INFO  (jetty-launcher-6729-thread-3) [n:127.0.0.1:40740_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 1833416 INFO  (jetty-launcher-6729-thread-2) [n:127.0.0.1:34335_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 1833418 INFO  (jetty-launcher-6729-thread-2) [n:127.0.0.1:34335_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:37256/solr ready
   [junit4]   2> 1833446 INFO  (jetty-launcher-6729-thread-3) [n:127.0.0.1:40740_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@d406836
   [junit4]   2> 1833455 INFO  (jetty-launcher-6729-thread-2) [n:127.0.0.1:34335_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 1833467 INFO  (jetty-launcher-6729-thread-3) [n:127.0.0.1:40740_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@d406836
   [junit4]   2> 1833467 INFO  (jetty-launcher-6729-thread-3) [n:127.0.0.1:40740_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@d406836
   [junit4]   2> 1833470 INFO  (jetty-launcher-6729-thread-3) [n:127.0.0.1:40740_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_530D9F6E0766DA77-001/tempDir-002/node3/.
   [junit4]   2> 1833515 INFO  (jetty-launcher-6729-thread-2) [n:127.0.0.1:34335_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@d406836
   [junit4]   2> 1833545 INFO  (jetty-launcher-6729-thread-2) [n:127.0.0.1:34335_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@d406836
   [junit4]   2> 1833545 INFO  (jetty-launcher-6729-thread-2) [n:127.0.0.1:34335_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@d406836
   [junit4]   2> 1833548 INFO  (jetty-launcher-6729-thread-2) [n:127.0.0.1:34335_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_530D9F6E0766DA77-001/tempDir-002/node2/.
   [junit4]   2> 1833728 INFO  (jetty-launcher-6729-thread-1) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 1833729 WARN  (jetty-launcher-6729-thread-1) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@6ede002d[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1833735 WARN  (jetty-launcher-6729-thread-1) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@f8a5f44[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1833737 INFO  (jetty-launcher-6729-thread-1) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:37256/solr
   [junit4]   2> 1833739 INFO  (zkConnectionManagerCallback-6776-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1833743 INFO  (zkConnectionManagerCallback-6778-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1833750 INFO  (jetty-launcher-6729-thread-1) [n:127.0.0.1:33057_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 1833759 INFO  (jetty-launcher-6729-thread-1) [n:127.0.0.1:33057_solr    ] o.a.s.c.ZkController Publish node=127.0.0.1:33057_solr as DOWN
   [junit4]   2> 1833760 INFO  (jetty-launcher-6729-thread-1) [n:127.0.0.1:33057_solr    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 1833760 INFO  (jetty-launcher-6729-thread-1) [n:127.0.0.1:33057_solr    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:33057_solr
   [junit4]   2> 1833766 INFO  (zkCallback-6762-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1833766 INFO  (zkCallback-6751-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1833766 INFO  (zkCallback-6753-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1833770 INFO  (zkConnectionManagerCallback-6785-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1833770 INFO  (zkCallback-6767-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1833771 INFO  (zkCallback-6777-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1833772 INFO  (jetty-launcher-6729-thread-1) [n:127.0.0.1:33057_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 1833774 INFO  (jetty-launcher-6729-thread-1) [n:127.0.0.1:33057_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:37256/solr ready
   [junit4]   2> 1833797 INFO  (jetty-launcher-6729-thread-1) [n:127.0.0.1:33057_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 1833829 INFO  (jetty-launcher-6729-thread-1) [n:127.0.0.1:33057_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@d406836
   [junit4]   2> 1833854 INFO  (jetty-launcher-6729-thread-1) [n:127.0.0.1:33057_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@d406836
   [junit4]   2> 1833854 INFO  (jetty-launcher-6729-thread-1) [n:127.0.0.1:33057_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@d406836
   [junit4]   2> 1833856 INFO  (jetty-launcher-6729-thread-1) [n:127.0.0.1:33057_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_530D9F6E0766DA77-001/tempDir-002/node1/.
   [junit4]   2> 1833955 INFO  (zkConnectionManagerCallback-6791-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1833957 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[530D9F6E0766DA77]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 1833959 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[530D9F6E0766DA77]) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:37256/solr ready
   [junit4]   2> 1833981 INFO  (qtp180254357-24054) [n:127.0.0.1:33057_solr    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/cluster params={wt=javabin&version=2} status=0 QTime=4
   [junit4]   2> 1833984 INFO  (qtp180254357-24058) [n:127.0.0.1:33057_solr    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf&maxShardsPerNode=2&autoAddReplicas=true&name=testSimple1&nrtReplicas=2&action=CREATE&numShards=2&createNodeSet=127.0.0.1:40740_solr,127.0.0.1:34335_solr&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 1833994 INFO  (OverseerThreadFactory-5941-thread-1-processing-n:127.0.0.1:40740_solr) [n:127.0.0.1:40740_solr    ] o.a.s.c.a.c.CreateCollectionCmd Create collection testSimple1
   [junit4]   2> 1834119 INFO  (OverseerStateUpdate-73327914646306825-127.0.0.1:40740_solr-n_0000000000) [n:127.0.0.1:40740_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"testSimple1",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"testSimple1_shard1_replica_n1",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:34335/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 1834127 INFO  (OverseerStateUpdate-73327914646306825-127.0.0.1:40740_solr-n_0000000000) [n:127.0.0.1:40740_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"testSimple1",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"testSimple1_shard1_replica_n3",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:40740/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 1834130 INFO  (OverseerStateUpdate-73327914646306825-127.0.0.1:40740_solr-n_0000000000) [n:127.0.0.1:40740_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"testSimple1",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"testSimple1_shard2_replica_n4",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:34335/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 1834135 INFO  (OverseerStateUpdate-73327914646306825-127.0.0.1:40740_solr-n_0000000000) [n:127.0.0.1:40740_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"testSimple1",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"testSimple1_shard2_replica_n6",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:40740/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 1834345 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr    x:testSimple1_shard1_replica_n1] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node2&collection.configName=conf&newCollection=true&name=testSimple1_shard1_replica_n1&action=CREATE&numShards=2&collection=testSimple1&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 1834349 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr    x:testSimple1_shard2_replica_n6] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node8&collection.configName=conf&newCollection=true&name=testSimple1_shard2_replica_n6&action=CREATE&numShards=2&collection=testSimple1&shard=shard2&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 1834349 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr    x:testSimple1_shard2_replica_n6] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 1834350 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr    x:testSimple1_shard2_replica_n4] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node7&collection.configName=conf&newCollection=true&name=testSimple1_shard2_replica_n4&action=CREATE&numShards=2&collection=testSimple1&shard=shard2&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 1834351 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr    x:testSimple1_shard1_replica_n3] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node5&collection.configName=conf&newCollection=true&name=testSimple1_shard1_replica_n3&action=CREATE&numShards=2&collection=testSimple1&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 1835364 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 1835367 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 1835371 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 1835375 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 1835380 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.s.IndexSchema [testSimple1_shard2_replica_n4] Schema name=minimal
   [junit4]   2> 1835384 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 1835385 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.CoreContainer Creating SolrCore 'testSimple1_shard2_replica_n4' using configuration from collection testSimple1, trusted=true
   [junit4]   2> 1835387 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.testSimple1.shard2.replica_n4' (registry 'solr.core.testSimple1.shard2.replica_n4') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@d406836
   [junit4]   2> 1835387 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost:39447/solr_hdfs_home
   [junit4]   2> 1835388 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 1835388 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.SolrCore [[testSimple1_shard2_replica_n4] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_530D9F6E0766DA77-001/tempDir-002/node2/testSimple1_shard2_replica_n4], dataDir=[hdfs://localhost:39447/solr_hdfs_home/testSimple1/core_node7/data/]
   [junit4]   2> 1835393 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.s.IndexSchema [testSimple1_shard1_replica_n1] Schema name=minimal
   [junit4]   2> 1835393 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:39447/solr_hdfs_home/testSimple1/core_node7/data/snapshot_metadata
   [junit4]   2> 1835394 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.s.IndexSchema [testSimple1_shard1_replica_n3] Schema name=minimal
   [junit4]   2> 1835398 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.s.IndexSchema [testSimple1_shard2_replica_n6] Schema name=minimal
   [junit4]   2> 1835399 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 1835400 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.c.CoreContainer Creating SolrCore 'testSimple1_shard1_replica_n3' using configuration from collection testSimple1, trusted=true
   [junit4]   2> 1835400 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.testSimple1.shard1.replica_n3' (registry 'solr.core.testSimple1.shard1.replica_n3') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@d406836
   [junit4]   2> 1835400 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost:39447/solr_hdfs_home
   [junit4]   2> 1835400 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 1835400 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.c.SolrCore [[testSimple1_shard1_replica_n3] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_530D9F6E0766DA77-001/tempDir-002/node3/testSimple1_shard1_replica_n3], dataDir=[hdfs://localhost:39447/solr_hdfs_home/testSimple1/core_node5/data/]
   [junit4]   2> 1835402 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 1835402 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.CoreContainer Creating SolrCore 'testSimple1_shard1_replica_n1' using configuration from collection testSimple1, trusted=true
   [junit4]   2> 1835403 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.testSimple1.shard1.replica_n1' (registry 'solr.core.testSimple1.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@d406836
   [junit4]   2> 1835403 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost:39447/solr_hdfs_home
   [junit4]   2> 1835403 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 1835403 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.SolrCore [[testSimple1_shard1_replica_n1] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_530D9F6E0766DA77-001/tempDir-002/node2/testSimple1_shard1_replica_n1], dataDir=[hdfs://localhost:39447/solr_hdfs_home/testSimple1/core_node2/data/]
   [junit4]   2> 1835405 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:39447/solr_hdfs_home/testSimple1/core_node5/data/snapshot_metadata
   [junit4]   2> 1835412 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 1835413 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
   [junit4]   2> 1835414 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:39447/solr_hdfs_home/testSimple1/core_node2/data/snapshot_metadata
   [junit4]   2> 1835439 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 1835439 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.CoreContainer Creating SolrCore 'testSimple1_shard2_replica_n6' using configuration from collection testSimple1, trusted=true
   [junit4]   2> 1835440 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.testSimple1.shard2.replica_n6' (registry 'solr.core.testSimple1.shard2.replica_n6') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@d406836
   [junit4]   2> 1835440 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost:39447/solr_hdfs_home
   [junit4]   2> 1835440 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 1835440 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.SolrCore [[testSimple1_shard2_replica_n6] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_530D9F6E0766DA77-001/tempDir-002/node3/testSimple1_shard2_replica_n6], dataDir=[hdfs://localhost:39447/solr_hdfs_home/testSimple1/core_node8/data/]
   [junit4]   2> 1835445 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 1835445 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 1835445 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
   [junit4]   2> 1835445 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
   [junit4]   2> 1835454 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:39447/solr_hdfs_home/testSimple1/core_node8/data/snapshot_metadata
   [junit4]   2> 1835459 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 1835460 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:39447/solr_hdfs_home/testSimple1/core_node7/data
   [junit4]   2> 1835465 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 1835465 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
   [junit4]   2> 1835465 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 1835466 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:39447/solr_hdfs_home/testSimple1/core_node2/data
   [junit4]   2> 1835469 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 1835472 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 1835474 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:39447/solr_hdfs_home/testSimple1/core_node8/data
   [junit4]   2> 1835475 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:39447/solr_hdfs_home/testSimple1/core_node5/data
   [junit4]   2> 1835510 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:39447/solr_hdfs_home/testSimple1/core_node7/data/index
   [junit4]   2> 1835526 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:39447/solr_hdfs_home/testSimple1/core_node8/data/index
   [junit4]   2> 1835531 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 1835531 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
   [junit4]   2> 1835533 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:39447/solr_hdfs_home/testSimple1/core_node2/data/index
   [junit4]   2> 1835534 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 1835534 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
   [junit4]   2> 1835537 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:39447/solr_hdfs_home/testSimple1/core_node5/data/index
   [junit4]   2> 1835538 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 1835539 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 1835550 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 1835550 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
   [junit4]   2> 1835552 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 1835552 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
   [junit4]   2> 1835555 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 1835560 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 1835794 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 1835794 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1835794 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 1835800 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 1835800 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1835800 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 1835806 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 1835806 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 1835812 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 1835812 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 1835829 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.s.SolrIndexSearcher Opening [Searcher@6ea11982[testSimple1_shard2_replica_n4] main]
   [junit4]   2> 1835831 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 1835831 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 1835832 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 1835832 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1633942301777592320
   [junit4]   2> 1835839 INFO  (searcherExecutor-5957-thread-1-processing-n:127.0.0.1:34335_solr x:testSimple1_shard2_replica_n4 c:testSimple1 s:shard2 r:core_node7) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.SolrCore [testSimple1_shard2_replica_n4] Registered new searcher Searcher@6ea11982[testSimple1_shard2_replica_n4] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1835839 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.s.SolrIndexSearcher Opening [Searcher@4e96560c[testSimple1_shard2_replica_n6] main]
   [junit4]   2> 1835842 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 1835842 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 1835843 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 1835844 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1633942301790175232
   [junit4]   2> 1835845 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.ZkShardTerms Successful update of terms at /collections/testSimple1/terms/shard2 to Terms{values={core_node7=0}, version=0}
   [junit4]   2> 1835845 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/testSimple1/leaders/shard2
   [junit4]   2> 1835847 INFO  (searcherExecutor-5960-thread-1-processing-n:127.0.0.1:40740_solr x:testSimple1_shard2_replica_n6 c:testSimple1 s:shard2 r:core_node8) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.SolrCore [testSimple1_shard2_replica_n6] Registered new searcher Searcher@4e96560c[testSimple1_shard2_replica_n6] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1835850 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.ZkShardTerms Successful update of terms at /collections/testSimple1/terms/shard2 to Terms{values={core_node7=0, core_node8=0}, version=1}
   [junit4]   2> 1835850 INFO  (qtp1188405185-24049) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/testSimple1/leaders/shard2
   [junit4]   2> 1835853 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 1835853 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 1835853 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:34335/solr/testSimple1_shard2_replica_n4/
   [junit4]   2> 1835853 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.u.PeerSync PeerSync: core=testSimple1_shard2_replica_n4 url=http://127.0.0.1:34335/solr START replicas=[http://127.0.0.1:40740/solr/testSimple1_shard2_replica_n6/] nUpdates=100
   [junit4]   2> 1835854 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.u.PeerSync PeerSync: core=testSimple1_shard2_replica_n4 url=http://127.0.0.1:34335/solr DONE.  We have no versions.  sync failed.
   [junit4]   2> 1835856 INFO  (qtp1188405185-24047) [n:127.0.0.1:40740_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n6] o.a.s.c.S.Request [testSimple1_shard2_replica_n6]  webapp=/solr path=/get params={distrib=false&qt=/get&fingerprint=false&getVersions=100&wt=javabin&version=2} status=0 QTime=1
   [junit4]   2> 1835857 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.SyncStrategy Leader's attempt to sync with shard failed, moving to the next candidate
   [junit4]   2> 1835857 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.ShardLeaderElectionContext We failed sync, but we have no versions - we can't sync in that case - we were active before, so become leader anyway
   [junit4]   2> 1835857 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.ShardLeaderElectionContextBase Creating leader registration node /collections/testSimple1/leaders/shard2/leader after winning as /collections/testSimple1/leader_elect/shard2/election/73327914646306824-core_node7-n_0000000000
   [junit4]   2> 1835859 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:34335/solr/testSimple1_shard2_replica_n4/ shard2
   [junit4]   2> 1835961 INFO  (zkCallback-6751-thread-2) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimple1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 1835961 INFO  (zkCallback-6751-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimple1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 1835963 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 1835965 INFO  (qtp936834944-24057) [n:127.0.0.1:34335_solr c:testSimple1 s:shard2 r:core_node7 x:testSimple1_shard2_replica_n4] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&coreNodeName=core_node7&collection.configName=conf&newCollection=true&name=testSimple1_shard2_replica_n4&action=CREATE&numShards=2&collection=testSimple1&shard=shard2&wt=javabin&version=2&replicaType=NRT} status=0 QTime=1615
   [junit4]   2> 1836065 INFO  (zkCallback-6751-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimple1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 1836065 INFO  (zkCallback-6751-thread-2) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimple1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 1836065 INFO  (zkCallback-6751-thread-3) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimple1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 1836146 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 1836146 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1836146 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 1836158 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 1836158 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 1836165 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 1836165 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1836165 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 1836177 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.s.SolrIndexSearcher Opening [Searcher@6f96dbed[testSimple1_shard1_replica_n1] main]
   [junit4]   2> 1836178 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 1836178 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 1836181 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 1836182 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 1836183 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 1836183 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1633942302145642496
   [junit4]   2> 1836188 INFO  (searcherExecutor-5959-thread-1-processing-n:127.0.0.1:34335_solr x:testSimple1_shard1_replica_n1 c:testSimple1 s:shard1 r:core_node2) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.SolrCore [testSimple1_shard1_replica_n1] Registered new searcher Searcher@6f96dbed[testSimple1_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1836205 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.s.SolrIndexSearcher Opening [Searcher@281f22ab[testSimple1_shard1_replica_n3] main]
   [junit4]   2> 1836207 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 1836208 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 1836208 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.ZkShardTerms Successful update of terms at /collections/testSimple1/terms/shard1 to Terms{values={core_node2=0}, version=0}
   [junit4]   2> 1836208 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/testSimple1/leaders/shard1
   [junit4]   2> 1836209 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 1836209 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1633942302172905472
   [junit4]   2> 1836214 INFO  (searcherExecutor-5958-thread-1-processing-n:127.0.0.1:40740_solr x:testSimple1_shard1_replica_n3 c:testSimple1 s:shard1 r:core_node5) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.c.SolrCore [testSimple1_shard1_replica_n3] Registered new searcher Searcher@281f22ab[testSimple1_shard1_replica_n3] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1836218 INFO  (qtp936834944-24052) [n:127.0.0.1:34335_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext Waiting until we see more replicas up for shard shard1: total=2 found=1 timeoutin=14999ms
   [junit4]   2> 1836219 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.c.ZkShardTerms Successful update of terms at /collections/testSimple1/terms/shard1 to Terms{values={core_node2=0, core_node5=0}, version=1}
   [junit4]   2> 1836219 INFO  (qtp1188405185-24051) [n:127.0.0.1:40740_solr c:testSimple1 s:shard1 r:core_node5 x:testSimple1_shard1_replica_n3] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/testSimple1/leaders/shard1
   [junit4]   2> 1836321 INFO  (zkCallback-6751-thread-3) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimple1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 1836321 INFO  (zkCallback-6751-thread-2) [    ] o.a.s.c.

[...truncated too long message...]

:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

jar-checksums:
    [mkdir] Created dir: /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/null1794000042
     [copy] Copying 240 files to /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/null1794000042
   [delete] Deleting directory /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/null1794000042

check-working-copy:
[ivy:cachepath] :: resolving dependencies :: #;working@lucene1-us-west
[ivy:cachepath] 	confs: [default]
[ivy:cachepath] 	found org.eclipse.jgit#org.eclipse.jgit;5.3.0.201903130848-r in public
[ivy:cachepath] 	found com.jcraft#jsch;0.1.54 in public
[ivy:cachepath] 	found com.jcraft#jzlib;1.1.1 in public
[ivy:cachepath] 	found com.googlecode.javaewah#JavaEWAH;1.1.6 in public
[ivy:cachepath] 	found org.slf4j#slf4j-api;1.7.2 in public
[ivy:cachepath] 	found org.bouncycastle#bcpg-jdk15on;1.60 in public
[ivy:cachepath] 	found org.bouncycastle#bcprov-jdk15on;1.60 in public
[ivy:cachepath] 	found org.bouncycastle#bcpkix-jdk15on;1.60 in public
[ivy:cachepath] 	found org.slf4j#slf4j-nop;1.7.2 in public
[ivy:cachepath] :: resolution report :: resolve 55ms :: artifacts dl 7ms
	---------------------------------------------------------------------
	|                  |            modules            ||   artifacts   |
	|       conf       | number| search|dwnlded|evicted|| number|dwnlded|
	---------------------------------------------------------------------
	|      default     |   9   |   0   |   0   |   0   ||   9   |   0   |
	---------------------------------------------------------------------
[wc-checker] Initializing working copy...
[wc-checker] Checking working copy status...

-jenkins-base:

BUILD SUCCESSFUL
Total time: 378 minutes 48 seconds
Archiving artifacts
java.lang.InterruptedException: no matches found within 10000
	at hudson.FilePath$ValidateAntFileMask.hasMatch(FilePath.java:2847)
	at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2726)
	at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2707)
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3086)
Also:   hudson.remoting.Channel$CallSiteStackTrace: Remote call to lucene
		at hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1741)
		at hudson.remoting.UserRequest$ExceptionResponse.retrieve(UserRequest.java:357)
		at hudson.remoting.Channel.call(Channel.java:955)
		at hudson.FilePath.act(FilePath.java:1072)
		at hudson.FilePath.act(FilePath.java:1061)
		at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
		at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
		at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
		at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
		at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
		at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
		at hudson.model.Build$BuildExecution.post2(Build.java:186)
		at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
		at hudson.model.Run.execute(Run.java:1835)
		at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
		at hudson.model.ResourceController.execute(ResourceController.java:97)
		at hudson.model.Executor.run(Executor.java:429)
Caused: hudson.FilePath$TunneledInterruptedException
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3088)
	at hudson.remoting.UserRequest.perform(UserRequest.java:212)
	at hudson.remoting.UserRequest.perform(UserRequest.java:54)
	at hudson.remoting.Request$2.run(Request.java:369)
	at hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:748)
Caused: java.lang.InterruptedException: java.lang.InterruptedException: no matches found within 10000
	at hudson.FilePath.act(FilePath.java:1074)
	at hudson.FilePath.act(FilePath.java:1061)
	at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
	at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
	at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
	at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
	at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
	at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
	at hudson.model.Build$BuildExecution.post2(Build.java:186)
	at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
	at hudson.model.Run.execute(Run.java:1835)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:97)
	at hudson.model.Executor.run(Executor.java:429)
No artifacts found that match the file pattern "**/*.events,heapdumps/**,**/hs_err_pid*". Configuration error?
Recording test results
Build step 'Publish JUnit test result report' changed build result to UNSTABLE
Email was triggered for: Unstable (Test Failures)
Sending email for trigger: Unstable (Test Failures)

[JENKINS] Lucene-Solr-NightlyTests-master - Build # 1850 - Still unstable

Posted by Apache Jenkins Server <je...@builds.apache.org>.
Build: https://builds.apache.org/job/Lucene-Solr-NightlyTests-master/1850/

2 tests failed.
FAILED:  junit.framework.TestSuite.org.apache.solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest

Error Message:
ObjectTracker found 1 object(s) that were not released!!! [NRTCachingDirectory] org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.lucene.store.NRTCachingDirectory  at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)  at org.apache.solr.core.CachingDirectoryFactory.get(CachingDirectoryFactory.java:348)  at org.apache.solr.update.SolrIndexWriter.create(SolrIndexWriter.java:99)  at org.apache.solr.core.SolrCore.initIndex(SolrCore.java:779)  at org.apache.solr.core.SolrCore.<init>(SolrCore.java:976)  at org.apache.solr.core.SolrCore.<init>(SolrCore.java:883)  at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1227)  at org.apache.solr.core.CoreContainer.create(CoreContainer.java:1137)  at org.apache.solr.handler.admin.CoreAdminOperation.lambda$static$0(CoreAdminOperation.java:92)  at org.apache.solr.handler.admin.CoreAdminOperation.execute(CoreAdminOperation.java:360)  at org.apache.solr.handler.admin.CoreAdminHandler$CallInfo.call(CoreAdminHandler.java:396)  at org.apache.solr.handler.admin.CoreAdminHandler.handleRequestBody(CoreAdminHandler.java:180)  at org.apache.solr.handler.RequestHandlerBase.handleRequest(RequestHandlerBase.java:199)  at org.apache.solr.servlet.HttpSolrCall.handleAdmin(HttpSolrCall.java:796)  at org.apache.solr.servlet.HttpSolrCall.handleAdminRequest(HttpSolrCall.java:762)  at org.apache.solr.servlet.HttpSolrCall.call(HttpSolrCall.java:522)  at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:397)  at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:343)  at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1610)  at org.apache.solr.client.solrj.embedded.JettySolrRunner$DebugFilter.doFilter(JettySolrRunner.java:165)  at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1610)  at 
org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:540)  at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)  at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1588)  at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)  at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1345)  at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:203)  at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:480)  at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1557)  at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:201)  at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1247)  at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:144)  at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:703)  at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)  at org.eclipse.jetty.server.Server.handle(Server.java:502)  at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:364)  at org.eclipse.jetty.server.HttpChannel.run(HttpChannel.java:305)  at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:765)  at org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:683)  at java.base/java.lang.Thread.run(Thread.java:834)   expected null, but was:<ObjectTracker found 1 object(s) that were not released!!! 
[NRTCachingDirectory] org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.lucene.store.NRTCachingDirectory  at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)  at org.apache.solr.core.CachingDirectoryFactory.get(CachingDirectoryFactory.java:348)  at org.apache.solr.update.SolrIndexWriter.create(SolrIndexWriter.java:99)  at org.apache.solr.core.SolrCore.initIndex(SolrCore.java:779)  at org.apache.solr.core.SolrCore.<init>(SolrCore.java:976)  at org.apache.solr.core.SolrCore.<init>(SolrCore.java:883)  at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1227)  at org.apache.solr.core.CoreContainer.create(CoreContainer.java:1137)  at org.apache.solr.handler.admin.CoreAdminOperation.lambda$static$0(CoreAdminOperation.java:92)  at org.apache.solr.handler.admin.CoreAdminOperation.execute(CoreAdminOperation.java:360)  at org.apache.solr.handler.admin.CoreAdminHandler$CallInfo.call(CoreAdminHandler.java:396)  at org.apache.solr.handler.admin.CoreAdminHandler.handleRequestBody(CoreAdminHandler.java:180)  at org.apache.solr.handler.RequestHandlerBase.handleRequest(RequestHandlerBase.java:199)  at org.apache.solr.servlet.HttpSolrCall.handleAdmin(HttpSolrCall.java:796)  at org.apache.solr.servlet.HttpSolrCall.handleAdminRequest(HttpSolrCall.java:762)  at org.apache.solr.servlet.HttpSolrCall.call(HttpSolrCall.java:522)  at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:397)  at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:343)  at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1610)  at org.apache.solr.client.solrj.embedded.JettySolrRunner$DebugFilter.doFilter(JettySolrRunner.java:165)  at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1610)  at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:540)  at 
org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)  at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1588)  at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)  at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1345)  at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:203)  at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:480)  at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1557)  at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:201)  at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1247)  at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:144)  at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:703)  at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)  at org.eclipse.jetty.server.Server.handle(Server.java:502)  at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:364)  at org.eclipse.jetty.server.HttpChannel.run(HttpChannel.java:305)  at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:765)  at org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:683)  at java.base/java.lang.Thread.run(Thread.java:834)  >

Stack Trace:
java.lang.AssertionError: ObjectTracker found 1 object(s) that were not released!!! [NRTCachingDirectory]
org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.lucene.store.NRTCachingDirectory
	at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)
	at org.apache.solr.core.CachingDirectoryFactory.get(CachingDirectoryFactory.java:348)
	at org.apache.solr.update.SolrIndexWriter.create(SolrIndexWriter.java:99)
	at org.apache.solr.core.SolrCore.initIndex(SolrCore.java:779)
	at org.apache.solr.core.SolrCore.<init>(SolrCore.java:976)
	at org.apache.solr.core.SolrCore.<init>(SolrCore.java:883)
	at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1227)
	at org.apache.solr.core.CoreContainer.create(CoreContainer.java:1137)
	at org.apache.solr.handler.admin.CoreAdminOperation.lambda$static$0(CoreAdminOperation.java:92)
	at org.apache.solr.handler.admin.CoreAdminOperation.execute(CoreAdminOperation.java:360)
	at org.apache.solr.handler.admin.CoreAdminHandler$CallInfo.call(CoreAdminHandler.java:396)
	at org.apache.solr.handler.admin.CoreAdminHandler.handleRequestBody(CoreAdminHandler.java:180)
	at org.apache.solr.handler.RequestHandlerBase.handleRequest(RequestHandlerBase.java:199)
	at org.apache.solr.servlet.HttpSolrCall.handleAdmin(HttpSolrCall.java:796)
	at org.apache.solr.servlet.HttpSolrCall.handleAdminRequest(HttpSolrCall.java:762)
	at org.apache.solr.servlet.HttpSolrCall.call(HttpSolrCall.java:522)
	at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:397)
	at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:343)
	at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1610)
	at org.apache.solr.client.solrj.embedded.JettySolrRunner$DebugFilter.doFilter(JettySolrRunner.java:165)
	at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1610)
	at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:540)
	at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)
	at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1588)
	at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)
	at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1345)
	at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:203)
	at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:480)
	at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1557)
	at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:201)
	at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1247)
	at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:144)
	at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:703)
	at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)
	at org.eclipse.jetty.server.Server.handle(Server.java:502)
	at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:364)
	at org.eclipse.jetty.server.HttpChannel.run(HttpChannel.java:305)
	at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:765)
	at org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:683)
	at java.base/java.lang.Thread.run(Thread.java:834)

 expected null, but was:<ObjectTracker found 1 object(s) that were not released!!! [NRTCachingDirectory]
org.apache.solr.common.util.ObjectReleaseTracker$ObjectTrackerException: org.apache.lucene.store.NRTCachingDirectory
	at org.apache.solr.common.util.ObjectReleaseTracker.track(ObjectReleaseTracker.java:42)
	at org.apache.solr.core.CachingDirectoryFactory.get(CachingDirectoryFactory.java:348)
	at org.apache.solr.update.SolrIndexWriter.create(SolrIndexWriter.java:99)
	at org.apache.solr.core.SolrCore.initIndex(SolrCore.java:779)
	at org.apache.solr.core.SolrCore.<init>(SolrCore.java:976)
	at org.apache.solr.core.SolrCore.<init>(SolrCore.java:883)
	at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1227)
	at org.apache.solr.core.CoreContainer.create(CoreContainer.java:1137)
	at org.apache.solr.handler.admin.CoreAdminOperation.lambda$static$0(CoreAdminOperation.java:92)
	at org.apache.solr.handler.admin.CoreAdminOperation.execute(CoreAdminOperation.java:360)
	at org.apache.solr.handler.admin.CoreAdminHandler$CallInfo.call(CoreAdminHandler.java:396)
	at org.apache.solr.handler.admin.CoreAdminHandler.handleRequestBody(CoreAdminHandler.java:180)
	at org.apache.solr.handler.RequestHandlerBase.handleRequest(RequestHandlerBase.java:199)
	at org.apache.solr.servlet.HttpSolrCall.handleAdmin(HttpSolrCall.java:796)
	at org.apache.solr.servlet.HttpSolrCall.handleAdminRequest(HttpSolrCall.java:762)
	at org.apache.solr.servlet.HttpSolrCall.call(HttpSolrCall.java:522)
	at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:397)
	at org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:343)
	at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1610)
	at org.apache.solr.client.solrj.embedded.JettySolrRunner$DebugFilter.doFilter(JettySolrRunner.java:165)
	at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1610)
	at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:540)
	at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)
	at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1588)
	at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)
	at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1345)
	at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:203)
	at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:480)
	at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1557)
	at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:201)
	at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1247)
	at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:144)
	at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:703)
	at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)
	at org.eclipse.jetty.server.Server.handle(Server.java:502)
	at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:364)
	at org.eclipse.jetty.server.HttpChannel.run(HttpChannel.java:305)
	at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:765)
	at org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:683)
	at java.base/java.lang.Thread.run(Thread.java:834)

>
	at __randomizedtesting.SeedInfo.seed([B0F37023B4FB2C09]:0)
	at org.junit.Assert.fail(Assert.java:88)
	at org.junit.Assert.failNotNull(Assert.java:755)
	at org.junit.Assert.assertNull(Assert.java:737)
	at org.apache.solr.SolrTestCaseJ4.teardownTestCases(SolrTestCaseJ4.java:333)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:901)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.base/java.lang.Thread.run(Thread.java:834)


FAILED:  org.apache.solr.metrics.rrd.SolrRrdBackendFactoryTest.testBasic

Error Message:
{} expected:<1> but was:<0>

Stack Trace:
java.lang.AssertionError: {} expected:<1> but was:<0>
	at __randomizedtesting.SeedInfo.seed([B0F37023B4FB2C09:1B096D366B27AA27]:0)
	at org.junit.Assert.fail(Assert.java:88)
	at org.junit.Assert.failNotEquals(Assert.java:834)
	at org.junit.Assert.assertEquals(Assert.java:645)
	at org.apache.solr.metrics.rrd.SolrRrdBackendFactoryTest.testBasic(SolrRrdBackendFactoryTest.java:94)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.base/java.lang.Thread.run(Thread.java:834)




Build Log:
[...truncated 14102 lines...]
   [junit4] Suite: org.apache.solr.metrics.rrd.SolrRrdBackendFactoryTest
   [junit4]   2> Creating dataDir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.metrics.rrd.SolrRrdBackendFactoryTest_B0F37023B4FB2C09-001/init-core-data-001
   [junit4]   2> 2420157 WARN  (SUITE-SolrRrdBackendFactoryTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=5 numCloses=5
   [junit4]   2> 2420157 INFO  (SUITE-SolrRrdBackendFactoryTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP=false
   [junit4]   2> 2420158 INFO  (SUITE-SolrRrdBackendFactoryTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.util.RandomizeSSL(reason="", ssl=0.0/0.0, value=0.0/0.0, clientAuth=0.0/0.0)
   [junit4]   2> 2420158 INFO  (SUITE-SolrRrdBackendFactoryTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
   [junit4]   2> 2420232 INFO  (TEST-SolrRrdBackendFactoryTest.testBasic-seed#[B0F37023B4FB2C09]) [    ] o.a.s.SolrTestCaseJ4 ###Starting testBasic
   [junit4]   2> 2420232 DEBUG (TEST-SolrRrdBackendFactoryTest.testBasic-seed#[B0F37023B4FB2C09]) [    ] o.a.s.m.r.SolrRrdBackendFactory Created 949206482
   [junit4]   2> 2420323 DEBUG (SolrRrdBackendFactory-9336-thread-1) [    ] o.a.s.m.r.SolrRrdBackendFactory -- maybe sync backends: [foo]
   [junit4]   2> 2420338 DEBUG (SolrRrdBackendFactory-9336-thread-1) [    ] o.a.s.m.r.SolrRrdBackendFactory -- syncing [foo]
   [junit4]   2> 2420411 DEBUG (TEST-SolrRrdBackendFactoryTest.testBasic-seed#[B0F37023B4FB2C09]) [    ] o.a.s.m.r.SolrRrdBackendFactory Closing 949206482
   [junit4]   2> 2420411 INFO  (TEST-SolrRrdBackendFactoryTest.testBasic-seed#[B0F37023B4FB2C09]) [    ] o.a.s.SolrTestCaseJ4 ###Ending testBasic
   [junit4]   2> NOTE: download the large Jenkins line-docs file by running 'ant get-jenkins-line-docs' in the lucene directory.
   [junit4]   2> NOTE: reproduce with: ant test  -Dtestcase=SolrRrdBackendFactoryTest -Dtests.method=testBasic -Dtests.seed=B0F37023B4FB2C09 -Dtests.multiplier=2 -Dtests.nightly=true -Dtests.slow=true -Dtests.linedocsfile=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/test-data/enwiki.random.lines.txt -Dtests.locale=ca-FR -Dtests.timezone=Asia/Omsk -Dtests.asserts=true -Dtests.file.encoding=UTF-8
   [junit4] FAILURE 0.26s J0 | SolrRrdBackendFactoryTest.testBasic <<<
   [junit4]    > Throwable #1: java.lang.AssertionError: {} expected:<1> but was:<0>
   [junit4]    > 	at __randomizedtesting.SeedInfo.seed([B0F37023B4FB2C09:1B096D366B27AA27]:0)
   [junit4]    > 	at org.apache.solr.metrics.rrd.SolrRrdBackendFactoryTest.testBasic(SolrRrdBackendFactoryTest.java:94)
   [junit4]    > 	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
   [junit4]    > 	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
   [junit4]    > 	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
   [junit4]    > 	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
   [junit4]    > 	at java.base/java.lang.Thread.run(Thread.java:834)
   [junit4]   2> NOTE: leaving temporary files on disk at: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J0/temp/solr.metrics.rrd.SolrRrdBackendFactoryTest_B0F37023B4FB2C09-001
   [junit4]   2> NOTE: test params are: codec=Asserting(Lucene80): {}, docValues:{}, maxPointsInLeafNode=485, maxMBSortInHeap=6.602216966345267, sim=Asserting(org.apache.lucene.search.similarities.AssertingSimilarity@2ebf9c25), locale=ca-FR, timezone=Asia/Omsk
   [junit4]   2> NOTE: Linux 4.4.0-112-generic amd64/Oracle Corporation 11.0.1 (64-bit)/cpus=4,threads=1,free=243499376,total=536870912
   [junit4]   2> NOTE: All tests run in this JVM: [AsyncCallRequestStatusResponseTest, PreAnalyzedFieldManagedSchemaCloudTest, DataDrivenBlockJoinTest, TestAtomicUpdateErrorCases, TestStressLiveNodes, SimplePostToolTest, CursorMarkTest, TestFieldCollectionResource, TestPhraseSuggestions, TestSimpleTextCodec, SimpleMLTQParserTest, TestClusterProperties, MetricsHandlerTest, DocumentAnalysisRequestHandlerTest, TestQuerySenderNoQuery, CdcrReplicationHandlerTest, MultiSolrCloudTestCaseTest, TestIntervalFaceting, SolrCloudExampleTest, MoveReplicaHDFSFailoverTest, ZookeeperStatusHandlerTest, TestInitParams, DistributedUpdateProcessorTest, TestRequestStatusCollectionAPI, TestLMDirichletSimilarityFactory, TestNumericRangeQuery64, CollectionPropsTest, MetricsHistoryWithAuthIntegrationTest, ResponseBuilderTest, TestConfigSetImmutable, MetricTriggerIntegrationTest, CoreAdminHandlerTest, ConjunctionSolrSpellCheckerTest, UninvertDocValuesMergePolicyTest, TestZkChroot, IndexSchemaTest, DateMathParserTest, TestUseDocValuesAsStored, SolrIndexConfigTest, TestLRUStatsCache, TestCoreContainer, NodeMutatorTest, AddReplicaTest, TestExportWriter, TestMergePolicyConfig, TestXIncludeConfig, ForceLeaderWithTlogReplicasTest, CustomCollectionTest, ConcurrentCreateRoutedAliasTest, UUIDUpdateProcessorFallbackTest, DocValuesMissingTest, TestConfigSetsAPIZkFailure, TestSolrConfigHandlerConcurrent, TestSafeXMLParsing, TestCloudPhrasesIdentificationComponent, TestSimNodeAddedTrigger, TestFreeTextSuggestions, TestDistributedMissingSort, TestFilteredDocIdSet, TestSimExtremeIndexing, NoCacheHeaderTest, TestReplicationHandlerDiskOverFlow, SpellPossibilityIteratorTest, TestManagedResourceStorage, TestSearchPerf, TestSQLHandler, HighlighterConfigTest, SyncSliceTest, TestPostingsSolrHighlighter, TestOverriddenPrefixQueryForCustomFieldType, TestDistribDocBasedVersion, TestSolrDeletionPolicy2, ZkShardTermsTest, JWTAuthPluginTest, AdminHandlersProxyTest, TestDynamicURP, SearchRateTriggerTest, 
SecurityConfHandlerTest, TestCloudNestedDocsSort, TestDFISimilarityFactory, LeaderFailoverAfterPartitionTest, TestJsonFacetsWithNestedObjects, SubstringBytesRefFilterTest, TestNonDefinedSimilarityFactory, IgnoreCommitOptimizeUpdateProcessorFactoryTest, AutoScalingHandlerTest, UpdateParamsTest, TestLegacyNumericRangeQueryBuilder, NodeAddedTriggerTest, TestExclusionRuleCollectionAccess, TestMaxTokenLenTokenizer, ScheduledTriggerIntegrationTest, SolrRrdBackendFactoryTest]
   [junit4] Completed [239/858 (1!)] on J0 in 0.73s, 1 test, 1 failure <<< FAILURES!

[...truncated 1193 lines...]
   [junit4] Suite: org.apache.solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest
   [junit4]   2> 6493220 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
   [junit4]   2> Creating dataDir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_B0F37023B4FB2C09-001/init-core-data-001
   [junit4]   2> 6493221 WARN  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=1711 numCloses=1711
   [junit4]   2> 6493221 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP=false
   [junit4]   2> 6493222 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via: @org.apache.solr.SolrTestCaseJ4$SuppressSSL(bugUrl="https://issues.apache.org/jira/browse/SOLR-5776")
   [junit4]   2> 6493222 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /_/u
   [junit4]   1> Formatting using clusterid: testClusterID
   [junit4]   2> 6493583 WARN  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.a.h.m.i.MetricsConfig Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
   [junit4]   2> 6493628 WARN  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 6493642 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 6493643 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 6493643 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 6493643 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 6493643 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@3487f5b5{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 6494807 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@39e40c56{hdfs,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/jetty-lucene2-us-west.apache.org-38548-hdfs-_-any-4519862239333594203.dir/webapp/,AVAILABLE}{/hdfs}
   [junit4]   2> 6494832 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.e.j.s.AbstractConnector Started ServerConnector@2b7961c8{HTTP/1.1,[http/1.1]}{lucene2-us-west.apache.org:38548}
   [junit4]   2> 6494832 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.e.j.s.Server Started @6495330ms
   [junit4]   2> 6495226 WARN  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 6495227 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 6495265 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 6495265 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 6495265 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 6495266 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@71d5c69d{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 6495892 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@31acfbec{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/jetty-localhost-41436-datanode-_-any-14027637800994684085.dir/webapp/,AVAILABLE}{/datanode}
   [junit4]   2> 6495893 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.e.j.s.AbstractConnector Started ServerConnector@7a0519bb{HTTP/1.1,[http/1.1]}{localhost:41436}
   [junit4]   2> 6495893 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.e.j.s.Server Started @6496390ms
   [junit4]   2> 6496148 WARN  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 6496149 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 6496150 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 6496150 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 6496150 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 6496151 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@e8884f4{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 6496469 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x48396ea2d656cf8: Processing first storage report for DS-5c1ba598-3cd0-46c8-b53c-5129adef5baf from datanode 6531e369-4065-4d6c-a816-9cc1124bcebc
   [junit4]   2> 6496470 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x48396ea2d656cf8: from storage DS-5c1ba598-3cd0-46c8-b53c-5129adef5baf node DatanodeRegistration(127.0.0.1:39272, datanodeUuid=6531e369-4065-4d6c-a816-9cc1124bcebc, infoPort=37228, infoSecurePort=0, ipcPort=43463, storageInfo=lv=-57;cid=testClusterID;nsid=1419631018;c=1558124255489), blocks: 0, hasStaleStorage: true, processing time: 1 msecs, invalidatedBlocks: 0
   [junit4]   2> 6496470 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x48396ea2d656cf8: Processing first storage report for DS-f0848054-56cd-4aab-a5d7-885151f0368b from datanode 6531e369-4065-4d6c-a816-9cc1124bcebc
   [junit4]   2> 6496470 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x48396ea2d656cf8: from storage DS-f0848054-56cd-4aab-a5d7-885151f0368b node DatanodeRegistration(127.0.0.1:39272, datanodeUuid=6531e369-4065-4d6c-a816-9cc1124bcebc, infoPort=37228, infoSecurePort=0, ipcPort=43463, storageInfo=lv=-57;cid=testClusterID;nsid=1419631018;c=1558124255489), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 6496817 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@53464a0e{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/jetty-localhost-43251-datanode-_-any-10771087546256842728.dir/webapp/,AVAILABLE}{/datanode}
   [junit4]   2> 6496818 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.e.j.s.AbstractConnector Started ServerConnector@5485a410{HTTP/1.1,[http/1.1]}{localhost:43251}
   [junit4]   2> 6496818 INFO  (SUITE-HdfsChaosMonkeyNothingIsSafeTest-seed#[B0F37023B4FB2C09]-worker) [    ] o.e.j.s.Server Started @6497316ms
   [junit4]   2> 6497173 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x1c020402c6544a60: Processing first storage report for DS-fc9688e1-d38d-4afd-90a4-378aab682dff from datanode dc8cd766-ad05-4495-a501-4511214e149e
   [junit4]   2> 6497173 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x1c020402c6544a60: from storage DS-fc9688e1-d38d-4afd-90a4-378aab682dff node DatanodeRegistration(127.0.0.1:34463, datanodeUuid=dc8cd766-ad05-4495-a501-4511214e149e, infoPort=34172, infoSecurePort=0, ipcPort=46204, storageInfo=lv=-57;cid=testClusterID;nsid=1419631018;c=1558124255489), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 6497173 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x1c020402c6544a60: Processing first storage report for DS-e95c7c78-f67a-41ac-a25a-322da8a1ebb7 from datanode dc8cd766-ad05-4495-a501-4511214e149e
   [junit4]   2> 6497173 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x1c020402c6544a60: from storage DS-e95c7c78-f67a-41ac-a25a-322da8a1ebb7 node DatanodeRegistration(127.0.0.1:34463, datanodeUuid=dc8cd766-ad05-4495-a501-4511214e149e, infoPort=34172, infoSecurePort=0, ipcPort=46204, storageInfo=lv=-57;cid=testClusterID;nsid=1419631018;c=1558124255489), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 6497656 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 6497657 INFO  (ZkTestServer Run Thread) [    ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 6497657 INFO  (ZkTestServer Run Thread) [    ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 6497768 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.ZkTestServer start zk server on port:34018
   [junit4]   2> 6497768 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:34018
   [junit4]   2> 6497768 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 34018
   [junit4]   2> 6497776 INFO  (zkConnectionManagerCallback-19294-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 6497817 INFO  (zkConnectionManagerCallback-19296-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 6497818 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml to /configs/conf1/solrconfig.xml
   [junit4]   2> 6497820 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/schema15.xml to /configs/conf1/schema.xml
   [junit4]   2> 6497835 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
   [junit4]   2> 6497846 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/stopwords.txt to /configs/conf1/stopwords.txt
   [junit4]   2> 6497848 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/protwords.txt to /configs/conf1/protwords.txt
   [junit4]   2> 6497849 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/currency.xml to /configs/conf1/currency.xml
   [junit4]   2> 6497863 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml to /configs/conf1/enumsConfig.xml
   [junit4]   2> 6497873 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json to /configs/conf1/open-exchange-rates.json
   [junit4]   2> 6497875 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt to /configs/conf1/mapping-ISOLatin1Accent.txt
   [junit4]   2> 6497876 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt to /configs/conf1/old_synonyms.txt
   [junit4]   2> 6497890 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.ZkTestServer put /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/core/src/test-files/solr/collection1/conf/synonyms.txt to /configs/conf1/synonyms.txt
   [junit4]   2> 6497892 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.AbstractFullDistribZkTestBase Will use NRT replicas unless explicitly asked otherwise
   [junit4]   2> 6499116 WARN  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 6499116 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 6499116 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 6499116 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 6499117 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 6499117 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 6499117 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 6499118 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@4d7af40a{/_/u,null,AVAILABLE}
   [junit4]   2> 6499118 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.e.j.s.AbstractConnector Started ServerConnector@1b84f5d1{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:41499}
   [junit4]   2> 6499118 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.e.j.s.Server Started @6499616ms
   [junit4]   2> 6499118 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/_/u, solr.data.dir=hdfs://lucene2-us-west.apache.org:45359/hdfs__lucene2-us-west.apache.org_45359__home_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-master_checkout_solr_build_solr-core_test_J1_temp_solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_B0F37023B4FB2C09-001_tempDir-002_control_data, hostPort=41499, coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/../../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_B0F37023B4FB2C09-001/control-001/cores}
   [junit4]   2> 6499118 ERROR (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 6499118 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 6499118 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 9.0.0
   [junit4]   2> 6499118 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 6499118 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 6499118 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-05-17T20:17:41.190981Z
   [junit4]   2> 6499141 INFO  (zkConnectionManagerCallback-19298-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 6499142 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 6499142 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_B0F37023B4FB2C09-001/control-001/solr.xml
   [junit4]   2> 6499145 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 6499145 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 6499179 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@105999ac, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 6499632 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 6499633 WARN  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@67295bbc[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 6499655 WARN  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@613a9bbd[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 6499660 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:34018/solr
   [junit4]   2> 6499817 INFO  (zkConnectionManagerCallback-19305-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 6499949 INFO  (zkConnectionManagerCallback-19307-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 6500308 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [n:127.0.0.1:41499__%2Fu    ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:41499__%2Fu
   [junit4]   2> 6500309 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [n:127.0.0.1:41499__%2Fu    ] o.a.s.c.Overseer Overseer (id=74709289431465988-127.0.0.1:41499__%2Fu-n_0000000000) starting
   [junit4]   2> 6500315 INFO  (zkConnectionManagerCallback-19314-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 6500329 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [n:127.0.0.1:41499__%2Fu    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:34018/solr ready
   [junit4]   2> 6500330 INFO  (OverseerStateUpdate-74709289431465988-127.0.0.1:41499__%2Fu-n_0000000000) [n:127.0.0.1:41499__%2Fu    ] o.a.s.c.Overseer Starting to work on the main queue : 127.0.0.1:41499__%2Fu
   [junit4]   2> 6500330 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [n:127.0.0.1:41499__%2Fu    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:41499__%2Fu
   [junit4]   2> 6500340 INFO  (zkCallback-19306-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 6500344 INFO  (zkCallback-19313-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 6500441 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [n:127.0.0.1:41499__%2Fu    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 6500509 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [n:127.0.0.1:41499__%2Fu    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@105999ac
   [junit4]   2> 6500559 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [n:127.0.0.1:41499__%2Fu    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@105999ac
   [junit4]   2> 6500559 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [n:127.0.0.1:41499__%2Fu    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@105999ac
   [junit4]   2> 6500560 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [n:127.0.0.1:41499__%2Fu    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/../../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_B0F37023B4FB2C09-001/control-001/cores
   [junit4]   2> 6500640 INFO  (zkConnectionManagerCallback-19320-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 6500648 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 6500650 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:34018/solr ready
   [junit4]   2> 6500660 INFO  (qtp408243211-98628) [n:127.0.0.1:41499__%2Fu    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:41499__%252Fu&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 6500684 INFO  (OverseerThreadFactory-14726-thread-1-processing-n:127.0.0.1:41499__%2Fu) [n:127.0.0.1:41499__%2Fu    ] o.a.s.c.a.c.CreateCollectionCmd Create collection control_collection
   [junit4]   2> 6500836 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu    x:control_collection_shard1_replica_n1] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 6500836 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu    x:control_collection_shard1_replica_n1] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 6501902 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 9.0.0
   [junit4]   2> 6501975 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.s.IndexSchema [control_collection_shard1_replica_n1] Schema name=test
   [junit4]   2> 6502385 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.s.IndexSchema Loaded schema test/1.6 with uniqueid field id
   [junit4]   2> 6502501 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.CoreContainer Creating SolrCore 'control_collection_shard1_replica_n1' using configuration from collection control_collection, trusted=true
   [junit4]   2> 6502502 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.control_collection.shard1.replica_n1' (registry 'solr.core.control_collection.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@105999ac
   [junit4]   2> 6502502 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://lucene2-us-west.apache.org:45359/solr_hdfs_home
   [junit4]   2> 6502502 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 6502502 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SolrCore [[control_collection_shard1_replica_n1] ] Opening new SolrCore at [/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_B0F37023B4FB2C09-001/control-001/cores/control_collection_shard1_replica_n1], dataDir=[hdfs://lucene2-us-west.apache.org:45359/solr_hdfs_home/control_collection/core_node2/data/]
   [junit4]   2> 6502503 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:45359/solr_hdfs_home/control_collection/core_node2/data/snapshot_metadata
   [junit4]   2> 6502539 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 6502539 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
   [junit4]   2> 6502589 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 6502590 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:45359/solr_hdfs_home/control_collection/core_node2/data
   [junit4]   2> 6502661 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://lucene2-us-west.apache.org:45359/solr_hdfs_home/control_collection/core_node2/data/index
   [junit4]   2> 6502711 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 6502711 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [33554432] will allocate [1] slabs and use ~[33554432] bytes
   [junit4]   2> 6502731 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 6502732 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.MockRandomMergePolicy: org.apache.lucene.index.MockRandomMergePolicy@40fc7228
   [junit4]   2> 6503259 WARN  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class = DumpRequestHandler,attributes = {initParams=a, name=/dump, class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 6503604 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 6503604 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 6503604 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=2
   [junit4]   2> 6503658 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.CommitTracker Hard AutoCommit: if uncommitted for 15000ms; 
   [junit4]   2> 6503658 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 6503660 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: maxMergeAtOnce=13, maxMergeAtOnceExplicit=44, maxMergedSegmentMB=7.220703125, floorSegmentMB=2.1640625, forceMergeDeletesPctAllowed=8.946179839446046, segmentsPerTier=35.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0, deletesPctAllowed=45.357671993833165
   [junit4]   2> 6503678 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.s.SolrIndexSearcher Opening [Searcher@654af086[control_collection_shard1_replica_n1] main]
   [junit4]   2> 6503696 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 6503696 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 6503697 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 6503697 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1633811710102994944
   [junit4]   2> 6503737 INFO  (searcherExecutor-14731-thread-1-processing-n:127.0.0.1:41499__%2Fu x:control_collection_shard1_replica_n1 c:control_collection s:shard1) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SolrCore [control_collection_shard1_replica_n1] Registered new searcher Searcher@654af086[control_collection_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 6503749 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ZkShardTerms Successful update of terms at /collections/control_collection/terms/shard1 to Terms{values={core_node2=0}, version=0}
   [junit4]   2> 6503749 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/control_collection/leaders/shard1
   [junit4]   2> 6503761 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 6503761 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 6503761 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:41499/_/u/control_collection_shard1_replica_n1/
   [junit4]   2> 6503761 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 6503762 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.SyncStrategy http://127.0.0.1:41499/_/u/control_collection_shard1_replica_n1/ has no replicas
   [junit4]   2> 6503762 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContextBase Creating leader registration node /collections/control_collection/leaders/shard1/leader after winning as /collections/control_collection/leader_elect/shard1/election/74709289431465988-core_node2-n_0000000000
   [junit4]   2> 6503776 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:41499/_/u/control_collection_shard1_replica_n1/ shard1
   [junit4]   2> 6503777 INFO  (zkCallback-19306-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 6503778 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 6503779 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu c:control_collection s:shard1  x:control_collection_shard1_replica_n1] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&collection.configName=conf1&newCollection=true&name=control_collection_shard1_replica_n1&action=CREATE&numShards=1&collection=control_collection&shard=shard1&wt=javabin&version=2&replicaType=NRT} status=0 QTime=2943
   [junit4]   2> 6503789 INFO  (qtp408243211-98628) [n:127.0.0.1:41499__%2Fu    ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at most 45 seconds. Check all shard replicas
   [junit4]   2> 6503882 INFO  (zkCallback-19306-thread-2) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 6503882 INFO  (zkCallback-19306-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/control_collection/state.json] for collection [control_collection] has occurred - updating... (live nodes size: [1])
   [junit4]   2> 6503882 INFO  (qtp408243211-98628) [n:127.0.0.1:41499__%2Fu    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={collection.configName=conf1&name=control_collection&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:41499__%252Fu&wt=javabin&version=2} status=0 QTime=3222
   [junit4]   2> 6503926 INFO  (zkConnectionManagerCallback-19326-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 6503927 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 6503933 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:34018/solr ready
   [junit4]   2> 6503933 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection loss:false
   [junit4]   2> 6503955 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf1&name=collection1&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=&stateFormat=2&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 6503978 INFO  (OverseerThreadFactory-14726-thread-2-processing-n:127.0.0.1:41499__%2Fu) [n:127.0.0.1:41499__%2Fu    ] o.a.s.c.a.c.CreateCollectionCmd Create collection collection1
   [junit4]   2> 6503978 INFO  (OverseerCollectionConfigSetProcessor-74709289431465988-127.0.0.1:41499__%2Fu-n_0000000000) [n:127.0.0.1:41499__%2Fu    ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000000 doesn't exist.  Requestor may have disconnected from ZooKeeper
   [junit4]   2> 6504201 WARN  (OverseerThreadFactory-14726-thread-2-processing-n:127.0.0.1:41499__%2Fu) [n:127.0.0.1:41499__%2Fu    ] o.a.s.c.a.c.CreateCollectionCmd It is unusual to create a collection (collection1) without cores.
   [junit4]   2> 6504203 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu    ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at most 45 seconds. Check all shard replicas
   [junit4]   2> 6504203 INFO  (qtp408243211-98630) [n:127.0.0.1:41499__%2Fu    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={collection.configName=conf1&name=collection1&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=&stateFormat=2&wt=javabin&version=2} status=0 QTime=248
   [junit4]   2> 6504204 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.AbstractFullDistribZkTestBase Creating jetty instances pullReplicaCount=0 numOtherReplicas=3
   [junit4]   2> 6505179 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_B0F37023B4FB2C09-001/shard-1-001 of type NRT
   [junit4]   2> 6505200 WARN  (closeThreadPool-19327-thread-1) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 6505201 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 6505201 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 6505201 INFO  (closeThreadPool-19327-thread-1) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 6505207 INFO  (closeThreadPool-19327-thread-1) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 6505207 INFO  (closeThreadPool-19327-thread-1) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 6505207 INFO  (closeThreadPool-19327-thread-1) [    ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 6505208 INFO  (closeThreadPool-19327-thread-1) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@67668571{/_/u,null,AVAILABLE}
   [junit4]   2> 6505208 INFO  (closeThreadPool-19327-thread-1) [    ] o.e.j.s.AbstractConnector Started ServerConnector@354d755a{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:44123}
   [junit4]   2> 6505208 INFO  (closeThreadPool-19327-thread-1) [    ] o.e.j.s.Server Started @6505706ms
   [junit4]   2> 6505208 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/_/u, solrconfig=solrconfig.xml, solr.data.dir=hdfs://lucene2-us-west.apache.org:45359/hdfs__lucene2-us-west.apache.org_45359__home_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-master_checkout_solr_build_solr-core_test_J1_temp_solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_B0F37023B4FB2C09-001_tempDir-002_jetty1, hostPort=44123, coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/../../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_B0F37023B4FB2C09-001/shard-1-001/cores}
   [junit4]   2> 6505208 ERROR (closeThreadPool-19327-thread-1) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 6505208 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 6505208 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 9.0.0
   [junit4]   2> 6505208 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 6505208 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 6505208 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-05-17T20:17:47.280735Z
   [junit4]   2> 6505266 INFO  (zkConnectionManagerCallback-19329-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 6505339 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 6505339 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_B0F37023B4FB2C09-001/shard-1-001/solr.xml
   [junit4]   2> 6505342 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 6505342 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 6505343 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@105999ac, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 6505873 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 6505895 WARN  (closeThreadPool-19327-thread-1) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@5e480f94[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 6505939 WARN  (closeThreadPool-19327-thread-1) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@71649434[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 6505940 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:34018/solr
   [junit4]   2> 6505984 INFO  (zkConnectionManagerCallback-19336-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 6506020 INFO  (zkConnectionManagerCallback-19338-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 6506023 INFO  (OverseerCollectionConfigSetProcessor-74709289431465988-127.0.0.1:41499__%2Fu-n_0000000000) [n:127.0.0.1:41499__%2Fu    ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000002 doesn't exist.  Requestor may have disconnected from ZooKeeper
   [junit4]   2> 6506034 INFO  (closeThreadPool-19327-thread-1) [n:127.0.0.1:44123__%2Fu    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 6506049 INFO  (closeThreadPool-19327-thread-1) [n:127.0.0.1:44123__%2Fu    ] o.a.s.c.ZkController Publish node=127.0.0.1:44123__%2Fu as DOWN
   [junit4]   2> 6506050 INFO  (closeThreadPool-19327-thread-1) [n:127.0.0.1:44123__%2Fu    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 6506050 INFO  (closeThreadPool-19327-thread-1) [n:127.0.0.1:44123__%2Fu    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:44123__%2Fu
   [junit4]   2> 6506051 INFO  (zkCallback-19306-thread-3) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 6506061 INFO  (zkCallback-19313-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 6506061 INFO  (zkCallback-19337-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 6506062 INFO  (zkCallback-19325-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 6506062 INFO  (zkConnectionManagerCallback-19345-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 6506073 INFO  (closeThreadPool-19327-thread-1) [n:127.0.0.1:44123__%2Fu    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 6506074 INFO  (closeThreadPool-19327-thread-1) [n:127.0.0.1:44123__%2Fu    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:34018/solr ready
   [junit4]   2> 6506157 INFO  (closeThreadPool-19327-thread-1) [n:127.0.0.1:44123__%2Fu    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 6506231 INFO  (closeThreadPool-19327-thread-1) [n:127.0.0.1:44123__%2Fu    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@105999ac
   [junit4]   2> 6506316 INFO  (closeThreadPool-19327-thread-1) [n:127.0.0.1:44123__%2Fu    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@105999ac
   [junit4]   2> 6506316 INFO  (closeThreadPool-19327-thread-1) [n:127.0.0.1:44123__%2Fu    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@105999ac
   [junit4]   2> 6506318 INFO  (closeThreadPool-19327-thread-1) [n:127.0.0.1:44123__%2Fu    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/../../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_B0F37023B4FB2C09-001/shard-1-001/cores
   [junit4]   2> 6506603 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_B0F37023B4FB2C09-001/shard-2-001 of type NRT
   [junit4]   2> 6506604 WARN  (closeThreadPool-19327-thread-1) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 6506604 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 6506604 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 6506604 INFO  (closeThreadPool-19327-thread-1) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 6506605 INFO  (closeThreadPool-19327-thread-1) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 6506605 INFO  (closeThreadPool-19327-thread-1) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 6506605 INFO  (closeThreadPool-19327-thread-1) [    ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 6506606 INFO  (closeThreadPool-19327-thread-1) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@7e512f3c{/_/u,null,AVAILABLE}
   [junit4]   2> 6506606 INFO  (closeThreadPool-19327-thread-1) [    ] o.e.j.s.AbstractConnector Started ServerConnector@56cf3646{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:37112}
   [junit4]   2> 6506606 INFO  (closeThreadPool-19327-thread-1) [    ] o.e.j.s.Server Started @6507104ms
   [junit4]   2> 6506606 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/_/u, solrconfig=solrconfig.xml, solr.data.dir=hdfs://lucene2-us-west.apache.org:45359/hdfs__lucene2-us-west.apache.org_45359__home_jenkins_jenkins-slave_workspace_Lucene-Solr-NightlyTests-master_checkout_solr_build_solr-core_test_J1_temp_solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_B0F37023B4FB2C09-001_tempDir-002_jetty2, hostPort=37112, coreRootDirectory=/home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/../../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_B0F37023B4FB2C09-001/shard-2-001/cores, replicaType=NRT}
   [junit4]   2> 6506606 ERROR (closeThreadPool-19327-thread-1) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 6506606 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 6506606 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 9.0.0
   [junit4]   2> 6506606 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 6506606 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 6506606 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-05-17T20:17:48.678721Z
   [junit4]   2> 6506641 INFO  (zkConnectionManagerCallback-19348-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 6506645 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in ZooKeeper)
   [junit4]   2> 6506645 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.SolrXmlConfig Loading container configuration from /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_B0F37023B4FB2C09-001/shard-2-001/solr.xml
   [junit4]   2> 6506647 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 6506647 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 6506690 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@105999ac, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 6507592 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=false]
   [junit4]   2> 6507608 WARN  (closeThreadPool-19327-thread-1) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@6284da83[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 6507612 WARN  (closeThreadPool-19327-thread-1) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@71cf73d7[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 6507621 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:34018/solr
   [junit4]   2> 6507788 INFO  (zkConnectionManagerCallback-19355-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 6507815 INFO  (zkConnectionManagerCallback-19357-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 6507828 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 6507831 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.ZkController Publish node=127.0.0.1:37112__%2Fu as DOWN
   [junit4]   2> 6507832 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 6507832 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:37112__%2Fu
   [junit4]   2> 6507835 INFO  (zkCallback-19313-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 6507835 INFO  (zkCallback-19344-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 6507843 INFO  (zkCallback-19306-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 6507847 INFO  (zkCallback-19337-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 6507847 INFO  (zkCallback-19356-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 6507851 INFO  (zkCallback-19325-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 6507864 INFO  (zkConnectionManagerCallback-19364-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 6507866 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 6507867 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:34018/solr ready
   [junit4]   2> 6507944 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 6508015 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@105999ac
   [junit4]   2> 6508072 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@105999ac
   [junit4]   2> 6508072 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@105999ac
   [junit4]   2> 6508073 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/../../../../../../../../../../../home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_B0F37023B4FB2C09-001/shard-2-001/cores
   [junit4]   2> 6508671 INFO  (TEST-HdfsChaosMonkeyNothingIsSafeTest.test-seed#[B0F37023B4FB2C09]) [    ] o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/build/solr-core/test/J1/temp/solr.cloud.hdfs.HdfsChaosMonkeyNothingIsSafeTest_B0F37023B4FB2C09-001/shard-3-001 of type NRT
   [junit4]   2> 6508672 WARN  (closeThreadPool-19327-thread-1) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 6508672 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 6508672 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 6508672 INFO  (closeThreadPool-19327-thread-1) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 11.0.1+13-LTS
   [junit4]   2> 6508698 INFO  (closeThreadPool-19327-thread-1) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 6508698 INFO  (closeThreadPool-19327-thread-1) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 6508698 INFO  (closeThreadPool-19327-thread-1) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 6508698 INFO  (closeThreadPool-19327-thread-1) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@68851679{/_/u,null,AVAILABLE}
   [junit4]   2> 6508698 INFO  (closeThreadPool-19327-thread-1) [    ] o.e.j.s.AbstractConnector Started ServerConnector@151250c3{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:39934}
   [junit4]   2> 6508698 INFO  (closeThreadPool-19327-thread-1) [    ] o.e.j.s.Server Started @6509196ms
   [junit4]   2> 6508698 INFO  (closeThreadPool-19327-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/_/u, solrconfig=solrconfig.xml, solr.data.dir=hdfs://lucene2-us-west.apache.org:45359/hdfs__lucene2-us-west.apache.org_45359__home_jenkins_jenk

[...truncated too long message...]

ettings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/lucene/top-level-ivy-settings.xml

resolve:

jar-checksums:
    [mkdir] Created dir: /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/null1100710537
     [copy] Copying 240 files to /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/null1100710537
   [delete] Deleting directory /home/jenkins/jenkins-slave/workspace/Lucene-Solr-NightlyTests-master/checkout/solr/null1100710537

check-working-copy:
[ivy:cachepath] :: resolving dependencies :: #;working@lucene2-us-west.apache.org
[ivy:cachepath] 	confs: [default]
[ivy:cachepath] 	found org.eclipse.jgit#org.eclipse.jgit;5.3.0.201903130848-r in public
[ivy:cachepath] 	found com.jcraft#jsch;0.1.54 in public
[ivy:cachepath] 	found com.jcraft#jzlib;1.1.1 in public
[ivy:cachepath] 	found com.googlecode.javaewah#JavaEWAH;1.1.6 in public
[ivy:cachepath] 	found org.slf4j#slf4j-api;1.7.2 in public
[ivy:cachepath] 	found org.bouncycastle#bcpg-jdk15on;1.60 in public
[ivy:cachepath] 	found org.bouncycastle#bcprov-jdk15on;1.60 in public
[ivy:cachepath] 	found org.bouncycastle#bcpkix-jdk15on;1.60 in public
[ivy:cachepath] 	found org.slf4j#slf4j-nop;1.7.2 in public
[ivy:cachepath] :: resolution report :: resolve 76ms :: artifacts dl 5ms
	---------------------------------------------------------------------
	|                  |            modules            ||   artifacts   |
	|       conf       | number| search|dwnlded|evicted|| number|dwnlded|
	---------------------------------------------------------------------
	|      default     |   9   |   0   |   0   |   0   ||   9   |   0   |
	---------------------------------------------------------------------
[wc-checker] Initializing working copy...
[wc-checker] Checking working copy status...

-jenkins-base:

BUILD SUCCESSFUL
Total time: 630 minutes 54 seconds
Archiving artifacts
java.lang.InterruptedException: no matches found within 10000
	at hudson.FilePath$ValidateAntFileMask.hasMatch(FilePath.java:2847)
	at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2726)
	at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2707)
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3086)
Also:   hudson.remoting.Channel$CallSiteStackTrace: Remote call to lucene2
		at hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1741)
		at hudson.remoting.UserRequest$ExceptionResponse.retrieve(UserRequest.java:357)
		at hudson.remoting.Channel.call(Channel.java:955)
		at hudson.FilePath.act(FilePath.java:1072)
		at hudson.FilePath.act(FilePath.java:1061)
		at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
		at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
		at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
		at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
		at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
		at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
		at hudson.model.Build$BuildExecution.post2(Build.java:186)
		at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
		at hudson.model.Run.execute(Run.java:1835)
		at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
		at hudson.model.ResourceController.execute(ResourceController.java:97)
		at hudson.model.Executor.run(Executor.java:429)
Caused: hudson.FilePath$TunneledInterruptedException
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3088)
	at hudson.remoting.UserRequest.perform(UserRequest.java:212)
	at hudson.remoting.UserRequest.perform(UserRequest.java:54)
	at hudson.remoting.Request$2.run(Request.java:369)
	at hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:748)
Caused: java.lang.InterruptedException: java.lang.InterruptedException: no matches found within 10000
	at hudson.FilePath.act(FilePath.java:1074)
	at hudson.FilePath.act(FilePath.java:1061)
	at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
	at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
	at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
	at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
	at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
	at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
	at hudson.model.Build$BuildExecution.post2(Build.java:186)
	at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
	at hudson.model.Run.execute(Run.java:1835)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:97)
	at hudson.model.Executor.run(Executor.java:429)
No artifacts found that match the file pattern "**/*.events,heapdumps/**,**/hs_err_pid*". Configuration error?
Recording test results
Build step 'Publish JUnit test result report' changed build result to UNSTABLE
Email was triggered for: Unstable (Test Failures)
Sending email for trigger: Unstable (Test Failures)