Posted to dev@lucene.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2019/04/19 11:37:37 UTC

[JENKINS] Lucene-Solr-BadApples-NightlyTests-8.x - Build # 14 - Still Failing

Build: https://builds.apache.org/job/Lucene-Solr-BadApples-NightlyTests-8.x/14/

1 test failed.
FAILED:  org.apache.solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest.testSimple

Error Message:
Waiting for collection testSimple2 Timeout waiting to see state for collection=testSimple2 :DocCollection(testSimple2//collections/testSimple2/state.json/25)={
  "pullReplicas":"0",
  "replicationFactor":"2",
  "shards":{
    "shard1":{
      "range":"80000000-ffffffff",
      "state":"active",
      "replicas":{
        "core_node3":{
          "dataDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node3/data/",
          "base_url":"http://127.0.0.1:42151/solr",
          "node_name":"127.0.0.1:42151_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node3/data/tlog",
          "core":"testSimple2_shard1_replica_n1",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"},
        "core_node4":{
          "dataDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node4/data/",
          "base_url":"http://127.0.0.1:34759/solr",
          "node_name":"127.0.0.1:34759_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node4/data/tlog",
          "core":"testSimple2_shard1_replica_n2",
          "shared_storage":"true",
          "state":"down"}}},
    "shard2":{
      "range":"0-7fffffff",
      "state":"active",
      "replicas":{
        "core_node7":{
          "dataDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node7/data/",
          "base_url":"http://127.0.0.1:42151/solr",
          "node_name":"127.0.0.1:42151_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node7/data/tlog",
          "core":"testSimple2_shard2_replica_n5",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"},
        "core_node8":{
          "dataDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node8/data/",
          "base_url":"http://127.0.0.1:34759/solr",
          "node_name":"127.0.0.1:34759_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node8/data/tlog",
          "core":"testSimple2_shard2_replica_n6",
          "shared_storage":"true",
          "state":"down"}}}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"2",
  "autoAddReplicas":"true",
  "nrtReplicas":"2",
  "tlogReplicas":"0"}
Live Nodes: [127.0.0.1:39552_solr, 127.0.0.1:42151_solr]
Last available state: DocCollection(testSimple2//collections/testSimple2/state.json/25)={
  "pullReplicas":"0",
  "replicationFactor":"2",
  "shards":{
    "shard1":{
      "range":"80000000-ffffffff",
      "state":"active",
      "replicas":{
        "core_node3":{
          "dataDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node3/data/",
          "base_url":"http://127.0.0.1:42151/solr",
          "node_name":"127.0.0.1:42151_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node3/data/tlog",
          "core":"testSimple2_shard1_replica_n1",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"},
        "core_node4":{
          "dataDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node4/data/",
          "base_url":"http://127.0.0.1:34759/solr",
          "node_name":"127.0.0.1:34759_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node4/data/tlog",
          "core":"testSimple2_shard1_replica_n2",
          "shared_storage":"true",
          "state":"down"}}},
    "shard2":{
      "range":"0-7fffffff",
      "state":"active",
      "replicas":{
        "core_node7":{
          "dataDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node7/data/",
          "base_url":"http://127.0.0.1:42151/solr",
          "node_name":"127.0.0.1:42151_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node7/data/tlog",
          "core":"testSimple2_shard2_replica_n5",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"},
        "core_node8":{
          "dataDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node8/data/",
          "base_url":"http://127.0.0.1:34759/solr",
          "node_name":"127.0.0.1:34759_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node8/data/tlog",
          "core":"testSimple2_shard2_replica_n6",
          "shared_storage":"true",
          "state":"down"}}}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"2",
  "autoAddReplicas":"true",
  "nrtReplicas":"2",
  "tlogReplicas":"0"}

Stack Trace:
java.lang.AssertionError: Waiting for collection testSimple2
Timeout waiting to see state for collection=testSimple2 :DocCollection(testSimple2//collections/testSimple2/state.json/25)={
  "pullReplicas":"0",
  "replicationFactor":"2",
  "shards":{
    "shard1":{
      "range":"80000000-ffffffff",
      "state":"active",
      "replicas":{
        "core_node3":{
          "dataDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node3/data/",
          "base_url":"http://127.0.0.1:42151/solr",
          "node_name":"127.0.0.1:42151_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node3/data/tlog",
          "core":"testSimple2_shard1_replica_n1",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"},
        "core_node4":{
          "dataDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node4/data/",
          "base_url":"http://127.0.0.1:34759/solr",
          "node_name":"127.0.0.1:34759_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node4/data/tlog",
          "core":"testSimple2_shard1_replica_n2",
          "shared_storage":"true",
          "state":"down"}}},
    "shard2":{
      "range":"0-7fffffff",
      "state":"active",
      "replicas":{
        "core_node7":{
          "dataDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node7/data/",
          "base_url":"http://127.0.0.1:42151/solr",
          "node_name":"127.0.0.1:42151_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node7/data/tlog",
          "core":"testSimple2_shard2_replica_n5",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"},
        "core_node8":{
          "dataDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node8/data/",
          "base_url":"http://127.0.0.1:34759/solr",
          "node_name":"127.0.0.1:34759_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node8/data/tlog",
          "core":"testSimple2_shard2_replica_n6",
          "shared_storage":"true",
          "state":"down"}}}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"2",
  "autoAddReplicas":"true",
  "nrtReplicas":"2",
  "tlogReplicas":"0"}
Live Nodes: [127.0.0.1:39552_solr, 127.0.0.1:42151_solr]
Last available state: DocCollection(testSimple2//collections/testSimple2/state.json/25)={
  "pullReplicas":"0",
  "replicationFactor":"2",
  "shards":{
    "shard1":{
      "range":"80000000-ffffffff",
      "state":"active",
      "replicas":{
        "core_node3":{
          "dataDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node3/data/",
          "base_url":"http://127.0.0.1:42151/solr",
          "node_name":"127.0.0.1:42151_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node3/data/tlog",
          "core":"testSimple2_shard1_replica_n1",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"},
        "core_node4":{
          "dataDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node4/data/",
          "base_url":"http://127.0.0.1:34759/solr",
          "node_name":"127.0.0.1:34759_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node4/data/tlog",
          "core":"testSimple2_shard1_replica_n2",
          "shared_storage":"true",
          "state":"down"}}},
    "shard2":{
      "range":"0-7fffffff",
      "state":"active",
      "replicas":{
        "core_node7":{
          "dataDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node7/data/",
          "base_url":"http://127.0.0.1:42151/solr",
          "node_name":"127.0.0.1:42151_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node7/data/tlog",
          "core":"testSimple2_shard2_replica_n5",
          "shared_storage":"true",
          "state":"active",
          "leader":"true"},
        "core_node8":{
          "dataDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node8/data/",
          "base_url":"http://127.0.0.1:34759/solr",
          "node_name":"127.0.0.1:34759_solr",
          "type":"NRT",
          "force_set_state":"false",
          "ulogDir":"hdfs://localhost:34207/solr_hdfs_home/testSimple2/core_node8/data/tlog",
          "core":"testSimple2_shard2_replica_n6",
          "shared_storage":"true",
          "state":"down"}}}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"2",
  "autoAddReplicas":"true",
  "nrtReplicas":"2",
  "tlogReplicas":"0"}
	at __randomizedtesting.SeedInfo.seed([84ED65A251F02B:3837C99B85A224FA]:0)
	at org.junit.Assert.fail(Assert.java:88)
	at org.apache.solr.cloud.SolrCloudTestCase.waitForState(SolrCloudTestCase.java:310)
	at org.apache.solr.cloud.autoscaling.AutoAddReplicasIntegrationTest.testSimple(AutoAddReplicasIntegrationTest.java:169)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.lang.Thread.run(Thread.java:748)




Build Log:
[...truncated 14016 lines...]
   [junit4] Suite: org.apache.solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest
   [junit4]   2> 1044670 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[84ED65A251F02B]-worker) [    ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
   [junit4]   2> Creating dataDir: /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_84ED65A251F02B-001/init-core-data-001
   [junit4]   2> 1044671 WARN  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[84ED65A251F02B]-worker) [    ] o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=1 numCloses=1
   [junit4]   2> 1044671 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[84ED65A251F02B]-worker) [    ] o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP=true
   [junit4]   2> 1044673 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[84ED65A251F02B]-worker) [    ] o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (true) via: @org.apache.solr.util.RandomizeSSL(reason=, ssl=NaN, value=NaN, clientAuth=NaN)
   [junit4]   1> Formatting using clusterid: testClusterID
   [junit4]   2> 1044717 WARN  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[84ED65A251F02B]-worker) [    ] o.a.h.m.i.MetricsConfig Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
   [junit4]   2> 1044741 WARN  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[84ED65A251F02B]-worker) [    ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 1044744 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[84ED65A251F02B]-worker) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 1.8.0_191-b12
   [junit4]   2> 1044752 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[84ED65A251F02B]-worker) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 1044752 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[84ED65A251F02B]-worker) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 1044752 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[84ED65A251F02B]-worker) [    ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 1044753 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[84ED65A251F02B]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@4a5510e4{static,/static,jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 1044913 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[84ED65A251F02B]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@127deaf0{hdfs,/,file:///x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/solr/build/solr-core/test/J2/temp/jetty-localhost-41166-hdfs-_-any-3882527843238989641.dir/webapp/,AVAILABLE}{/hdfs}
   [junit4]   2> 1044914 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[84ED65A251F02B]-worker) [    ] o.e.j.s.AbstractConnector Started ServerConnector@76ec4d3e{HTTP/1.1,[http/1.1]}{localhost:41166}
   [junit4]   2> 1044914 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[84ED65A251F02B]-worker) [    ] o.e.j.s.Server Started @1044976ms
   [junit4]   2> 1045073 WARN  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[84ED65A251F02B]-worker) [    ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 1045077 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[84ED65A251F02B]-worker) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 1.8.0_191-b12
   [junit4]   2> 1045078 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[84ED65A251F02B]-worker) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 1045078 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[84ED65A251F02B]-worker) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 1045078 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[84ED65A251F02B]-worker) [    ] o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 1045078 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[84ED65A251F02B]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@278e3711{static,/static,jar:file:/x1/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 1045232 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[84ED65A251F02B]-worker) [    ] o.e.j.s.h.ContextHandler Started o.e.j.w.WebAppContext@243b7939{datanode,/,file:///x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/solr/build/solr-core/test/J2/temp/jetty-localhost-33163-datanode-_-any-309235434595814441.dir/webapp/,AVAILABLE}{/datanode}
   [junit4]   2> 1045232 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[84ED65A251F02B]-worker) [    ] o.e.j.s.AbstractConnector Started ServerConnector@29c73b71{HTTP/1.1,[http/1.1]}{localhost:33163}
   [junit4]   2> 1045232 INFO  (SUITE-HdfsAutoAddReplicasIntegrationTest-seed#[84ED65A251F02B]-worker) [    ] o.e.j.s.Server Started @1045294ms
   [junit4]   2> 1045419 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x56712950429b1d03: Processing first storage report for DS-912dd64a-2013-47bc-b7ae-47bdf1bcd541 from datanode dae32016-57f6-4c7d-9891-0b7ed88ece0d
   [junit4]   2> 1045419 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x56712950429b1d03: from storage DS-912dd64a-2013-47bc-b7ae-47bdf1bcd541 node DatanodeRegistration(127.0.0.1:36815, datanodeUuid=dae32016-57f6-4c7d-9891-0b7ed88ece0d, infoPort=46157, infoSecurePort=0, ipcPort=40216, storageInfo=lv=-57;cid=testClusterID;nsid=533354744;c=1555668162354), blocks: 0, hasStaleStorage: true, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 1045419 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x56712950429b1d03: Processing first storage report for DS-8dedd6a8-37f4-45bd-9ccd-11f3983a3ba2 from datanode dae32016-57f6-4c7d-9891-0b7ed88ece0d
   [junit4]   2> 1045419 INFO  (Block report processor) [    ] BlockStateChange BLOCK* processReport 0x56712950429b1d03: from storage DS-8dedd6a8-37f4-45bd-9ccd-11f3983a3ba2 node DatanodeRegistration(127.0.0.1:36815, datanodeUuid=dae32016-57f6-4c7d-9891-0b7ed88ece0d, infoPort=46157, infoSecurePort=0, ipcPort=40216, storageInfo=lv=-57;cid=testClusterID;nsid=533354744;c=1555668162354), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 1045675 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[84ED65A251F02B]) [    ] o.a.s.SolrTestCaseJ4 ###Starting testSimple
   [junit4]   2> 1045676 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[84ED65A251F02B]) [    ] o.a.s.c.MiniSolrCloudCluster Starting cluster of 3 servers in /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_84ED65A251F02B-001/tempDir-002
   [junit4]   2> 1045677 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[84ED65A251F02B]) [    ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 1045677 INFO  (ZkTestServer Run Thread) [    ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 1045677 INFO  (ZkTestServer Run Thread) [    ] o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 1045777 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[84ED65A251F02B]) [    ] o.a.s.c.ZkTestServer start zk server on port:33969
   [junit4]   2> 1045777 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[84ED65A251F02B]) [    ] o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:33969
   [junit4]   2> 1045777 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[84ED65A251F02B]) [    ] o.a.s.c.ZkTestServer connecting to 127.0.0.1 33969
   [junit4]   2> 1045790 INFO  (zkConnectionManagerCallback-3531-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1045803 INFO  (zkConnectionManagerCallback-3533-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1045805 INFO  (zkConnectionManagerCallback-3535-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1045817 WARN  (jetty-launcher-3536-thread-1) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 1045817 INFO  (jetty-launcher-3536-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 1045817 INFO  (jetty-launcher-3536-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 1045817 INFO  (jetty-launcher-3536-thread-1) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 1.8.0_191-b12
   [junit4]   2> 1045818 WARN  (jetty-launcher-3536-thread-2) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 1045818 INFO  (jetty-launcher-3536-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 1045818 INFO  (jetty-launcher-3536-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 1045819 INFO  (jetty-launcher-3536-thread-2) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 1.8.0_191-b12
   [junit4]   2> 1045819 WARN  (jetty-launcher-3536-thread-3) [    ] o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 1045819 INFO  (jetty-launcher-3536-thread-3) [    ] o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 1045819 INFO  (jetty-launcher-3536-thread-3) [    ] o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 1045820 INFO  (jetty-launcher-3536-thread-3) [    ] o.e.j.s.Server jetty-9.4.14.v20181114; built: 2018-11-14T21:20:31.478Z; git: c4550056e785fb5665914545889f21dc136ad9e6; jvm 1.8.0_191-b12
   [junit4]   2> 1045821 INFO  (jetty-launcher-3536-thread-1) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 1045821 INFO  (jetty-launcher-3536-thread-1) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 1045821 INFO  (jetty-launcher-3536-thread-1) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 1045821 INFO  (jetty-launcher-3536-thread-1) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@100ac5ea{/solr,null,AVAILABLE}
   [junit4]   2> 1045823 INFO  (jetty-launcher-3536-thread-2) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 1045823 INFO  (jetty-launcher-3536-thread-2) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 1045823 INFO  (jetty-launcher-3536-thread-2) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 1045823 INFO  (jetty-launcher-3536-thread-2) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@34c72f99{/solr,null,AVAILABLE}
   [junit4]   2> 1045825 INFO  (jetty-launcher-3536-thread-3) [    ] o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 1045825 INFO  (jetty-launcher-3536-thread-3) [    ] o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 1045825 INFO  (jetty-launcher-3536-thread-3) [    ] o.e.j.s.session node0 Scavenging every 660000ms
   [junit4]   2> 1045825 INFO  (jetty-launcher-3536-thread-3) [    ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@947192b{/solr,null,AVAILABLE}
   [junit4]   2> 1045826 INFO  (jetty-launcher-3536-thread-2) [    ] o.e.j.s.AbstractConnector Started ServerConnector@2034cf79{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:42151}
   [junit4]   2> 1045826 INFO  (jetty-launcher-3536-thread-2) [    ] o.e.j.s.Server Started @1045888ms
   [junit4]   2> 1045826 INFO  (jetty-launcher-3536-thread-2) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=42151}
   [junit4]   2> 1045826 INFO  (jetty-launcher-3536-thread-1) [    ] o.e.j.s.AbstractConnector Started ServerConnector@bfdf641{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:34759}
   [junit4]   2> 1045826 INFO  (jetty-launcher-3536-thread-1) [    ] o.e.j.s.Server Started @1045888ms
   [junit4]   2> 1045826 INFO  (jetty-launcher-3536-thread-1) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=34759}
   [junit4]   2> 1045826 ERROR (jetty-launcher-3536-thread-1) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 1045826 INFO  (jetty-launcher-3536-thread-1) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 1045827 INFO  (jetty-launcher-3536-thread-1) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 8.1.0
   [junit4]   2> 1045827 INFO  (jetty-launcher-3536-thread-1) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1045827 INFO  (jetty-launcher-3536-thread-1) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1045827 INFO  (jetty-launcher-3536-thread-1) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-04-19T10:02:43.488Z
   [junit4]   2> 1045827 ERROR (jetty-launcher-3536-thread-2) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 1045827 INFO  (jetty-launcher-3536-thread-2) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 1045827 INFO  (jetty-launcher-3536-thread-2) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 8.1.0
   [junit4]   2> 1045827 INFO  (jetty-launcher-3536-thread-2) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1045827 INFO  (jetty-launcher-3536-thread-2) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1045827 INFO  (jetty-launcher-3536-thread-2) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-04-19T10:02:43.488Z
   [junit4]   2> 1045832 INFO  (jetty-launcher-3536-thread-3) [    ] o.e.j.s.AbstractConnector Started ServerConnector@623064f9{HTTP/1.1,[http/1.1, h2c]}{127.0.0.1:39552}
   [junit4]   2> 1045832 INFO  (jetty-launcher-3536-thread-3) [    ] o.e.j.s.Server Started @1045894ms
   [junit4]   2> 1045832 INFO  (jetty-launcher-3536-thread-3) [    ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=39552}
   [junit4]   2> 1045833 ERROR (jetty-launcher-3536-thread-3) [    ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete.
   [junit4]   2> 1045833 INFO  (jetty-launcher-3536-thread-3) [    ] o.a.s.s.SolrDispatchFilter Using logger factory org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 1045833 INFO  (jetty-launcher-3536-thread-3) [    ] o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 8.1.0
   [junit4]   2> 1045833 INFO  (jetty-launcher-3536-thread-3) [    ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1045833 INFO  (jetty-launcher-3536-thread-3) [    ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1045833 INFO  (jetty-launcher-3536-thread-3) [    ] o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 2019-04-19T10:02:43.494Z
   [junit4]   2> 1045834 INFO  (zkConnectionManagerCallback-3538-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1045835 INFO  (jetty-launcher-3536-thread-2) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 1045839 INFO  (zkConnectionManagerCallback-3540-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1045840 INFO  (jetty-launcher-3536-thread-1) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 1045840 INFO  (jetty-launcher-3536-thread-2) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 1045840 INFO  (jetty-launcher-3536-thread-2) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 1045843 INFO  (jetty-launcher-3536-thread-2) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@3268ada1, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 1045843 INFO  (jetty-launcher-3536-thread-1) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 1045843 INFO  (jetty-launcher-3536-thread-1) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 1045843 INFO  (zkConnectionManagerCallback-3542-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1045844 INFO  (jetty-launcher-3536-thread-3) [    ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 1045847 INFO  (jetty-launcher-3536-thread-3) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverWorkLoopDelay is ignored
   [junit4]   2> 1045847 INFO  (jetty-launcher-3536-thread-3) [    ] o.a.s.c.SolrXmlConfig Configuration parameter autoReplicaFailoverBadNodeExpiration is ignored
   [junit4]   2> 1045849 INFO  (jetty-launcher-3536-thread-3) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@3268ada1, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 1045851 INFO  (jetty-launcher-3536-thread-1) [    ] o.a.s.c.SolrXmlConfig MBean server found: com.sun.jmx.mbeanserver.JmxMBeanServer@3268ada1, but no JMX reporters were configured - adding default JMX reporter.
   [junit4]   2> 1046030 INFO  (jetty-launcher-3536-thread-2) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 1046032 WARN  (jetty-launcher-3536-thread-2) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@7c74eca6[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1046036 WARN  (jetty-launcher-3536-thread-2) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@2daa79[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1046037 INFO  (jetty-launcher-3536-thread-2) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:33969/solr
   [junit4]   2> 1046039 INFO  (zkConnectionManagerCallback-3551-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1046041 INFO  (jetty-launcher-3536-thread-1) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 1046041 INFO  (zkConnectionManagerCallback-3553-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1046043 WARN  (jetty-launcher-3536-thread-1) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@7a2a70eb[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1046059 INFO  (jetty-launcher-3536-thread-3) [    ] o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 1046061 WARN  (jetty-launcher-3536-thread-3) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@22ab19ff[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1046063 WARN  (jetty-launcher-3536-thread-1) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@3c861980[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1046064 INFO  (jetty-launcher-3536-thread-1) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:33969/solr
   [junit4]   2> 1046067 WARN  (jetty-launcher-3536-thread-3) [    ] o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for SslContextFactory@1d67b027[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 1046068 INFO  (zkConnectionManagerCallback-3563-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1046068 INFO  (jetty-launcher-3536-thread-3) [    ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:33969/solr
   [junit4]   2> 1046069 WARN  (NIOServerCxn.Factory:0.0.0.0/0.0.0.0:0) [    ] o.a.z.s.NIOServerCnxn Unable to read additional data from client sessionid 0x103e98822c70008, likely client has closed socket
   [junit4]   2> 1046070 INFO  (zkConnectionManagerCallback-3567-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1046071 INFO  (zkConnectionManagerCallback-3569-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1046072 WARN  (NIOServerCxn.Factory:0.0.0.0/0.0.0.0:0) [    ] o.a.z.s.NIOServerCnxn Unable to read additional data from client sessionid 0x103e98822c70009, likely client has closed socket
   [junit4]   2> 1046073 INFO  (zkConnectionManagerCallback-3571-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1046096 INFO  (jetty-launcher-3536-thread-1) [n:127.0.0.1:34759_solr    ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:34759_solr
   [junit4]   2> 1046097 INFO  (jetty-launcher-3536-thread-1) [n:127.0.0.1:34759_solr    ] o.a.s.c.Overseer Overseer (id=73158789876350986-127.0.0.1:34759_solr-n_0000000000) starting
   [junit4]   2> 1046113 INFO  (jetty-launcher-3536-thread-3) [n:127.0.0.1:39552_solr    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:39552_solr
   [junit4]   2> 1046118 INFO  (zkConnectionManagerCallback-3580-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1046121 INFO  (zkCallback-3570-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1046122 INFO  (jetty-launcher-3536-thread-1) [n:127.0.0.1:34759_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1046124 INFO  (jetty-launcher-3536-thread-1) [n:127.0.0.1:34759_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:33969/solr ready
   [junit4]   2> 1046124 INFO  (zkCallback-3568-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1046125 INFO  (OverseerStateUpdate-73158789876350986-127.0.0.1:34759_solr-n_0000000000) [n:127.0.0.1:34759_solr    ] o.a.s.c.Overseer Starting to work on the main queue : 127.0.0.1:34759_solr
   [junit4]   2> 1046131 INFO  (jetty-launcher-3536-thread-1) [n:127.0.0.1:34759_solr    ] o.a.s.c.ZkController Publish node=127.0.0.1:34759_solr as DOWN
   [junit4]   2> 1046133 INFO  (jetty-launcher-3536-thread-1) [n:127.0.0.1:34759_solr    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 1046133 INFO  (jetty-launcher-3536-thread-1) [n:127.0.0.1:34759_solr    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:34759_solr
   [junit4]   2> 1046135 INFO  (zkCallback-3570-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 1046135 INFO  (zkCallback-3568-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 1046135 INFO  (zkCallback-3579-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 1046169 INFO  (jetty-launcher-3536-thread-2) [n:127.0.0.1:42151_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 1046174 INFO  (jetty-launcher-3536-thread-2) [n:127.0.0.1:42151_solr    ] o.a.s.c.ZkController Publish node=127.0.0.1:42151_solr as DOWN
   [junit4]   2> 1046175 INFO  (jetty-launcher-3536-thread-2) [n:127.0.0.1:42151_solr    ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 1046175 INFO  (jetty-launcher-3536-thread-2) [n:127.0.0.1:42151_solr    ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:42151_solr
   [junit4]   2> 1046177 INFO  (zkCallback-3570-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1046177 INFO  (zkCallback-3579-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1046177 INFO  (zkCallback-3568-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1046179 INFO  (zkCallback-3552-thread-1) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1046188 INFO  (zkConnectionManagerCallback-3585-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1046188 INFO  (jetty-launcher-3536-thread-1) [n:127.0.0.1:34759_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 1046190 INFO  (jetty-launcher-3536-thread-3) [n:127.0.0.1:39552_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 1046191 INFO  (jetty-launcher-3536-thread-3) [n:127.0.0.1:39552_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:33969/solr ready
   [junit4]   2> 1046192 INFO  (jetty-launcher-3536-thread-3) [n:127.0.0.1:39552_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 1046202 INFO  (zkConnectionManagerCallback-3592-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1046204 INFO  (jetty-launcher-3536-thread-2) [n:127.0.0.1:42151_solr    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 1046205 INFO  (jetty-launcher-3536-thread-2) [n:127.0.0.1:42151_solr    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:33969/solr ready
   [junit4]   2> 1046206 INFO  (jetty-launcher-3536-thread-2) [n:127.0.0.1:42151_solr    ] o.a.s.h.a.MetricsHistoryHandler No .system collection, keeping metrics history in memory.
   [junit4]   2> 1046228 INFO  (jetty-launcher-3536-thread-3) [n:127.0.0.1:39552_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@3268ada1
   [junit4]   2> 1046240 INFO  (jetty-launcher-3536-thread-1) [n:127.0.0.1:34759_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@3268ada1
   [junit4]   2> 1046252 INFO  (jetty-launcher-3536-thread-3) [n:127.0.0.1:39552_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@3268ada1
   [junit4]   2> 1046252 INFO  (jetty-launcher-3536-thread-3) [n:127.0.0.1:39552_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@3268ada1
   [junit4]   2> 1046255 INFO  (jetty-launcher-3536-thread-3) [n:127.0.0.1:39552_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_84ED65A251F02B-001/tempDir-002/node3/.
   [junit4]   2> 1046257 INFO  (jetty-launcher-3536-thread-2) [n:127.0.0.1:42151_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@3268ada1
   [junit4]   2> 1046261 INFO  (jetty-launcher-3536-thread-1) [n:127.0.0.1:34759_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@3268ada1
   [junit4]   2> 1046261 INFO  (jetty-launcher-3536-thread-1) [n:127.0.0.1:34759_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@3268ada1
   [junit4]   2> 1046263 INFO  (jetty-launcher-3536-thread-1) [n:127.0.0.1:34759_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_84ED65A251F02B-001/tempDir-002/node1/.
   [junit4]   2> 1046275 INFO  (jetty-launcher-3536-thread-2) [n:127.0.0.1:42151_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@3268ada1
   [junit4]   2> 1046275 INFO  (jetty-launcher-3536-thread-2) [n:127.0.0.1:42151_solr    ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@3268ada1
   [junit4]   2> 1046277 INFO  (jetty-launcher-3536-thread-2) [n:127.0.0.1:42151_solr    ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_84ED65A251F02B-001/tempDir-002/node2/.
   [junit4]   2> 1046399 INFO  (zkConnectionManagerCallback-3598-thread-1) [    ] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 1046401 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[84ED65A251F02B]) [    ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 1046402 INFO  (TEST-HdfsAutoAddReplicasIntegrationTest.testSimple-seed#[84ED65A251F02B]) [    ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:33969/solr ready
   [junit4]   2> 1046412 INFO  (qtp107901482-18093) [n:127.0.0.1:34759_solr    ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/cluster params={wt=javabin&version=2} status=0 QTime=4
   [junit4]   2> 1046415 INFO  (qtp1362134977-18102) [n:127.0.0.1:42151_solr    ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params collection.configName=conf&maxShardsPerNode=2&autoAddReplicas=true&name=testSimple1&nrtReplicas=2&action=CREATE&numShards=2&createNodeSet=127.0.0.1:39552_solr,127.0.0.1:34759_solr&wt=javabin&version=2 and sendToOCPQueue=true
   [junit4]   2> 1046420 INFO  (OverseerThreadFactory-2762-thread-1-processing-n:127.0.0.1:34759_solr) [n:127.0.0.1:34759_solr    ] o.a.s.c.a.c.CreateCollectionCmd Create collection testSimple1
   [junit4]   2> 1046532 INFO  (OverseerStateUpdate-73158789876350986-127.0.0.1:34759_solr-n_0000000000) [n:127.0.0.1:34759_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"testSimple1",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"testSimple1_shard1_replica_n1",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:34759/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 1046536 INFO  (OverseerStateUpdate-73158789876350986-127.0.0.1:34759_solr-n_0000000000) [n:127.0.0.1:34759_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"testSimple1",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"testSimple1_shard1_replica_n3",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:39552/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 1046540 INFO  (OverseerStateUpdate-73158789876350986-127.0.0.1:34759_solr-n_0000000000) [n:127.0.0.1:34759_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"testSimple1",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"testSimple1_shard2_replica_n5",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:34759/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 1046544 INFO  (OverseerStateUpdate-73158789876350986-127.0.0.1:34759_solr-n_0000000000) [n:127.0.0.1:34759_solr    ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"testSimple1",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"testSimple1_shard2_replica_n7",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"http://127.0.0.1:39552/solr",
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 1046754 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr    x:testSimple1_shard1_replica_n1] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node2&collection.configName=conf&newCollection=true&name=testSimple1_shard1_replica_n1&action=CREATE&numShards=2&collection=testSimple1&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 1046755 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr    x:testSimple1_shard2_replica_n7] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node8&collection.configName=conf&newCollection=true&name=testSimple1_shard2_replica_n7&action=CREATE&numShards=2&collection=testSimple1&shard=shard2&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 1046755 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr    x:testSimple1_shard2_replica_n7] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 4 transient cores
   [junit4]   2> 1046758 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr    x:testSimple1_shard1_replica_n3] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node4&collection.configName=conf&newCollection=true&name=testSimple1_shard1_replica_n3&action=CREATE&numShards=2&collection=testSimple1&shard=shard1&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 1046758 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr    x:testSimple1_shard2_replica_n5] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node6&collection.configName=conf&newCollection=true&name=testSimple1_shard2_replica_n5&action=CREATE&numShards=2&collection=testSimple1&shard=shard2&wt=javabin&version=2&replicaType=NRT
   [junit4]   2> 1047781 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.1.0
   [junit4]   2> 1047781 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.1.0
   [junit4]   2> 1047801 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.1.0
   [junit4]   2> 1047803 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.s.IndexSchema [testSimple1_shard1_replica_n3] Schema name=minimal
   [junit4]   2> 1047806 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 1047806 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.CoreContainer Creating SolrCore 'testSimple1_shard1_replica_n3' using configuration from collection testSimple1, trusted=true
   [junit4]   2> 1047807 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.SolrConfig Using Lucene MatchVersion: 8.1.0
   [junit4]   2> 1047811 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.testSimple1.shard1.replica_n3' (registry 'solr.core.testSimple1.shard1.replica_n3') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@3268ada1
   [junit4]   2> 1047813 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.s.IndexSchema [testSimple1_shard2_replica_n7] Schema name=minimal
   [junit4]   2> 1047815 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 1047816 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.CoreContainer Creating SolrCore 'testSimple1_shard2_replica_n7' using configuration from collection testSimple1, trusted=true
   [junit4]   2> 1047816 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.testSimple1.shard2.replica_n7' (registry 'solr.core.testSimple1.shard2.replica_n7') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@3268ada1
   [junit4]   2> 1047824 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.s.IndexSchema [testSimple1_shard2_replica_n5] Schema name=minimal
   [junit4]   2> 1047827 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 1047827 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.CoreContainer Creating SolrCore 'testSimple1_shard2_replica_n5' using configuration from collection testSimple1, trusted=true
   [junit4]   2> 1047827 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.testSimple1.shard2.replica_n5' (registry 'solr.core.testSimple1.shard2.replica_n5') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@3268ada1
   [junit4]   2> 1047831 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost:34207/solr_hdfs_home
   [junit4]   2> 1047831 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 1047831 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.SolrCore [[testSimple1_shard1_replica_n3] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_84ED65A251F02B-001/tempDir-002/node3/testSimple1_shard1_replica_n3], dataDir=[hdfs://localhost:34207/solr_hdfs_home/testSimple1/core_node4/data/]
   [junit4]   2> 1047833 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost:34207/solr_hdfs_home
   [junit4]   2> 1047834 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 1047834 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.SolrCore [[testSimple1_shard2_replica_n7] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_84ED65A251F02B-001/tempDir-002/node3/testSimple1_shard2_replica_n7], dataDir=[hdfs://localhost:34207/solr_hdfs_home/testSimple1/core_node8/data/]
   [junit4]   2> 1047834 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:34207/solr_hdfs_home/testSimple1/core_node4/data/snapshot_metadata
   [junit4]   2> 1047835 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:34207/solr_hdfs_home/testSimple1/core_node8/data/snapshot_metadata
   [junit4]   2> 1047840 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost:34207/solr_hdfs_home
   [junit4]   2> 1047840 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 1047840 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.SolrCore [[testSimple1_shard2_replica_n5] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_84ED65A251F02B-001/tempDir-002/node1/testSimple1_shard2_replica_n5], dataDir=[hdfs://localhost:34207/solr_hdfs_home/testSimple1/core_node6/data/]
   [junit4]   2> 1047841 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:34207/solr_hdfs_home/testSimple1/core_node6/data/snapshot_metadata
   [junit4]   2> 1047844 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.s.IndexSchema [testSimple1_shard1_replica_n1] Schema name=minimal
   [junit4]   2> 1047847 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id
   [junit4]   2> 1047848 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.CoreContainer Creating SolrCore 'testSimple1_shard1_replica_n1' using configuration from collection testSimple1, trusted=true
   [junit4]   2> 1047849 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr.core.testSimple1.shard1.replica_n1' (registry 'solr.core.testSimple1.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@3268ada1
   [junit4]   2> 1047849 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost:34207/solr_hdfs_home
   [junit4]   2> 1047849 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled
   [junit4]   2> 1047849 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 1047849 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 1047849 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 1047852 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.SolrCore [[testSimple1_shard1_replica_n1] ] Opening new SolrCore at [/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/solr/build/solr-core/test/J2/temp/solr.cloud.autoscaling.HdfsAutoAddReplicasIntegrationTest_84ED65A251F02B-001/tempDir-002/node1/testSimple1_shard1_replica_n1], dataDir=[hdfs://localhost:34207/solr_hdfs_home/testSimple1/core_node2/data/]
   [junit4]   2> 1047854 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:34207/solr_hdfs_home/testSimple1/core_node2/data/snapshot_metadata
   [junit4]   2> 1047867 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 1047867 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 1047867 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 1047871 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 1047871 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 1047871 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 1047878 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 1047879 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 1047879 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 1047879 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 1047879 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 1047881 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:34207/solr_hdfs_home/testSimple1/core_node4/data
   [junit4]   2> 1047881 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:34207/solr_hdfs_home/testSimple1/core_node6/data
   [junit4]   2> 1047882 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 1047882 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:34207/solr_hdfs_home/testSimple1/core_node8/data
   [junit4]   2> 1047902 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 1047905 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:34207/solr_hdfs_home/testSimple1/core_node2/data
   [junit4]   2> 1047922 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:34207/solr_hdfs_home/testSimple1/core_node8/data/index
   [junit4]   2> 1047924 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:34207/solr_hdfs_home/testSimple1/core_node4/data/index
   [junit4]   2> 1047924 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:34207/solr_hdfs_home/testSimple1/core_node6/data/index
   [junit4]   2> 1047941 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 1047941 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 1047941 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 1047945 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 1047945 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 1047945 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 1047951 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 1047951 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 1047951 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 1047958 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 1047961 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost:34207/solr_hdfs_home/testSimple1/core_node2/data/index
   [junit4]   2> 1047968 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Number of slabs of block cache [1] with direct memory allocation set to [true]
   [junit4]   2> 1047968 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Block cache target memory usage, slab size of [4194304] will allocate [1] slabs and use ~[4194304] bytes
   [junit4]   2> 1047968 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Creating new single instance HDFS BlockCache
   [junit4]   2> 1047971 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 1047973 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 1047976 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.s.b.BlockDirectory Block cache on write is disabled
   [junit4]   2> 1048107 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 1048107 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1048107 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 1048112 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 1048112 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1048112 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 1048123 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 1048123 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 1048124 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 1048124 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 1048149 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.s.SolrIndexSearcher Opening [Searcher@5a1f786a[testSimple1_shard1_replica_n3] main]
   [junit4]   2> 1048151 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 1048151 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.s.SolrIndexSearcher Opening [Searcher@11fd18b9[testSimple1_shard1_replica_n1] main]
   [junit4]   2> 1048152 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 1048153 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 1048153 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 1048154 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1631236302637629440
   [junit4]   2> 1048154 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 1048154 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 1048155 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1631236302638678016
   [junit4]   2> 1048158 INFO  (searcherExecutor-2775-thread-1-processing-n:127.0.0.1:39552_solr x:testSimple1_shard1_replica_n3 c:testSimple1 s:shard1 r:core_node4) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.SolrCore [testSimple1_shard1_replica_n3] Registered new searcher Searcher@5a1f786a[testSimple1_shard1_replica_n3] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1048159 INFO  (searcherExecutor-2778-thread-1-processing-n:127.0.0.1:34759_solr x:testSimple1_shard1_replica_n1 c:testSimple1 s:shard1 r:core_node2) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.SolrCore [testSimple1_shard1_replica_n1] Registered new searcher Searcher@11fd18b9[testSimple1_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1048163 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.ZkShardTerms Successful update of terms at /collections/testSimple1/terms/shard1 to Terms{values={core_node4=0}, version=0}
   [junit4]   2> 1048163 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/testSimple1/leaders/shard1
   [junit4]   2> 1048166 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.ZkShardTerms Successful update of terms at /collections/testSimple1/terms/shard1 to Terms{values={core_node2=0, core_node4=0}, version=1}
   [junit4]   2> 1048166 INFO  (qtp107901482-18095) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/testSimple1/leaders/shard1
   [junit4]   2> 1048170 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 1048170 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 1048170 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:39552/solr/testSimple1_shard1_replica_n3/
   [junit4]   2> 1048171 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.u.PeerSync PeerSync: core=testSimple1_shard1_replica_n3 url=http://127.0.0.1:39552/solr START replicas=[http://127.0.0.1:34759/solr/testSimple1_shard1_replica_n1/] nUpdates=100
   [junit4]   2> 1048171 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.u.PeerSync PeerSync: core=testSimple1_shard1_replica_n3 url=http://127.0.0.1:39552/solr DONE.  We have no versions.  sync failed.
   [junit4]   2> 1048174 INFO  (qtp107901482-18094) [n:127.0.0.1:34759_solr c:testSimple1 s:shard1 r:core_node2 x:testSimple1_shard1_replica_n1] o.a.s.c.S.Request [testSimple1_shard1_replica_n1]  webapp=/solr path=/get params={distrib=false&qt=/get&fingerprint=false&getVersions=100&wt=javabin&version=2} status=0 QTime=1
   [junit4]   2> 1048175 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.SyncStrategy Leader's attempt to sync with shard failed, moving to the next candidate
   [junit4]   2> 1048175 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.ShardLeaderElectionContext We failed sync, but we have no versions - we can't sync in that case - we were active before, so become leader anyway
   [junit4]   2> 1048175 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.ShardLeaderElectionContextBase Creating leader registration node /collections/testSimple1/leaders/shard1/leader after winning as /collections/testSimple1/leader_elect/shard1/election/73158789876350987-core_node4-n_0000000000
   [junit4]   2> 1048177 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.ShardLeaderElectionContext I am the new leader: http://127.0.0.1:39552/solr/testSimple1_shard1_replica_n3/ shard1
   [junit4]   2> 1048280 INFO  (zkCallback-3570-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimple1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 1048281 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 1048284 INFO  (qtp1422037242-18112) [n:127.0.0.1:39552_solr c:testSimple1 s:shard1 r:core_node4 x:testSimple1_shard1_replica_n3] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&coreNodeName=core_node4&collection.configName=conf&newCollection=true&name=testSimple1_shard1_replica_n3&action=CREATE&numShards=2&collection=testSimple1&shard=shard1&wt=javabin&version=2&replicaType=NRT} status=0 QTime=1526
   [junit4]   2> 1048384 INFO  (zkCallback-3570-thread-2) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimple1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 1048384 INFO  (zkCallback-3570-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimple1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 1048519 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 1048519 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1048519 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 1048529 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog
   [junit4]   2> 1048529 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1048529 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3
   [junit4]   2> 1048534 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 1048534 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 1048551 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 1048551 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 1048557 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.s.SolrIndexSearcher Opening [Searcher@149a652d[testSimple1_shard2_replica_n7] main]
   [junit4]   2> 1048559 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 1048560 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 1048560 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 1048561 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1631236303064399872
   [junit4]   2> 1048564 INFO  (searcherExecutor-2776-thread-1-processing-n:127.0.0.1:39552_solr x:testSimple1_shard2_replica_n7 c:testSimple1 s:shard2 r:core_node8) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.SolrCore [testSimple1_shard2_replica_n7] Registered new searcher Searcher@149a652d[testSimple1_shard2_replica_n7] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1048567 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.ZkShardTerms Successful update of terms at /collections/testSimple1/terms/shard2 to Terms{values={core_node8=0}, version=0}
   [junit4]   2> 1048568 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/testSimple1/leaders/shard2
   [junit4]   2> 1048579 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.s.SolrIndexSearcher Opening [Searcher@332b066f[testSimple1_shard2_replica_n5] main]
   [junit4]   2> 1048580 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf
   [junit4]   2> 1048581 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf
   [junit4]   2> 1048581 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms.
   [junit4]   2> 1048582 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.ShardLeaderElectionContext Waiting until we see more replicas up for shard shard2: total=2 found=1 timeoutin=14999ms
   [junit4]   2> 1048582 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1631236303086419968
   [junit4]   2> 1048587 INFO  (searcherExecutor-2777-thread-1-processing-n:127.0.0.1:34759_solr x:testSimple1_shard2_replica_n5 c:testSimple1 s:shard2 r:core_node6) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.SolrCore [testSimple1_shard2_replica_n5] Registered new searcher Searcher@332b066f[testSimple1_shard2_replica_n5] main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1048590 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.ZkShardTerms Successful update of terms at /collections/testSimple1/terms/shard2 to Terms{values={core_node6=0, core_node8=0}, version=1}
   [junit4]   2> 1048596 INFO  (qtp107901482-18092) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.ShardLeaderElectionContextBase make sure parent is created /collections/testSimple1/leaders/shard2
   [junit4]   2> 1048687 INFO  (zkCallback-3570-thread-2) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimple1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 1048687 INFO  (zkCallback-3570-thread-1) [    ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/testSimple1/state.json] for collection [testSimple1] has occurred - updating... (live nodes size: [3])
   [junit4]   2> 1049082 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 1049082 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 1049082 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.c.SyncStrategy Sync replicas to http://127.0.0.1:39552/solr/testSimple1_shard2_replica_n7/
   [junit4]   2> 1049083 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.u.PeerSync PeerSync: core=testSimple1_shard2_replica_n7 url=http://127.0.0.1:39552/solr START replicas=[http://127.0.0.1:34759/solr/testSimple1_shard2_replica_n5/] nUpdates=100
   [junit4]   2> 1049083 INFO  (qtp1422037242-18110) [n:127.0.0.1:39552_solr c:testSimple1 s:shard2 r:core_node8 x:testSimple1_shard2_replica_n7] o.a.s.u.PeerSync PeerSync: core=testSimple1_shard2_replica_n7 url=http://127.0.0.1:39552/solr DONE.  We have no versions.  sync failed.
   [junit4]   2> 1049085 INFO  (qtp107901482-18096) [n:127.0.0.1:34759_solr c:testSimple1 s:shard2 r:core_node6 x:testSimple1_shard2_replica_n5] o.a.s.c.S.Request

[...truncated too long message...]

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/lucene/top-level-ivy-settings.xml

resolve:

jar-checksums:
    [mkdir] Created dir: /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/lucene/null334253230
     [copy] Copying 38 files to /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/lucene/null334253230
   [delete] Deleting directory /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/lucene/null334253230

resolve-example:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/lucene/top-level-ivy-settings.xml

resolve:

resolve-server:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/lucene/top-level-ivy-settings.xml

resolve:

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/lucene/top-level-ivy-settings.xml

resolve:

jar-checksums:
    [mkdir] Created dir: /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/solr/null1760319963
     [copy] Copying 239 files to /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/solr/null1760319963
   [delete] Deleting directory /x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/solr/null1760319963

check-working-copy:
[ivy:cachepath] :: resolving dependencies :: org.eclipse.jgit#org.eclipse.jgit-caller;working
[ivy:cachepath] 	confs: [default]
[ivy:cachepath] 	found org.eclipse.jgit#org.eclipse.jgit;4.6.0.201612231935-r in public
[ivy:cachepath] 	found com.jcraft#jsch;0.1.53 in public
[ivy:cachepath] 	found com.googlecode.javaewah#JavaEWAH;1.1.6 in public
[ivy:cachepath] 	found org.apache.httpcomponents#httpclient;4.3.6 in public
[ivy:cachepath] 	found org.apache.httpcomponents#httpcore;4.3.3 in public
[ivy:cachepath] 	found commons-logging#commons-logging;1.1.3 in public
[ivy:cachepath] 	found commons-codec#commons-codec;1.6 in public
[ivy:cachepath] 	found org.slf4j#slf4j-api;1.7.2 in public
[ivy:cachepath] :: resolution report :: resolve 53ms :: artifacts dl 7ms
	---------------------------------------------------------------------
	|                  |            modules            ||   artifacts   |
	|       conf       | number| search|dwnlded|evicted|| number|dwnlded|
	---------------------------------------------------------------------
	|      default     |   8   |   0   |   0   |   0   ||   8   |   0   |
	---------------------------------------------------------------------
[wc-checker] Initializing working copy...
[wc-checker] SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
[wc-checker] SLF4J: Defaulting to no-operation (NOP) logger implementation
[wc-checker] SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details.
[wc-checker] Checking working copy status...

BUILD FAILED
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/build.xml:661: The following error occurred while executing this line:
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/build.xml:506: The following error occurred while executing this line:
/x1/jenkins/jenkins-slave/workspace/Lucene-Solr-BadApples-NightlyTests-8.x/checkout/build.xml:493: Source checkout is dirty (unversioned/missing files) after running tests!!! Offending files:
* solr/licenses/gmetric4j-1.0.7.jar.sha1
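
For context on the failure above: the final check at build.xml:493 fails whenever running the tests leaves the source checkout dirty, and here the unversioned file is a license checksum for gmetric4j-1.0.7.jar. The snippet below is only an illustrative sketch of how a .jar.sha1 file of this kind can be verified against the jar it describes; the script, its paths, and its function names are assumptions made for the example, not the project's actual build check.

    # Illustrative sketch only (not the Lucene/Solr build's own check):
    # verify that a committed <name>.jar.sha1 file matches the jar's actual SHA-1.
    import hashlib
    import sys
    from pathlib import Path

    def sha1_of(path: Path) -> str:
        """Compute the hex SHA-1 digest of a file, reading it in chunks."""
        digest = hashlib.sha1()
        with path.open("rb") as fh:
            for chunk in iter(lambda: fh.read(8192), b""):
                digest.update(chunk)
        return digest.hexdigest()

    def check_license_sha1(jar: Path, sha1_file: Path) -> bool:
        """Return True if the recorded checksum matches the jar's actual SHA-1."""
        # .sha1 files typically hold just the hex digest (optionally followed by a name).
        recorded = sha1_file.read_text().split()[0].strip().lower()
        actual = sha1_of(jar)
        if recorded != actual:
            print(f"MISMATCH: {sha1_file} records {recorded}, jar is {actual}")
            return False
        print(f"OK: {jar.name} -> {actual}")
        return True

    if __name__ == "__main__":
        # Hypothetical usage:
        #   python check_sha1.py path/to/gmetric4j-1.0.7.jar solr/licenses/gmetric4j-1.0.7.jar.sha1
        jar_path, sha1_path = Path(sys.argv[1]), Path(sys.argv[2])
        sys.exit(0 if check_license_sha1(jar_path, sha1_path) else 1)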

Total time: 312 minutes 17 seconds
Build step 'Invoke Ant' marked build as failure
Archiving artifacts
Recording test results
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any