Posted to commits@lucene.apache.org by mi...@apache.org on 2015/09/02 15:06:22 UTC

svn commit: r1700800 [14/24] - in /lucene/dev/branches/lucene6699: ./ dev-tools/ dev-tools/eclipse/ dev-tools/idea/.idea/ dev-tools/scripts/ lucene/ lucene/analysis/ lucene/analysis/common/ lucene/analysis/common/src/java/org/apache/lucene/analysis/ar/...

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateWriterTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateWriterTest.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateWriterTest.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateWriterTest.java Wed Sep  2 13:06:13 2015
@@ -31,6 +31,7 @@ import org.apache.solr.common.cloud.Slic
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.util.Utils;
+import org.apache.zookeeper.KeeperException;
 
 import java.util.HashMap;
 import java.util.Map;
@@ -204,4 +205,160 @@ public class ZkStateWriterTest extends S
 
   }
 
+  public void testExternalModificationToSharedClusterState() throws Exception {
+    String zkDir = createTempDir("testExternalModification").toFile().getAbsolutePath();
+
+    ZkTestServer server = new ZkTestServer(zkDir);
+
+    SolrZkClient zkClient = null;
+
+    try {
+      server.run();
+      AbstractZkTestCase.tryCleanSolrZkNode(server.getZkHost());
+      AbstractZkTestCase.makeSolrZkNode(server.getZkHost());
+
+      zkClient = new SolrZkClient(server.getZkAddress(), OverseerTest.DEFAULT_CONNECTION_TIMEOUT);
+      ZkController.createClusterZkNodes(zkClient);
+
+      ZkStateReader reader = new ZkStateReader(zkClient);
+      reader.createClusterStateWatchersAndUpdate();
+
+      ZkStateWriter writer = new ZkStateWriter(reader, new Overseer.Stats());
+
+      zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/c1", true);
+      zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/c2", true);
+
+      // create collection 1 with stateFormat = 1
+      ZkWriteCommand c1 = new ZkWriteCommand("c1",
+          new DocCollection("c1", new HashMap<String, Slice>(), new HashMap<String, Object>(), DocRouter.DEFAULT, 0, ZkStateReader.CLUSTER_STATE));
+      writer.enqueueUpdate(reader.getClusterState(), c1, null);
+      writer.writePendingUpdates();
+
+      reader.updateClusterState();
+      ClusterState clusterState = reader.getClusterState(); // keep a reference to the current cluster state object
+      assertTrue(clusterState.hasCollection("c1"));
+      assertFalse(clusterState.hasCollection("c2"));
+
+      // Simulate an external modification to /clusterstate.json
+      byte[] data = zkClient.getData("/clusterstate.json", null, null, true);
+      zkClient.setData("/clusterstate.json", data, true);
+
+      // enqueue another c1 so that ZkStateWriter has pending updates
+      writer.enqueueUpdate(clusterState, c1, null);
+      assertTrue(writer.hasPendingUpdates());
+
+      // create collection 2 with stateFormat = 1
+      ZkWriteCommand c2 = new ZkWriteCommand("c2",
+          new DocCollection("c2", new HashMap<String, Slice>(), new HashMap<String, Object>(), DocRouter.DEFAULT, 0, ZkStateReader.getCollectionPath("c2")));
+
+      try {
+        writer.enqueueUpdate(clusterState, c2, null); // we are sending in the old cluster state object
+        fail("Enqueue should not have succeeded");
+      } catch (KeeperException.BadVersionException bve) {
+        // expected
+      }
+
+      reader.updateClusterState();
+      try {
+        writer.enqueueUpdate(reader.getClusterState(), c2, null);
+        fail("enqueueUpdate after BadVersionException should not have suceeded");
+      } catch (IllegalStateException e) {
+        // expected
+      }
+
+      try {
+        writer.writePendingUpdates();
+        fail("writePendingUpdates after BadVersionException should not have suceeded");
+      } catch (IllegalStateException e) {
+        // expected
+      }
+
+    } finally {
+      IOUtils.close(zkClient);
+      server.shutdown();
+    }
+  }
+
+  public void testExternalModificationToStateFormat2() throws Exception {
+    String zkDir = createTempDir("testExternalModificationToStateFormat2").toFile().getAbsolutePath();
+
+    ZkTestServer server = new ZkTestServer(zkDir);
+
+    SolrZkClient zkClient = null;
+
+    try {
+      server.run();
+      AbstractZkTestCase.tryCleanSolrZkNode(server.getZkHost());
+      AbstractZkTestCase.makeSolrZkNode(server.getZkHost());
+
+      zkClient = new SolrZkClient(server.getZkAddress(), OverseerTest.DEFAULT_CONNECTION_TIMEOUT);
+      ZkController.createClusterZkNodes(zkClient);
+
+      ZkStateReader reader = new ZkStateReader(zkClient);
+      reader.createClusterStateWatchersAndUpdate();
+
+      ZkStateWriter writer = new ZkStateWriter(reader, new Overseer.Stats());
+
+      zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/c1", true);
+      zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/c2", true);
+
+      ClusterState state = reader.getClusterState();
+
+      // create collection 2 with stateFormat = 2
+      ZkWriteCommand c2 = new ZkWriteCommand("c2",
+          new DocCollection("c2", new HashMap<String, Slice>(), new HashMap<String, Object>(), DocRouter.DEFAULT, 0, ZkStateReader.getCollectionPath("c2")));
+      state = writer.enqueueUpdate(reader.getClusterState(), c2, null);
+      assertFalse(writer.hasPendingUpdates()); // first write is flushed immediately
+
+      int sharedClusterStateVersion = state.getZkClusterStateVersion();
+      int stateFormat2Version = state.getCollection("c2").getZNodeVersion();
+
+      // Simulate an external modification to /collections/c2/state.json
+      byte[] data = zkClient.getData(ZkStateReader.getCollectionPath("c2"), null, null, true);
+      zkClient.setData(ZkStateReader.getCollectionPath("c2"), data, true);
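+      // as above, writing the same bytes back still increments the state.json znode version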
+
+      // get the most up-to-date state
+      reader.updateClusterState();
+      state = reader.getClusterState();
+      assertTrue(state.hasCollection("c2"));
+      assertEquals(sharedClusterStateVersion, (int) state.getZkClusterStateVersion());
+      assertEquals(stateFormat2Version + 1, state.getCollection("c2").getZNodeVersion());
+
+      // enqueue an update to the stateFormat=2 collection so that the update is pending
+      state = writer.enqueueUpdate(state, c2, null);
+      assertTrue(writer.hasPendingUpdates());
+
+      // get the most up-to-date state
+      reader.updateClusterState();
+      state = reader.getClusterState();
+
+      // enqueue a stateFormat=1 collection which should cause a flush
+      ZkWriteCommand c1 = new ZkWriteCommand("c1",
+          new DocCollection("c1", new HashMap<String, Slice>(), new HashMap<String, Object>(), DocRouter.DEFAULT, 0, ZkStateReader.CLUSTER_STATE));
+
+      try {
+        state = writer.enqueueUpdate(state, c1, null);
+        fail("Enqueue should not have succeeded");
+      } catch (KeeperException.BadVersionException bve) {
+        // expected
+      }
+
+      try {
+        writer.enqueueUpdate(reader.getClusterState(), c2, null);
+        fail("enqueueUpdate after BadVersionException should not have suceeded");
+      } catch (IllegalStateException e) {
+        // expected
+      }
+
+      try {
+        writer.writePendingUpdates();
+        fail("writePendingUpdates after BadVersionException should not have suceeded");
+      } catch (IllegalStateException e) {
+        // expected
+      }
+    } finally {
+      IOUtils.close(zkClient);
+      server.shutdown();
+    }
+  }
 }

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/cloud/rule/RulesTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/cloud/rule/RulesTest.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/cloud/rule/RulesTest.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/cloud/rule/RulesTest.java Wed Sep  2 13:06:13 2015
@@ -37,7 +37,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static org.apache.solr.client.solrj.SolrRequest.METHOD.POST;
-import static org.apache.solr.core.CoreContainer.COLLECTIONS_HANDLER_PATH;
+import static org.apache.solr.common.params.CommonParams.COLLECTIONS_HANDLER_PATH;
 
 public class RulesTest extends AbstractFullDistribZkTestBase {
   static final Logger log = LoggerFactory.getLogger(RulesTest.class);

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java Wed Sep  2 13:06:13 2015
@@ -24,6 +24,7 @@ import org.apache.solr.common.SolrExcept
 import org.apache.solr.handler.admin.CollectionsHandler;
 import org.apache.solr.handler.admin.CoreAdminHandler;
 import org.apache.solr.handler.admin.InfoHandler;
+import org.apache.solr.handler.admin.ConfigSetsHandler;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -308,6 +309,7 @@ public class TestCoreContainer extends S
       " <str name=\"collectionsHandler\">" + CustomCollectionsHandler.class.getName() + "</str>" +
       " <str name=\"infoHandler\">" + CustomInfoHandler.class.getName() + "</str>" +
       " <str name=\"adminHandler\">" + CustomCoreAdminHandler.class.getName() + "</str>" +
+      " <str name=\"configSetsHandler\">" + CustomConfigSetsHandler.class.getName() + "</str>" +
       "</solr>";
 
   public static class CustomCollectionsHandler extends CollectionsHandler {
@@ -327,6 +329,12 @@ public class TestCoreContainer extends S
       super(cc);
     }
   }
+
+  public static class CustomConfigSetsHandler extends ConfigSetsHandler {
+    public CustomConfigSetsHandler(CoreContainer cc) {
+      super(cc);
+    }
+  }
 
   @Test
   public void testCustomHandlers() throws Exception {

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java Wed Sep  2 13:06:13 2015
@@ -189,7 +189,7 @@ public class TestSolrConfigHandler exten
         10);
 
     payload = "{\n" +
-        "'update-requesthandler' : { 'name' : '/x', 'class': 'org.apache.solr.handler.DumpRequestHandler' , 'startup' : 'lazy' , 'a':'b' , 'defaults': {'def_a':'def A val'}}\n" +
+        "'update-requesthandler' : { 'name' : '/x', 'class': 'org.apache.solr.handler.DumpRequestHandler' , 'startup' : 'lazy' , 'a':'b' , 'defaults': {'def_a':'def A val', 'multival':['a','b','c']}}\n" +
         "}";
     runConfigCommand(writeHarness, "/config?wt=json", payload);
 
@@ -209,6 +209,14 @@ public class TestSolrConfigHandler exten
         "def A val",
         10);
 
+    testForResponseElement(writeHarness,
+        testServerBaseUrl,
+        "/x?wt=json&param=multival&json.nl=map",
+        cloudSolrClient,
+        Arrays.asList("params", "multival"),
+        Arrays.asList("a", "b", "c"),
+        10);
+
     payload = "{\n" +
         "'delete-requesthandler' : '/x'" +
         "}";

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/core/TestSolrXml.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/core/TestSolrXml.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/core/TestSolrXml.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/core/TestSolrXml.java Wed Sep  2 13:06:13 2015
@@ -72,6 +72,7 @@ public class TestSolrXml extends SolrTes
     assertEquals("core admin handler class", "testAdminHandler", cfg.getCoreAdminHandlerClass());
     assertEquals("collection handler class", "testCollectionsHandler", cfg.getCollectionsHandlerClass());
     assertEquals("info handler class", "testInfoHandler", cfg.getInfoHandlerClass());
+    assertEquals("config set handler class", "testConfigSetsHandler", cfg.getConfigSetsHandlerClass());
     assertEquals("core load threads", 11, cfg.getCoreLoadThreadCount());
     assertThat("core root dir", cfg.getCoreRootDirectory(), containsString("testCoreRootDirectory"));
     assertEquals("distrib conn timeout", 22, cfg.getDistributedConnectionTimeout());

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerCloud.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerCloud.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerCloud.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerCloud.java Wed Sep  2 13:06:13 2015
@@ -25,12 +25,15 @@ import java.util.Map;
 import java.util.Objects;
 
 import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.request.LukeRequest;
 import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
 import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.RequestParams;
@@ -73,19 +76,45 @@ public class TestSolrConfigHandlerCloud
     setupHarnesses();
     testReqHandlerAPIs();
     testReqParams();
+    testAdminPath();
+  }
+
+  private void testAdminPath() throws Exception{
+    String testServerBaseUrl = getRandomServer(cloudClient,"collection1");
+    RestTestHarness writeHarness = restTestHarnesses.get(random().nextInt(restTestHarnesses.size()));
+    String payload = "{\n" +
+        "'create-requesthandler' : { 'name' : '/admin/luke', " +
+        "'class': 'org.apache.solr.handler.DumpRequestHandler'}}";
+
+    TestSolrConfigHandler.runConfigCommand(writeHarness, "/config?wt=json", payload);
+
+
+    TestSolrConfigHandler.testForResponseElement(writeHarness,
+        testServerBaseUrl,
+        "/config/overlay?wt=json",
+        cloudClient,
+        Arrays.asList("overlay", "requestHandler", "/admin/luke", "class"),
+        "org.apache.solr.handler.DumpRequestHandler",
+        10);
+
+    NamedList<Object> rsp = cloudClient.request(new LukeRequest());
+    System.out.println(rsp);
   }
 
   private void testReqHandlerAPIs() throws Exception {
-    DocCollection coll = cloudClient.getZkStateReader().getClusterState().getCollection("collection1");
+    String testServerBaseUrl = getRandomServer(cloudClient,"collection1");
+    RestTestHarness writeHarness = restTestHarnesses.get(random().nextInt(restTestHarnesses.size()));
+    TestSolrConfigHandler.reqhandlertests(writeHarness, testServerBaseUrl , cloudClient);
+  }
+
+  public static String getRandomServer(CloudSolrClient cloudClient, String collName) {
+    DocCollection coll = cloudClient.getZkStateReader().getClusterState().getCollection(collName);
     List<String> urls = new ArrayList<>();
     for (Slice slice : coll.getSlices()) {
       for (Replica replica : slice.getReplicas())
         urls.add(""+replica.get(ZkStateReader.BASE_URL_PROP) + "/"+replica.get(ZkStateReader.CORE_NAME_PROP));
     }
-
-    RestTestHarness writeHarness = restTestHarnesses.get(random().nextInt(restTestHarnesses.size()));
-    String testServerBaseUrl = urls.get(random().nextInt(urls.size()));
-    TestSolrConfigHandler.reqhandlertests(writeHarness, testServerBaseUrl , cloudClient);
+    return urls.get(random().nextInt(urls.size()));
   }
 
   private void testReqParams() throws Exception{

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/handler/component/TestDistributedStatsComponentCardinality.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/handler/component/TestDistributedStatsComponentCardinality.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/handler/component/TestDistributedStatsComponentCardinality.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/handler/component/TestDistributedStatsComponentCardinality.java Wed Sep  2 13:06:13 2015
@@ -88,7 +88,6 @@ public class TestDistributedStatsCompone
                     "long_l", ""+longValue, 
                     "long_l_prehashed_l", ""+HASHER.hashLong(longValue).asLong(),
                     "string_s", strValue,
-                    // NOTE: renamed hashUnencodedChars starting with guava 15
                     "string_s_prehashed_l", ""+HASHER.hashString(strValue).asLong()));
 
       longValue -= BIG_PRIME;

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/response/TestCustomDocTransformer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/response/TestCustomDocTransformer.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/response/TestCustomDocTransformer.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/response/TestCustomDocTransformer.java Wed Sep  2 13:06:13 2015
@@ -19,8 +19,6 @@ package org.apache.solr.response;
 
 import java.io.IOException;
 
-import org.apache.lucene.document.StoredField;
-import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.StorableField;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrDocument;
@@ -108,7 +106,7 @@ public class TestCustomDocTransformer ex
      * This transformer simply concatenates the values of multiple fields
      */
     @Override
-    public void transform(SolrDocument doc, int docid) throws IOException {
+    public void transform(SolrDocument doc, int docid, float score) throws IOException {
       str.setLength(0);
       for(String s : extra) {
         String v = getAsString(s, doc);

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/ReturnFieldsTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/ReturnFieldsTest.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/ReturnFieldsTest.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/ReturnFieldsTest.java Wed Sep  2 13:06:13 2015
@@ -20,28 +20,16 @@ package org.apache.solr.search;
 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.response.transform.*;
-
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+import java.util.Locale;
 import java.util.Random;
 
 public class ReturnFieldsTest extends SolrTestCaseJ4 {
 
   // :TODO: datatypes produced by the functions used may change
 
-  /**
-   * values of the fl param that mean all real fields
-   */
-  private static String[] ALL_REAL_FIELDS = new String[] { "", "*" };
-
-  /**
-   * values of the fl param that mean all real fields and score
-   */
-  private static String[] SCORE_AND_REAL_FIELDS = new String[] {
-      "score", "score,*", "*,score"
-  };
-
   @BeforeClass
   public static void beforeClass() throws Exception {
     System.setProperty("enable.update.log", "false"); // schema12 doesn't support _version_
@@ -341,14 +329,14 @@ public class ReturnFieldsTest extends So
       final boolean aliasId = r.nextBoolean();
       final boolean aliasFoo = r.nextBoolean();
 
-      final String id = TestUtil.randomWhitespace(r, 0, 3) +
+      final String id = randomWhitespace(r, 0, 3) +
         (aliasId ? "aliasId:" : "") +
         "id" + 
-        TestUtil.randomWhitespace(r, 1, 3);
-      final String foo_i = TestUtil.randomWhitespace(r, 0, 3) +
+        randomWhitespace(r, 1, 3);
+      final String foo_i = randomWhitespace(r, 0, 3) +
         (aliasFoo ? "aliasFoo:" : "") +
         "foo_i" + 
-        TestUtil.randomWhitespace(r, 0, 3);
+        randomWhitespace(r, 0, 3);
 
       final String fl = id + (r.nextBoolean() ? "" : ",") + foo_i;
       ReturnFields rf = new SolrReturnFields(req("fl", fl));
@@ -366,4 +354,67 @@ public class ReturnFieldsTest extends So
     }
   }
 
+  /** List of characters that match {@link Character#isWhitespace} */
+  private static final char[] WHITESPACE_CHARACTERS = new char[] {
+    // :TODO: is this list exhaustive?
+    '\u0009',
+    '\n',    
+    '\u000B',
+    '\u000C',
+    '\r',    
+    '\u001C',
+    '\u001D',
+    '\u001E',
+    '\u001F',
+    '\u0020',
+    // '\u0085', failed sanity check?
+    '\u1680',
+    // '\u180E', no longer whitespace in Unicode 7.0 (Java 9)!
+    '\u2000',
+    '\u2001',
+    '\u2002',
+    '\u2003',
+    '\u2004',
+    '\u2005',
+    '\u2006',
+    '\u2008',
+    '\u2009',
+    '\u200A',
+    '\u2028',
+    '\u2029',
+    '\u205F',
+    '\u3000',
+  };
+
+  static {
+    // if the JVM/unicode can redefine whitespace once (LUCENE-6760), it might happen again
+    // in the future.  if that happens, fail early with a clear message, even if java asserts
+    // (used in randomWhitespace) are disabled
+    
+    for (int offset = 0; offset < WHITESPACE_CHARACTERS.length; offset++) {
+      char c = WHITESPACE_CHARACTERS[offset];
+      if (! Character.isWhitespace(c) ) {
+        fail(String.format(Locale.ENGLISH, "Not really whitespace? New JVM/Unicode definitions? WHITESPACE_CHARACTERS[%d] is '\\u%04X'", offset, (int) c));
+      }
+    }
+  }
+  
+  /**
+   * Returns a random string in the specified length range consisting 
+   * entirely of whitespace characters 
+   * @see #WHITESPACE_CHARACTERS
+   */
+  public static String randomWhitespace(Random r, int minLength, int maxLength) {
+    final int end = TestUtil.nextInt(r, minLength, maxLength);
+    StringBuilder out = new StringBuilder();
+    for (int i = 0; i < end; i++) {
+      int offset = TestUtil.nextInt(r, 0, WHITESPACE_CHARACTERS.length-1);
+      char c = WHITESPACE_CHARACTERS[offset];
+      // sanity check
+      assert Character.isWhitespace(c) : String.format(Locale.ENGLISH, "Not really whitespace? WHITESPACE_CHARACTERS[%d] is '\\u%04X'", offset, (int) c);
+      out.append(c);
+    }
+    return out.toString();
+  }
+
 }

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/TestElisionMultitermQuery.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/TestElisionMultitermQuery.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/TestElisionMultitermQuery.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/TestElisionMultitermQuery.java Wed Sep  2 13:06:13 2015
@@ -4,7 +4,7 @@ import org.apache.solr.SolrTestCaseJ4;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java Wed Sep  2 13:06:13 2015
@@ -566,8 +566,8 @@ public class TestExtendedDismaxParser ex
   }
   
   public void testAliasingBoost() throws Exception {
-    assertQ(req("defType","edismax", "q","Zapp Pig", "qf","myalias", "f.myalias.qf","name trait_ss^0.5"), "//result/doc[1]/str[@name='id']=42", "//result/doc[2]/str[@name='id']=47");//doc 42 should score higher than 46
-    assertQ(req("defType","edismax", "q","Zapp Pig", "qf","myalias^100 name", "f.myalias.qf","trait_ss^0.5"), "//result/doc[1]/str[@name='id']=47", "//result/doc[2]/str[@name='id']=42");//Now the order should be inverse
+    assertQ(req("defType","edismax", "q","Zapp Pig", "qf","myalias", "f.myalias.qf","name trait_ss^0.1"), "//result/doc[1]/str[@name='id']=42", "//result/doc[2]/str[@name='id']=47");//doc 42 should score higher than 46
+    assertQ(req("defType","edismax", "q","Zapp Pig", "qf","myalias^100 name", "f.myalias.qf","trait_ss^0.1"), "//result/doc[1]/str[@name='id']=47", "//result/doc[2]/str[@name='id']=42");//Now the order should be inverse
   }
   
   public void testCyclicAliasing() throws Exception {

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/TestRangeQuery.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/TestRangeQuery.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/TestRangeQuery.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/TestRangeQuery.java Wed Sep  2 13:06:13 2015
@@ -259,8 +259,8 @@ public class TestRangeQuery extends Solr
         SolrQueryResponse qr = h.queryAndResponse(handler, req);
         if (last != null) {
           // we only test if the same docs matched since some queries will include factors like idf, etc.
-          DocList rA = ((ResultContext)qr.getValues().get("response")).docs;
-          DocList rB = ((ResultContext)last.getValues().get("response")).docs;
+          DocList rA = ((ResultContext)qr.getValues().get("response")).getDocList();
+          DocList rB = ((ResultContext)last.getValues().get("response")).getDocList();
           sameDocs( rA, rB );
         }
         req.close();

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java Wed Sep  2 13:06:13 2015
@@ -362,12 +362,15 @@ public class TestJsonFacets extends Solr
   }
 
   public static void doStatsTemplated(Client client, ModifiableSolrParams p) throws Exception {
+    p.set("Z_num_i", "Z_" + p.get("num_i") );
+
     MacroExpander m = new MacroExpander( p.getMap() );
 
     String cat_s = m.expand("${cat_s}");
     String where_s = m.expand("${where_s}");
     String num_d = m.expand("${num_d}");
     String num_i = m.expand("${num_i}");
+    String Z_num_i = m.expand("${Z_num_i}");
     String val_b = m.expand("${val_b}");
     String date = m.expand("${date}");
     String super_s = m.expand("${super_s}");
@@ -377,13 +380,13 @@ public class TestJsonFacets extends Solr
     client.deleteByQuery("*:*", null);
 
     client.add(sdoc("id", "1", cat_s, "A", where_s, "NY", num_d, "4", num_i, "2",   super_s, "zodiac",  date,"2001-01-01T01:01:01Z", val_b, "true", sparse_s, "one"), null);
-    client.add(sdoc("id", "2", cat_s, "B", where_s, "NJ", num_d, "-9", num_i, "-5", super_s,"superman", date,"2002-02-02T02:02:02Z", val_b, "false"         , multi_ss,"a", multi_ss,"b" ), null);
+    client.add(sdoc("id", "2", cat_s, "B", where_s, "NJ", num_d, "-9", num_i, "-5", super_s,"superman", date,"2002-02-02T02:02:02Z", val_b, "false"         , multi_ss,"a", multi_ss,"b" , Z_num_i, "0"), null);
     client.add(sdoc("id", "3"), null);
     client.commit();
-    client.add(sdoc("id", "4", cat_s, "A", where_s, "NJ", num_d, "2", num_i, "3",   super_s,"spiderman", date,"2003-03-03T03:03:03Z"                         , multi_ss, "b"), null);
+    client.add(sdoc("id", "4", cat_s, "A", where_s, "NJ", num_d, "2", num_i, "3",   super_s,"spiderman", date,"2003-03-03T03:03:03Z"                         , multi_ss, "b", Z_num_i, ""+Integer.MIN_VALUE), null);
     client.add(sdoc("id", "5", cat_s, "B", where_s, "NJ", num_d, "11", num_i, "7",  super_s,"batman"   , date,"2001-02-03T01:02:03Z"          ,sparse_s,"two", multi_ss, "a"), null);
     client.commit();
-    client.add(sdoc("id", "6", cat_s, "B", where_s, "NY", num_d, "-5", num_i, "-5", super_s,"hulk"     , date,"2002-03-01T03:02:01Z"                         , multi_ss, "b", multi_ss, "a" ), null);
+    client.add(sdoc("id", "6", cat_s, "B", where_s, "NY", num_d, "-5", num_i, "-5", super_s,"hulk"     , date,"2002-03-01T03:02:01Z"                         , multi_ss, "b", multi_ss, "a", Z_num_i, ""+Integer.MAX_VALUE), null);
     client.commit();
 
 
@@ -969,6 +972,20 @@ public class TestJsonFacets extends Solr
             "}"
     );
 
+    // test 0, min/max int
+    client.testJQ(params(p, "q", "*:*"
+        , "json.facet", "{" +
+                " u : 'unique(${Z_num_i})'" +
+                ", f1:{ type:field, field:${Z_num_i} }" +
+        "}"
+        )
+        , "facets=={count:6 " +
+            ",u:3" +
+            ",f1:{ buckets:[{val:" + Integer.MIN_VALUE + ",count:1},{val:0,count:1},{val:" + Integer.MAX_VALUE+",count:1}]} " +
+            "}"
+    );
+
+
   }
 
 

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/stats/TestBaseStatsCache.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/stats/TestBaseStatsCache.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/stats/TestBaseStatsCache.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/stats/TestBaseStatsCache.java Wed Sep  2 13:06:13 2015
@@ -1,6 +1,6 @@
 package org.apache.solr.search.stats;
 
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/stats/TestDefaultStatsCache.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/stats/TestDefaultStatsCache.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/stats/TestDefaultStatsCache.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/stats/TestDefaultStatsCache.java Wed Sep  2 13:06:13 2015
@@ -1,6 +1,6 @@
 package org.apache.solr.search.stats;
 
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/stats/TestExactSharedStatsCache.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/stats/TestExactSharedStatsCache.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/stats/TestExactSharedStatsCache.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/stats/TestExactSharedStatsCache.java Wed Sep  2 13:06:13 2015
@@ -1,6 +1,6 @@
 package org.apache.solr.search.stats;
 
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/stats/TestExactStatsCache.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/stats/TestExactStatsCache.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/stats/TestExactStatsCache.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/stats/TestExactStatsCache.java Wed Sep  2 13:06:13 2015
@@ -1,6 +1,6 @@
 package org.apache.solr.search.stats;
 
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/stats/TestLRUStatsCache.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/stats/TestLRUStatsCache.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/stats/TestLRUStatsCache.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/search/stats/TestLRUStatsCache.java Wed Sep  2 13:06:13 2015
@@ -1,6 +1,6 @@
 package org.apache.solr.search.stats;
 
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java Wed Sep  2 13:06:13 2015
@@ -35,8 +35,10 @@ import org.apache.http.message.AbstractH
 import org.apache.http.message.BasicHeader;
 import org.apache.http.util.EntityUtils;
 import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.client.solrj.request.GenericSolrRequest;
 import org.apache.solr.cloud.MiniSolrCloudCluster;
 import org.apache.solr.cloud.TestMiniSolrCloudCluster;
@@ -64,7 +66,6 @@ public class BasicAuthIntegrationTest ex
 
   private static final Logger log = LoggerFactory.getLogger(BasicAuthIntegrationTest.class);
 
-
   @Override
   protected void doExtraTests(MiniSolrCloudCluster miniCluster, SolrZkClient zkClient, ZkStateReader zkStateReader,
                               CloudSolrClient cloudSolrClient, String defaultCollName) throws Exception {
@@ -73,10 +74,21 @@ public class BasicAuthIntegrationTest ex
     assertNotNull(rsp.get(CommandOperation.ERR_MSGS));
     zkClient.setData("/security.json", STD_CONF.replaceAll("'", "\"").getBytes(UTF_8), true);
     String baseUrl = getRandomReplica(zkStateReader.getClusterState().getCollection(defaultCollName), random()).getStr(BASE_URL_PROP);
-
     HttpClient cl = cloudSolrClient.getLbClient().getHttpClient();
     verifySecurityStatus(cl, baseUrl + "/admin/authentication", "authentication/class", "solr.BasicAuthPlugin", 20);
 
+    boolean found = false;
+    for (JettySolrRunner jettySolrRunner : miniCluster.getJettySolrRunners()) {
+      if(baseUrl.contains(String.valueOf(jettySolrRunner.getLocalPort()))){
+        found = true;
+        jettySolrRunner.stop();
+        jettySolrRunner.start();
+        verifySecurityStatus(cl, baseUrl + "/admin/authentication", "authentication/class", "solr.BasicAuthPlugin", 20);
+        break;
+      }
+    }
+    assertTrue("No server found to restart , looking for : "+baseUrl , found);
+
     String command = "{\n" +
         "'set-user': {'harry':'HarryIsCool'}\n" +
         "}";
@@ -133,6 +145,30 @@ public class BasicAuthIntegrationTest ex
 
     verifySecurityStatus(cl, baseUrl+"/admin/authorization", "authorization/permissions[1]/collection", "x", 20);
 
+    httpPost = new HttpPost(baseUrl + "/admin/authorization");
+    setBasicAuthHeader(httpPost, "harry", "HarryIsUberCool");
+    httpPost.setEntity(new ByteArrayEntity(Utils.toJSON(singletonMap("set-permission", Utils.makeMap
+        ("name","collection-admin-edit", "role", "admin" )))));
+    r = cl.execute(httpPost);
+
+    verifySecurityStatus(cl, baseUrl+"/admin/authorization", "authorization/permissions[2]/name", "collection-admin-edit", 20);
+
+    CollectionAdminRequest.Reload reload = new CollectionAdminRequest.Reload();
+    reload.setCollectionName(cloudSolrClient.getDefaultCollection());
+
+    HttpSolrClient solrClient = new HttpSolrClient(baseUrl);
+    try {
+      rsp = solrClient.request(reload);
+      fail("must have failed");
+    } catch (HttpSolrClient.RemoteSolrException e) {
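+      // expected: this request carries no credentials and reload is now guarded by the collection-admin-edit permission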
+
+    }
+
+    httpPost = new HttpPost(baseUrl + "/admin/authorization");
+    setBasicAuthHeader(httpPost, "harry", "HarryIsUberCool");
+    httpPost.setEntity(new ByteArrayEntity(Utils.toJSON(singletonMap("delete-permission", "collection-admin-edit"))));
+    r = cl.execute(httpPost); // cleanup so that the super class does not need to pass on credentials
+
   }
 
   public static void verifySecurityStatus(HttpClient cl, String url, String objPath, Object expected, int count) throws Exception {
@@ -197,6 +233,11 @@ public class BasicAuthIntegrationTest ex
   public void testErrorsInShutdown() throws Exception {
   }
 
+
+  @Override
+  public void testCollectionCreateWithoutCoresThenDelete() throws Exception {
+  }
+
   //the password is 'SolrRocks'
  //this could be generated every time. But then we will not know if there is any regression
   private static final String STD_CONF = "{\n" +

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/security/TestRuleBasedAuthorizationPlugin.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/security/TestRuleBasedAuthorizationPlugin.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/security/TestRuleBasedAuthorizationPlugin.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/security/TestRuleBasedAuthorizationPlugin.java Wed Sep  2 13:06:13 2015
@@ -95,7 +95,7 @@ public class TestRuleBasedAuthorizationP
     assertEquals(FORBIDDEN,authResp.statusCode);
 
     values.put("resource","/admin/collections");
-    values.put("collectionRequests",new ArrayList<>());
+    values.put("requestType", AuthorizationContext.RequestType.ADMIN);
     values.put("params", new MapSolrParams(Collections.singletonMap("action", "LIST")));
     values.put("httpMethod","GET");
     authResp = plugin.authorize(context);
@@ -109,6 +109,10 @@ public class TestRuleBasedAuthorizationP
     authResp = plugin.authorize(context);
     assertEquals(PROMPT_FOR_CREDENTIALS, authResp.statusCode);
 
+    values.put("params", new MapSolrParams(Collections.singletonMap("action", "RELOAD")));
+    authResp = plugin.authorize(context);
+    assertEquals(PROMPT_FOR_CREDENTIALS, authResp.statusCode);
+
     values.put("userPrincipal", new BasicUserPrincipal("somebody"));
     authResp = plugin.authorize(context);
     assertEquals(FORBIDDEN,authResp.statusCode);

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java Wed Sep  2 13:06:13 2015
@@ -24,9 +24,9 @@ import static org.easymock.EasyMock.repl
 
 import java.io.BufferedInputStream;
 import java.io.ByteArrayInputStream;
+import java.io.File;
 import java.io.IOException;
-import java.net.HttpURLConnection;
-import java.net.SocketTimeoutException;
+import java.io.InputStream;
 import java.net.URL;
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
@@ -40,6 +40,7 @@ import javax.servlet.ReadListener;
 import javax.servlet.ServletInputStream;
 import javax.servlet.http.HttpServletRequest;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrException;
@@ -124,38 +125,45 @@ public class SolrRequestParserTest exten
   @Test
   public void testStreamURL() throws Exception
   {
-    String url = "http://www.apache.org/dist/lucene/solr/";
-    byte[] bytes = null;
-    try {
-      URL u = new URL(url);
-      HttpURLConnection connection = (HttpURLConnection)u.openConnection();
-      connection.setConnectTimeout(5000);
-      connection.setReadTimeout(5000);
-      connection.connect();
-      int code = connection.getResponseCode();
-      assumeTrue("wrong response code from server: " + code, 200 == code);
-      bytes = IOUtils.toByteArray( connection.getInputStream());
-    }
-    catch( Exception ex ) {
-      assumeNoException("Unable to connect to " + url + " to run the test.", ex);
-      return;
+    URL url = getClass().getResource("/README");
+    assertNotNull("Missing file 'README' in test-resources root folder.", url);
+    
+    byte[] bytes = IOUtils.toByteArray(url);
+
+    SolrCore core = h.getCore();
+    
+    Map<String,String[]> args = new HashMap<>();
+    args.put( CommonParams.STREAM_URL, new String[] { url.toExternalForm() } );
+    
+    // Make sure it got a single stream in and out ok
+    List<ContentStream> streams = new ArrayList<>();
+    try (SolrQueryRequest req = parser.buildRequestFrom( core, new MultiMapSolrParams( args ), streams )) {
+      assertEquals( 1, streams.size() );
+      try (InputStream in = streams.get(0).getStream()) {
+        assertArrayEquals( bytes, IOUtils.toByteArray( in ) );
+      }
     }
+  }
+  
+  @Test
+  public void testStreamFile() throws Exception
+  {
+    File file = getFile("README");
+    
+    byte[] bytes = FileUtils.readFileToByteArray(file);
 
     SolrCore core = h.getCore();
     
     Map<String,String[]> args = new HashMap<>();
-    args.put( CommonParams.STREAM_URL, new String[] {url} );
+    args.put( CommonParams.STREAM_FILE, new String[] { file.getAbsolutePath() } );
     
     // Make sure it got a single stream in and out ok
     List<ContentStream> streams = new ArrayList<>();
-    SolrQueryRequest req = parser.buildRequestFrom( core, new MultiMapSolrParams( args ), streams );
-    assertEquals( 1, streams.size() );
-    try {
-      assertArrayEquals( bytes, IOUtils.toByteArray( streams.get(0).getStream() ) );
-    } catch (SocketTimeoutException ex) {
-      assumeNoException("Problems retrieving from " + url + " to run the test.", ex);
-    } finally {
-      req.close();
+    try (SolrQueryRequest req = parser.buildRequestFrom( core, new MultiMapSolrParams( args ), streams )) {
+      assertEquals( 1, streams.size() );
+      try (InputStream in = streams.get(0).getStream()) {
+        assertArrayEquals( bytes, IOUtils.toByteArray( in ) );
+      }
     }
   }
   

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/update/AddBlockUpdateTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/update/AddBlockUpdateTest.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/update/AddBlockUpdateTest.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/update/AddBlockUpdateTest.java Wed Sep  2 13:06:13 2015
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
 package org.apache.solr.update;
 
 import java.io.ByteArrayInputStream;
@@ -49,23 +66,6 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.xml.sax.SAXException;
 
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to You under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-
 public class AddBlockUpdateTest extends SolrTestCaseJ4 {
   
   private static final String child = "child_s";

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/update/DocumentBuilderTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/update/DocumentBuilderTest.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/update/DocumentBuilderTest.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/update/DocumentBuilderTest.java Wed Sep  2 13:06:13 2015
@@ -33,6 +33,7 @@ import org.apache.solr.common.SolrExcept
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.SolrInputField;
 import org.apache.solr.core.SolrCore;
+import org.apache.solr.response.ResultContext;
 import org.apache.solr.search.SolrIndexSearcher;
 import org.apache.solr.search.DocList;
 import org.apache.solr.schema.CopyField;
@@ -40,7 +41,6 @@ import org.apache.solr.schema.FieldType;
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
-import org.apache.solr.response.ResultContext;
 
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -345,7 +345,7 @@ public class DocumentBuilderTest extends
       SolrQueryResponse rsp = new SolrQueryResponse();
       core.execute(core.getRequestHandler(req.getParams().get(CommonParams.QT)), req, rsp);
 
-      DocList dl = ((ResultContext) rsp.getValues().get("response")).docs;
+      DocList dl = ((ResultContext) rsp.getValues().get("response")).getDocList();
       assertTrue("can't find the doc we just added", 1 == dl.size());
       int docid = dl.iterator().nextDoc();
 

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/util/PrimUtilsTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/util/PrimUtilsTest.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/util/PrimUtilsTest.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/util/PrimUtilsTest.java Wed Sep  2 13:06:13 2015
@@ -1,6 +1,6 @@
 package org.apache.solr.util;
 
-/**
+/*
  * Copyright 2004 The Apache Software Foundation
  *
  * Licensed under the Apache License, Version 2.0 (the "License");

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/util/TestSolrCLIRunExample.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/util/TestSolrCLIRunExample.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/util/TestSolrCLIRunExample.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/util/TestSolrCLIRunExample.java Wed Sep  2 13:06:13 2015
@@ -158,34 +158,12 @@ public class TestSolrCLIRunExample exten
         }
       } else {
         String cmdLine = joinArgs(cmd.getArguments());
-        if (cmdLine.indexOf("post.jar") != -1) {
-          // invocation of the post.jar file ... we'll just hit the SimplePostTool directly vs. trying to invoke another JVM
-          List<String> argsToSimplePostTool = new ArrayList<String>();
-          boolean afterPostJarArg = false;
-          for (String arg : cmd.getArguments()) {
-            if (arg.startsWith("-D")) {
-              arg = arg.substring(2);
-              int eqPos = arg.indexOf("=");
-              System.setProperty(arg.substring(0,eqPos), arg.substring(eqPos+1));
-            } else {
-              if (arg.endsWith("post.jar")) {
-                afterPostJarArg = true;
-              } else {
-                if (afterPostJarArg) {
-                  argsToSimplePostTool.add(arg);
-                }
-              }
-            }
-          }
-          SimplePostTool.main(argsToSimplePostTool.toArray(new String[0]));
-        } else {
-          log.info("Executing command: "+cmdLine);
-          try {
-            return super.execute(cmd);
-          } catch (Exception exc) {
-            log.error("Execute command ["+cmdLine+"] failed due to: "+exc, exc);
-            throw exc;
-          }
+        log.info("Executing command: "+cmdLine);
+        try {
+          return super.execute(cmd);
+        } catch (Exception exc) {
+          log.error("Execute command ["+cmdLine+"] failed due to: "+exc, exc);
+          throw exc;
         }
       }
 
@@ -367,7 +345,17 @@ public class TestSolrCLIRunExample exten
       SolrQuery query = new SolrQuery("*:*");
       QueryResponse qr = solrClient.query(query);
       long numFound = qr.getResults().getNumFound();
-      assertTrue("expected 32 docs in the "+exampleName+" example but found " + numFound + ", output: " + toolOutput,
+      if (numFound == 0) {
+        // brief wait in case of timing issue in getting the new docs committed
+        log.warn("Going to wait for 1 second before re-trying query for techproduct example docs ...");
+        try {
+          Thread.sleep(1000);
+        } catch (InterruptedException ignore) {
+          Thread.interrupted();
+        }
+        numFound = solrClient.query(query).getResults().getNumFound();
+      }
+      assertTrue("expected 32 docs in the " + exampleName + " example but found " + numFound + ", output: " + toolOutput,
           numFound == 32);
     }
 

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/util/hll/ExplicitHLLTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/util/hll/ExplicitHLLTest.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/util/hll/ExplicitHLLTest.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/util/hll/ExplicitHLLTest.java Wed Sep  2 13:06:13 2015
@@ -22,7 +22,7 @@ import java.util.HashSet;
 import org.apache.lucene.util.LuceneTestCase;
 import org.junit.Test;
 
-import com.carrotsearch.hppc.LongOpenHashSet;
+import com.carrotsearch.hppc.LongHashSet;
 import static com.carrotsearch.randomizedtesting.RandomizedTest.*;
 
 
@@ -214,8 +214,8 @@ public class ExplicitHLLTest extends Luc
      * Asserts that values in both sets are exactly equal.
      */
     private static void assertElementsEqual(final HLL hllA, final HLL hllB) {
-        final LongOpenHashSet internalSetA = hllA.explicitStorage;
-        final LongOpenHashSet internalSetB = hllB.explicitStorage;
+        final LongHashSet internalSetA = hllA.explicitStorage;
+        final LongHashSet internalSetB = hllB.explicitStorage;
 
         assertTrue(internalSetA.equals(internalSetB));
     }

Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/util/hll/HLLSerializationTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/util/hll/HLLSerializationTest.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/util/hll/HLLSerializationTest.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/util/hll/HLLSerializationTest.java Wed Sep  2 13:06:13 2015
@@ -18,6 +18,8 @@
 package org.apache.solr.util.hll;
 
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.TestUtil;
+
 import org.junit.Test;
 
 import static com.carrotsearch.randomizedtesting.RandomizedTest.*;
@@ -27,6 +29,7 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
 import java.util.Random;
+import java.util.EnumSet;
 
 import static org.apache.solr.util.hll.HLL.*;
 
@@ -34,55 +37,190 @@ import static org.apache.solr.util.hll.H
  * Serialization smoke-tests.
  */
 public class HLLSerializationTest extends LuceneTestCase {
-    /**
-     * A smoke-test that covers serialization/deserialization of an HLL
-     * under all possible parameters.
-     */
-    @Test
-    @Slow
-    @Nightly
-    public void serializationSmokeTest() throws Exception {
-        final Random random = new Random(randomLong());
-        final int randomCount = 250;
-        final List<Long> randoms = new ArrayList<Long>(randomCount);
-        for (int i=0; i<randomCount; i++) {
-          randoms.add(random.nextLong());
-      }
-
-        assertCardinality(HLLType.EMPTY, randoms);
-        assertCardinality(HLLType.EXPLICIT, randoms);
-        assertCardinality(HLLType.SPARSE, randoms);
-        assertCardinality(HLLType.FULL, randoms);
+  
+  /**
+   * A smoke-test that covers serialization/deserialization of an HLL
+   * under most of the possible init parameters.
+   */
+  @Test
+  @Slow
+  @Nightly
+  public void serializationSmokeTest() throws Exception {
+    final Random random = new Random(randomLong());
+    final int randomCount = 250;
+    final List<Long> randoms = new ArrayList<Long>(randomCount);
+    for (int i=0; i<randomCount; i++) {
+      randoms.add(random.nextLong());
     }
-
+    
     // NOTE: log2m<=16 was chosen as the max log2m parameter so that the test
     //       completes in a reasonable amount of time. Not much is gained by
-    //       testing larger values - there are no more known serialization
-    //       related edge cases that appear as log2m gets even larger.
-    // NOTE: This test completed successfully with log2m<=MAXIMUM_LOG2M_PARAM
-    //       on 2014-01-30.
-    private static void assertCardinality(final HLLType hllType, final Collection<Long> items)
-           throws CloneNotSupportedException {
-        for(int log2m=MINIMUM_LOG2M_PARAM; log2m<=16; log2m++) {
-            for(int regw=MINIMUM_REGWIDTH_PARAM; regw<=MAXIMUM_REGWIDTH_PARAM; regw++) {
-                for(int expthr=MINIMUM_EXPTHRESH_PARAM; expthr<=MAXIMUM_EXPTHRESH_PARAM; expthr++ ) {
-                    for(final boolean sparse: new boolean[]{true, false}) {
-                        HLL hll = new HLL(log2m, regw, expthr, sparse, hllType);
-                        for(final Long item: items) {
-                            hll.addRaw(item);
-                        }
-                        HLL copy = HLL.fromBytes(hll.toBytes());
-                        assertEquals(copy.cardinality(), hll.cardinality());
-                        assertEquals(copy.getType(), hll.getType());
-                        assertTrue(Arrays.equals(copy.toBytes(), hll.toBytes()));
-
-                        HLL clone = hll.clone();
-                        assertEquals(clone.cardinality(), hll.cardinality());
-                        assertEquals(clone.getType(), hll.getType());
-                        assertTrue(Arrays.equals(clone.toBytes(), hll.toBytes()));
-                    }
-                }
-            }
+    //       testing larger values
+    final int maxLog2m = 16;
+    for (HLLType type : EnumSet.allOf(HLLType.class)) {
+      assertCardinality(type, maxLog2m, randoms);
+    }
+  }
+  
+  /**
+   * A smoke-test that covers serialization/deserialization of HLLs
+   * under the max possible numeric init parameters, iterating over all possible combinations of 
+   * the other params.
+   *
+   * @see #manyValuesHLLSerializationTest
+   */
+  @Test
+  @Slow
+  @Monster("needs roughly -Dtests.heapsize=8g because of the (multiple) massive data structs")
+  public void monsterHLLSerializationTest() throws Exception {
+    final Random random = new Random(randomLong());
+    final int randomCount = 250;
+    final List<Long> randoms = new ArrayList<Long>(randomCount);
+    for (int i=0; i<randomCount; i++) {
+      randoms.add(random.nextLong());
+    }
+
+    for (HLLType type : EnumSet.allOf(HLLType.class)) {
+      for (boolean sparse : new boolean[] {true, false} ) {
+        HLL hll = new HLL(MAXIMUM_LOG2M_PARAM, MAXIMUM_REGWIDTH_PARAM, MAXIMUM_EXPTHRESH_PARAM,
+                          sparse, type);
+        assertCardinality(hll, randoms);
+      }
+    }
+  }
+  
+  /**
+   * A smoke-test that covers serialization/deserialization of a (single) HLL
+   * with random init params and an extremely large number of unique values added to it.
+   *
+   * @see #monsterHLLSerializationTest
+   */
+  @Test
+  @Slow
+  @Monster("may require as much as -Dtests.heapsize=4g depending on random values picked")
+  public void manyValuesHLLSerializationTest() throws Exception {
+
+    final HLLType[] ALL_TYPES = EnumSet.allOf(HLLType.class).toArray(new HLLType[0]);
+    Arrays.sort(ALL_TYPES);
+      
+    final int log2m = TestUtil.nextInt(random(), MINIMUM_LOG2M_PARAM, MAXIMUM_LOG2M_PARAM);
+    final int regwidth = TestUtil.nextInt(random(), MINIMUM_REGWIDTH_PARAM, MAXIMUM_REGWIDTH_PARAM);
+    final int expthresh = TestUtil.nextInt(random(), MINIMUM_EXPTHRESH_PARAM, MAXIMUM_EXPTHRESH_PARAM);
+    final boolean sparse = random().nextBoolean();
+    final HLLType type = ALL_TYPES[TestUtil.nextInt(random(), 0, ALL_TYPES.length-1)];
+    
+    HLL hll = new HLL(log2m, regwidth, expthresh, sparse, type);
+
+    final long NUM_VALS = TestUtil.nextLong(random(), 150000, 1000000);
+    final long MIN_VAL = TestUtil.nextLong(random(), Long.MIN_VALUE, Long.MAX_VALUE-NUM_VALS);
+    final long MAX_VAL = MIN_VAL + NUM_VALS;
+    assert MIN_VAL < MAX_VAL;
+    
+    for (long val = MIN_VAL; val < MAX_VAL; val++) {
+      hll.addRaw(val);
+    }
+    
+    final long expectedCardinality = hll.cardinality();
+    final HLLType expectedType = hll.getType();
+
+    byte[] serializedData = hll.toBytes();
+    hll = null; // allow some GC
+    
+    HLL copy = HLL.fromBytes(serializedData);
+    serializedData = null; // allow some GC
+    
+    assertEquals(expectedCardinality, copy.cardinality());
+    assertEquals(expectedType, copy.getType());
+    
+  }
+  
+  /**
+   * A smoke-test that covers serialization/deserialization of a (single) HLL
+   * with the max possible numeric init parameters, with randomized values for the other params.
+   *
+   * @see #monsterHLLSerializationTest
+   */
+  @Test
+  @Slow
+  @Monster("can require as much as -Dtests.heapsize=4g because of the massive data structs")
+  public void manyValuesMonsterHLLSerializationTest() throws Exception {
+
+    final HLLType[] ALL_TYPES = EnumSet.allOf(HLLType.class).toArray(new HLLType[0]);
+    Arrays.sort(ALL_TYPES);
+      
+    final boolean sparse = random().nextBoolean();
+    final HLLType type = ALL_TYPES[TestUtil.nextInt(random(), 0, ALL_TYPES.length-1)];
+    
+    HLL hll = new HLL(MAXIMUM_LOG2M_PARAM, MAXIMUM_REGWIDTH_PARAM, MAXIMUM_EXPTHRESH_PARAM, sparse, type);
+
+    final long NUM_VALS = TestUtil.nextLong(random(), 150000, 1000000);
+    final long MIN_VAL = TestUtil.nextLong(random(), Long.MIN_VALUE, Long.MAX_VALUE-NUM_VALS);
+    final long MAX_VAL = MIN_VAL + NUM_VALS;
+    assert MIN_VAL < MAX_VAL;
+    
+    for (long val = MIN_VAL; val < MAX_VAL; val++) {
+      hll.addRaw(val);
+    }
+    
+    final long expectedCardinality = hll.cardinality();
+    final HLLType expectedType = hll.getType();
+
+    byte[] serializedData = hll.toBytes();
+    hll = null; // allow some GC
+    
+    HLL copy = HLL.fromBytes(serializedData);
+    serializedData = null; // allow some GC
+    
+    assertEquals(expectedCardinality, copy.cardinality());
+    assertEquals(expectedType, copy.getType());
+    
+  }
+
+  /**
+   * Iterates over all possible constructor args, with the exception of log2m, 
+   * which is only iterated up to the specified max so the test runs in a 
+   * "reasonable" amount of time and ram.
+   */
+  private static void assertCardinality(final HLLType hllType,
+                                        final int maxLog2m,
+                                        final Collection<Long> items) throws CloneNotSupportedException {
+    for(int regw=MINIMUM_REGWIDTH_PARAM; regw<=MAXIMUM_REGWIDTH_PARAM; regw++) {
+      for(int expthr=MINIMUM_EXPTHRESH_PARAM; expthr<=MAXIMUM_EXPTHRESH_PARAM; expthr++ ) {
+        for(final boolean sparse: new boolean[]{true, false}) {
+          for(int log2m=MINIMUM_LOG2M_PARAM; log2m<=maxLog2m; log2m++) {
+            assertCardinality(new HLL(log2m, regw, expthr, sparse, hllType), items);
+          }
         }
+      }
+    }
+  }
+
+  /**
+   * Adds all of the items to the specified hll, then does a round trip serialize/deserialize and confirms
+   * equality of several properties (including the byte serialization). Repeats the process with a clone.
+   */
+  private static void assertCardinality(HLL hll, final Collection<Long> items)
+    throws CloneNotSupportedException {
+    
+    for (final Long item: items) {
+      hll.addRaw(item);
     }
+    
+    final long hllCardinality = hll.cardinality();
+    final HLLType hllType = hll.getType();
+    final byte[] hllBytes = hll.toBytes();
+    hll = null; // allow some GC
+    
+    HLL copy = HLL.fromBytes(hllBytes);
+    assertEquals(copy.cardinality(), hllCardinality);
+    assertEquals(copy.getType(), hllType);
+    assertTrue(Arrays.equals(copy.toBytes(), hllBytes));
+    
+    HLL clone = copy.clone();
+    copy = null; // allow some GC
+    
+    assertEquals(clone.cardinality(), hllCardinality);
+    assertEquals(clone.getType(), hllType);
+    assertTrue(Arrays.equals(clone.toBytes(), hllBytes));
+  }
 }

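All of the refactored helpers above exercise the same round trip: add raw values, serialize with toBytes(), rebuild with HLL.fromBytes(), and compare cardinality, type, and serialized bytes (then repeat with a clone). A condensed sketch of that round trip using only methods visible in this diff; the parameter values (log2m=11, regwidth=5, expthresh=-1) are arbitrary assumptions, and the sketch is placed in the same package as the test purely for simplicity.

package org.apache.solr.util.hll;

import java.util.Arrays;

class HllRoundTripSketch {
  static void roundTrip() throws CloneNotSupportedException {
    // arbitrary example params: log2m=11, regwidth=5, expthresh=-1 (auto), sparse on, FULL type
    HLL hll = new HLL(11, 5, -1, true, HLLType.FULL);
    for (long v = 0; v < 10000; v++) {
      hll.addRaw(v);                      // add raw 64-bit hashed values
    }
    byte[] bytes = hll.toBytes();         // serialize
    HLL copy = HLL.fromBytes(bytes);      // deserialize

    // mirrors the assertions in assertCardinality(HLL, items)
    assert copy.cardinality() == hll.cardinality();
    assert copy.getType() == hll.getType();
    assert Arrays.equals(copy.toBytes(), bytes);

    HLL clone = copy.clone();             // a clone must round-trip identically as well
    assert clone.cardinality() == copy.cardinality();
    assert Arrays.equals(clone.toBytes(), bytes);
  }
}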
Modified: lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/util/hll/SparseHLLTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/util/hll/SparseHLLTest.java?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/util/hll/SparseHLLTest.java (original)
+++ lucene/dev/branches/lucene6699/solr/core/src/test/org/apache/solr/util/hll/SparseHLLTest.java Wed Sep  2 13:06:13 2015
@@ -20,7 +20,7 @@ package org.apache.solr.util.hll;
 import org.apache.lucene.util.LuceneTestCase;
 import org.junit.Test;
 
-import com.carrotsearch.hppc.IntByteOpenHashMap;
+import com.carrotsearch.hppc.IntByteHashMap;
 import com.carrotsearch.hppc.cursors.IntByteCursor;
 import com.carrotsearch.randomizedtesting.RandomizedTest;
 
@@ -393,7 +393,7 @@ public class SparseHLLTest extends Lucen
         for(int run=0; run<100; run++) {
             final HLL hll = new HLL(log2m, regwidth, 128/*explicitThreshold, arbitrary, unused*/, sparseThreshold, HLLType.SPARSE);
 
-            final IntByteOpenHashMap map = new IntByteOpenHashMap();
+            final IntByteHashMap map = new IntByteHashMap();
 
             for(int i=0; i<sparseThreshold; i++) {
                 final long rawValue = RandomizedTest.randomLong();
@@ -423,7 +423,7 @@ public class SparseHLLTest extends Lucen
     private static void assertRegisterPresent(final HLL hll,
                                               final int registerIndex,
                                               final int registerValue) {
-        final IntByteOpenHashMap sparseProbabilisticStorage = hll.sparseProbabilisticStorage;
+        final IntByteHashMap sparseProbabilisticStorage = hll.sparseProbabilisticStorage;
         assertEquals(sparseProbabilisticStorage.get(registerIndex), registerValue);
     }
 
@@ -433,7 +433,7 @@ public class SparseHLLTest extends Lucen
     private static void assertOneRegisterSet(final HLL hll,
                                              final int registerIndex,
                                              final byte registerValue) {
-        final IntByteOpenHashMap sparseProbabilisticStorage = hll.sparseProbabilisticStorage;
+        final IntByteHashMap sparseProbabilisticStorage = hll.sparseProbabilisticStorage;
         assertEquals(sparseProbabilisticStorage.size(), 1);
         assertEquals(sparseProbabilisticStorage.get(registerIndex), registerValue);
     }
@@ -442,8 +442,8 @@ public class SparseHLLTest extends Lucen
      * Asserts that all registers in the two {@link HLL} instances are identical.
      */
     private static void assertElementsEqual(final HLL hllA, final HLL hllB) {
-        final IntByteOpenHashMap sparseProbabilisticStorageA = hllA.sparseProbabilisticStorage;
-        final IntByteOpenHashMap sparseProbabilisticStorageB = hllB.sparseProbabilisticStorage;
+        final IntByteHashMap sparseProbabilisticStorageA = hllA.sparseProbabilisticStorage;
+        final IntByteHashMap sparseProbabilisticStorageB = hllB.sparseProbabilisticStorage;
         assertEquals(sparseProbabilisticStorageA.size(), sparseProbabilisticStorageB.size());
         for (IntByteCursor c : sparseProbabilisticStorageA) {
             assertEquals(sparseProbabilisticStorageA.get(c.key), 

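As in ExplicitHLLTest, the only change here is the HPPC rename (IntByteOpenHashMap becomes IntByteHashMap); the cursor-based iteration used by assertElementsEqual is unchanged. A small sketch of that access pattern, assuming a current HPPC release; the class name IntByteHashMapSketch is made up for illustration.

import com.carrotsearch.hppc.IntByteHashMap;
import com.carrotsearch.hppc.cursors.IntByteCursor;

class IntByteHashMapSketch {
  public static void main(String[] args) {
    IntByteHashMap map = new IntByteHashMap();
    map.put(3, (byte) 7);   // register index -> register value, as in the sparse storage
    map.put(9, (byte) 2);
    // cursor iteration avoids boxing; c.key and c.value are primitives
    for (IntByteCursor c : map) {
      System.out.println(c.key + " -> " + c.value);
    }
    System.out.println(map.size());  // 2
  }
}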
Modified: lucene/dev/branches/lucene6699/solr/example/example-DIH/solr/db/conf/schema.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/example/example-DIH/solr/db/conf/schema.xml?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/example/example-DIH/solr/db/conf/schema.xml (original)
+++ lucene/dev/branches/lucene6699/solr/example/example-DIH/solr/db/conf/schema.xml Wed Sep  2 13:06:13 2015
@@ -292,9 +292,9 @@
    <copyField source="author" dest="author_s"/>
 
    <!-- Above, multiple source fields are copied to the [text] field.
-	  Another way to map multiple source fields to the same
-	  destination field is to use the dynamic field syntax.
-	  copyField also supports a maxChars to copy setting.  -->
+    Another way to map multiple source fields to the same
+    destination field is to use the dynamic field syntax.
+    copyField also supports a maxChars to copy setting.  -->
 
    <!-- <copyField source="*_t" dest="text" maxChars="3000"/> -->
 
@@ -322,8 +322,8 @@
     <!-- sortMissingLast and sortMissingFirst attributes are optional attributes are
          currently supported on types that are sorted internally as strings
          and on numeric types.
-	     This includes "string","boolean", and, as of 3.5 (and 4.x),
-	     int, float, long, date, double, including the "Trie" variants.
+       This includes "string","boolean", and, as of 3.5 (and 4.x),
+       int, float, long, date, double, including the "Trie" variants.
        - If sortMissingLast="true", then a sort on this field will cause documents
          without the field to come after documents with the field,
          regardless of the requested sort order (asc or desc).
@@ -434,9 +434,9 @@
 
     <!-- A general text field that has reasonable, generic
          cross-language defaults: it tokenizes with StandardTokenizer,
-	 removes stop words from case-insensitive "stopwords.txt"
-	 (empty by default), and down cases.  At query time only, it
-	 also applies synonyms. -->
+   removes stop words from case-insensitive "stopwords.txt"
+   (empty by default), and down cases.  At query time only, it
+   also applies synonyms. -->
     <fieldType name="text_general" class="solr.TextField" positionIncrementGap="100">
       <analyzer type="index">
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -472,11 +472,11 @@
                 words="lang/stopwords_en.txt"
                 />
         <filter class="solr.LowerCaseFilterFactory"/>
-	<filter class="solr.EnglishPossessiveFilterFactory"/>
+  <filter class="solr.EnglishPossessiveFilterFactory"/>
         <filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
-	<!-- Optionally you may want to use this less aggressive stemmer instead of PorterStemFilterFactory:
+  <!-- Optionally you may want to use this less aggressive stemmer instead of PorterStemFilterFactory:
         <filter class="solr.EnglishMinimalStemFilterFactory"/>
-	-->
+  -->
         <filter class="solr.PorterStemFilterFactory"/>
       </analyzer>
       <analyzer type="query">
@@ -487,23 +487,23 @@
                 words="lang/stopwords_en.txt"
                 />
         <filter class="solr.LowerCaseFilterFactory"/>
-	<filter class="solr.EnglishPossessiveFilterFactory"/>
+  <filter class="solr.EnglishPossessiveFilterFactory"/>
         <filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
-	<!-- Optionally you may want to use this less aggressive stemmer instead of PorterStemFilterFactory:
+  <!-- Optionally you may want to use this less aggressive stemmer instead of PorterStemFilterFactory:
         <filter class="solr.EnglishMinimalStemFilterFactory"/>
-	-->
+  -->
         <filter class="solr.PorterStemFilterFactory"/>
       </analyzer>
     </fieldType>
 
     <!-- A text field with defaults appropriate for English, plus
-	 aggressive word-splitting and autophrase features enabled.
-	 This field is just like text_en, except it adds
-	 WordDelimiterFilter to enable splitting and matching of
-	 words on case-change, alpha numeric boundaries, and
-	 non-alphanumeric chars.  This means certain compound word
-	 cases will work, for example query "wi fi" will match
-	 document "WiFi" or "wi-fi".
+   aggressive word-splitting and autophrase features enabled.
+   This field is just like text_en, except it adds
+   WordDelimiterFilter to enable splitting and matching of
+   words on case-change, alpha numeric boundaries, and
+   non-alphanumeric chars.  This means certain compound word
+   cases will work, for example query "wi fi" will match
+   document "WiFi" or "wi-fi".
         -->
     <fieldType name="text_en_splitting" class="solr.TextField" positionIncrementGap="100" autoGeneratePhraseQueries="true">
       <analyzer type="index">
@@ -554,7 +554,7 @@
     </fieldType>
 
     <!-- Just like text_general except it reverses the characters of
-	 each token, to enable more efficient leading wildcard queries. -->
+   each token, to enable more efficient leading wildcard queries. -->
     <fieldType name="text_general_rev" class="solr.TextField" positionIncrementGap="100">
       <analyzer type="index">
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -629,10 +629,10 @@
         a token of "foo|1.4"  would be indexed as "foo" with a payload of 1.4f
         Attributes of the DelimitedPayloadTokenFilterFactory : 
          "delimiter" - a one character delimiter. Default is | (pipe)
-	 "encoder" - how to encode the following value into a playload
-	    float -> org.apache.lucene.analysis.payloads.FloatEncoder,
-	    integer -> o.a.l.a.p.IntegerEncoder
-	    identity -> o.a.l.a.p.IdentityEncoder
+   "encoder" - how to encode the following value into a playload
+      float -> org.apache.lucene.analysis.payloads.FloatEncoder,
+      integer -> o.a.l.a.p.IntegerEncoder
+      identity -> o.a.l.a.p.IdentityEncoder
             Fully Qualified class name implementing PayloadEncoder, Encoder must have a no arg constructor.
          -->
         <filter class="solr.DelimitedPayloadTokenFilterFactory" encoder="float"/>
@@ -653,10 +653,10 @@
     -->
     <fieldType name="descendent_path" class="solr.TextField">
       <analyzer type="index">
-	<tokenizer class="solr.PathHierarchyTokenizerFactory" delimiter="/" />
+  <tokenizer class="solr.PathHierarchyTokenizerFactory" delimiter="/" />
       </analyzer>
       <analyzer type="query">
-	<tokenizer class="solr.KeywordTokenizerFactory" />
+  <tokenizer class="solr.KeywordTokenizerFactory" />
       </analyzer>
     </fieldType>
     <!-- 
@@ -665,10 +665,10 @@
     -->
     <fieldType name="ancestor_path" class="solr.TextField">
       <analyzer type="index">
-	<tokenizer class="solr.KeywordTokenizerFactory" />
+  <tokenizer class="solr.KeywordTokenizerFactory" />
       </analyzer>
       <analyzer type="query">
-	<tokenizer class="solr.PathHierarchyTokenizerFactory" delimiter="/" />
+  <tokenizer class="solr.PathHierarchyTokenizerFactory" delimiter="/" />
       </analyzer>
     </fieldType>
 

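The descendent_path and ancestor_path field types above pair PathHierarchyTokenizerFactory on one side of the analysis chain with KeywordTokenizerFactory on the other, so that a stored path is expanded into all of its '/'-delimited prefixes. A rough Java sketch of what the underlying Lucene tokenizer emits, using the Lucene class directly rather than the Solr factory; the input path is an arbitrary example.

import java.io.StringReader;
import org.apache.lucene.analysis.path.PathHierarchyTokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

class PathHierarchySketch {
  public static void main(String[] args) throws Exception {
    // default delimiter is '/'; every ancestor prefix of the input is emitted as a token
    try (PathHierarchyTokenizer tok = new PathHierarchyTokenizer()) {
      tok.setReader(new StringReader("/solr/example/docs"));
      CharTermAttribute term = tok.addAttribute(CharTermAttribute.class);
      tok.reset();
      while (tok.incrementToken()) {
        System.out.println(term.toString()); // /solr, /solr/example, /solr/example/docs
      }
      tok.end();
    }
  }
}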
Modified: lucene/dev/branches/lucene6699/solr/example/example-DIH/solr/db/conf/solrconfig.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/example/example-DIH/solr/db/conf/solrconfig.xml?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/example/example-DIH/solr/db/conf/solrconfig.xml (original)
+++ lucene/dev/branches/lucene6699/solr/example/example-DIH/solr/db/conf/solrconfig.xml Wed Sep  2 13:06:13 2015
@@ -1066,7 +1066,7 @@
       <!-- maximum threshold of documents a query term can appear to be considered for correction -->
       <float name="maxQueryFrequency">0.01</float>
       <!-- uncomment this to require suggestions to occur in 1% of the documents
-      	<float name="thresholdTokenFrequency">.01</float>
+        <float name="thresholdTokenFrequency">.01</float>
       -->
     </lst>
     
@@ -1157,7 +1157,7 @@
   </requestHandler>
 
   <searchComponent name="suggest" class="solr.SuggestComponent">
-  	<lst name="suggester">
+    <lst name="suggester">
       <str name="name">mySuggester</str>
       <str name="lookupImpl">FuzzyLookupFactory</str>      <!-- org.apache.solr.spelling.suggest.fst -->
       <str name="dictionaryImpl">DocumentDictionaryFactory</str>     <!-- org.apache.solr.spelling.suggest.HighFrequencyDictionaryFactory --> 

Modified: lucene/dev/branches/lucene6699/solr/example/example-DIH/solr/db/conf/xslt/example_rss.xsl
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/example/example-DIH/solr/db/conf/xslt/example_rss.xsl?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/example/example-DIH/solr/db/conf/xslt/example_rss.xsl (original)
+++ lucene/dev/branches/lucene6699/solr/example/example-DIH/solr/db/conf/xslt/example_rss.xsl Wed Sep  2 13:06:13 2015
@@ -32,7 +32,7 @@
   <xsl:template match='/'>
     <rss version="2.0">
        <channel>
-	 <title>Example Solr RSS 2.0 Feed</title>
+         <title>Example Solr RSS 2.0 Feed</title>
          <link>http://localhost:8983/solr</link>
          <description>
           This has been formatted by the sample "example_rss.xsl" transform -

Modified: lucene/dev/branches/lucene6699/solr/example/example-DIH/solr/db/conf/xslt/updateXml.xsl
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/example/example-DIH/solr/db/conf/xslt/updateXml.xsl?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/example/example-DIH/solr/db/conf/xslt/updateXml.xsl (original)
+++ lucene/dev/branches/lucene6699/solr/example/example-DIH/solr/db/conf/xslt/updateXml.xsl Wed Sep  2 13:06:13 2015
@@ -49,10 +49,10 @@
       <xsl:variable name="fn" select="@name"/>
       
       <xsl:for-each select="*">
-		<xsl:element name="field">
-		    <xsl:attribute name="name"><xsl:value-of select="$fn"/></xsl:attribute>
-	        <xsl:value-of select="."/>
-		</xsl:element>
+        <xsl:element name="field">
+            <xsl:attribute name="name"><xsl:value-of select="$fn"/></xsl:attribute>
+              <xsl:value-of select="."/>
+        </xsl:element>
       </xsl:for-each>
   </xsl:template>
 
@@ -60,10 +60,10 @@
   <xsl:template match="doc/*">
       <xsl:variable name="fn" select="@name"/>
 
-	<xsl:element name="field">
-	    <xsl:attribute name="name"><xsl:value-of select="$fn"/></xsl:attribute>
+       <xsl:element name="field">
+        <xsl:attribute name="name"><xsl:value-of select="$fn"/></xsl:attribute>
         <xsl:value-of select="."/>
-	</xsl:element>
+       </xsl:element>
   </xsl:template>
 
   <xsl:template match="*"/>

Modified: lucene/dev/branches/lucene6699/solr/example/example-DIH/solr/mail/conf/schema.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/example/example-DIH/solr/mail/conf/schema.xml?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/example/example-DIH/solr/mail/conf/schema.xml (original)
+++ lucene/dev/branches/lucene6699/solr/example/example-DIH/solr/mail/conf/schema.xml Wed Sep  2 13:06:13 2015
@@ -211,10 +211,10 @@
     <copyField source="allTo" dest="text"/>
 
    <!-- Above, multiple source fields are copied to the [text] field. 
-	  Another way to map multiple source fields to the same 
-	  destination field is to use the dynamic field syntax. 
-	  copyField also supports a maxChars to copy setting.  -->
-	   
+    Another way to map multiple source fields to the same 
+    destination field is to use the dynamic field syntax. 
+    copyField also supports a maxChars to copy setting.  -->
+     
    <!-- <copyField source="*_t" dest="text" maxChars="3000"/> -->
 
    <!-- copy name to alphaNameSort, a field designed for sorting by name -->
@@ -241,8 +241,8 @@
     <!-- sortMissingLast and sortMissingFirst attributes are optional attributes are
          currently supported on types that are sorted internally as strings
          and on numeric types.
-	     This includes "string","boolean", and, as of 3.5 (and 4.x),
-	     int, float, long, date, double, including the "Trie" variants.
+       This includes "string","boolean", and, as of 3.5 (and 4.x),
+       int, float, long, date, double, including the "Trie" variants.
        - If sortMissingLast="true", then a sort on this field will cause documents
          without the field to come after documents with the field,
          regardless of the requested sort order (asc or desc).
@@ -353,9 +353,9 @@
 
     <!-- A general text field that has reasonable, generic
          cross-language defaults: it tokenizes with StandardTokenizer,
-	 removes stop words from case-insensitive "stopwords.txt"
-	 (empty by default), and down cases.  At query time only, it
-	 also applies synonyms. -->
+   removes stop words from case-insensitive "stopwords.txt"
+   (empty by default), and down cases.  At query time only, it
+   also applies synonyms. -->
     <fieldType name="text_general" class="solr.TextField" positionIncrementGap="100">
       <analyzer type="index">
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -391,11 +391,11 @@
                 words="lang/stopwords_en.txt"
                 />
         <filter class="solr.LowerCaseFilterFactory"/>
-	<filter class="solr.EnglishPossessiveFilterFactory"/>
+  <filter class="solr.EnglishPossessiveFilterFactory"/>
         <filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
-	<!-- Optionally you may want to use this less aggressive stemmer instead of PorterStemFilterFactory:
+  <!-- Optionally you may want to use this less aggressive stemmer instead of PorterStemFilterFactory:
         <filter class="solr.EnglishMinimalStemFilterFactory"/>
-	-->
+  -->
         <filter class="solr.PorterStemFilterFactory"/>
       </analyzer>
       <analyzer type="query">
@@ -406,23 +406,23 @@
                 words="lang/stopwords_en.txt"
                 />
         <filter class="solr.LowerCaseFilterFactory"/>
-	<filter class="solr.EnglishPossessiveFilterFactory"/>
+  <filter class="solr.EnglishPossessiveFilterFactory"/>
         <filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
-	<!-- Optionally you may want to use this less aggressive stemmer instead of PorterStemFilterFactory:
+  <!-- Optionally you may want to use this less aggressive stemmer instead of PorterStemFilterFactory:
         <filter class="solr.EnglishMinimalStemFilterFactory"/>
-	-->
+  -->
         <filter class="solr.PorterStemFilterFactory"/>
       </analyzer>
     </fieldType>
 
     <!-- A text field with defaults appropriate for English, plus
-	 aggressive word-splitting and autophrase features enabled.
-	 This field is just like text_en, except it adds
-	 WordDelimiterFilter to enable splitting and matching of
-	 words on case-change, alpha numeric boundaries, and
-	 non-alphanumeric chars.  This means certain compound word
-	 cases will work, for example query "wi fi" will match
-	 document "WiFi" or "wi-fi".
+   aggressive word-splitting and autophrase features enabled.
+   This field is just like text_en, except it adds
+   WordDelimiterFilter to enable splitting and matching of
+   words on case-change, alpha numeric boundaries, and
+   non-alphanumeric chars.  This means certain compound word
+   cases will work, for example query "wi fi" will match
+   document "WiFi" or "wi-fi".
         -->
     <fieldType name="text_en_splitting" class="solr.TextField" positionIncrementGap="100" autoGeneratePhraseQueries="true">
       <analyzer type="index">
@@ -473,7 +473,7 @@
     </fieldType>
 
     <!-- Just like text_general except it reverses the characters of
-	 each token, to enable more efficient leading wildcard queries. -->
+   each token, to enable more efficient leading wildcard queries. -->
     <fieldType name="text_general_rev" class="solr.TextField" positionIncrementGap="100">
       <analyzer type="index">
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -548,10 +548,10 @@
         a token of "foo|1.4"  would be indexed as "foo" with a payload of 1.4f
         Attributes of the DelimitedPayloadTokenFilterFactory : 
          "delimiter" - a one character delimiter. Default is | (pipe)
-	 "encoder" - how to encode the following value into a playload
-	    float -> org.apache.lucene.analysis.payloads.FloatEncoder,
-	    integer -> o.a.l.a.p.IntegerEncoder
-	    identity -> o.a.l.a.p.IdentityEncoder
+   "encoder" - how to encode the following value into a playload
+      float -> org.apache.lucene.analysis.payloads.FloatEncoder,
+      integer -> o.a.l.a.p.IntegerEncoder
+      identity -> o.a.l.a.p.IdentityEncoder
             Fully Qualified class name implementing PayloadEncoder, Encoder must have a no arg constructor.
          -->
         <filter class="solr.DelimitedPayloadTokenFilterFactory" encoder="float"/>
@@ -572,10 +572,10 @@
     -->
     <fieldType name="descendent_path" class="solr.TextField">
       <analyzer type="index">
-	<tokenizer class="solr.PathHierarchyTokenizerFactory" delimiter="/" />
+  <tokenizer class="solr.PathHierarchyTokenizerFactory" delimiter="/" />
       </analyzer>
       <analyzer type="query">
-	<tokenizer class="solr.KeywordTokenizerFactory" />
+  <tokenizer class="solr.KeywordTokenizerFactory" />
       </analyzer>
     </fieldType>
     <!-- 
@@ -584,10 +584,10 @@
     -->
     <fieldType name="ancestor_path" class="solr.TextField">
       <analyzer type="index">
-	<tokenizer class="solr.KeywordTokenizerFactory" />
+  <tokenizer class="solr.KeywordTokenizerFactory" />
       </analyzer>
       <analyzer type="query">
-	<tokenizer class="solr.PathHierarchyTokenizerFactory" delimiter="/" />
+  <tokenizer class="solr.PathHierarchyTokenizerFactory" delimiter="/" />
       </analyzer>
     </fieldType>
 

Modified: lucene/dev/branches/lucene6699/solr/example/example-DIH/solr/mail/conf/solrconfig.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6699/solr/example/example-DIH/solr/mail/conf/solrconfig.xml?rev=1700800&r1=1700799&r2=1700800&view=diff
==============================================================================
--- lucene/dev/branches/lucene6699/solr/example/example-DIH/solr/mail/conf/solrconfig.xml (original)
+++ lucene/dev/branches/lucene6699/solr/example/example-DIH/solr/mail/conf/solrconfig.xml Wed Sep  2 13:06:13 2015
@@ -1068,7 +1068,7 @@
       <!-- maximum threshold of documents a query term can appear to be considered for correction -->
       <float name="maxQueryFrequency">0.01</float>
       <!-- uncomment this to require suggestions to occur in 1% of the documents
-      	<float name="thresholdTokenFrequency">.01</float>
+        <float name="thresholdTokenFrequency">.01</float>
       -->
     </lst>
     
@@ -1159,7 +1159,7 @@
   </requestHandler>
 
   <searchComponent name="suggest" class="solr.SuggestComponent">
-  	<lst name="suggester">
+    <lst name="suggester">
       <str name="name">mySuggester</str>
       <str name="lookupImpl">FuzzyLookupFactory</str>      <!-- org.apache.solr.spelling.suggest.fst -->
       <str name="dictionaryImpl">DocumentDictionaryFactory</str>     <!-- org.apache.solr.spelling.suggest.HighFrequencyDictionaryFactory -->