Posted to commits@lucene.apache.org by is...@apache.org on 2017/01/20 13:51:16 UTC

[01/14] lucene-solr:jira/solr-5944: SOLR-9926: Allow passing arbitrary java system properties to zkcli.

Repository: lucene-solr
Updated Branches:
  refs/heads/jira/solr-5944 87c02d7df -> 829f5293b


SOLR-9926: Allow passing arbitrary java system properties to zkcli.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/9f58b6cd
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/9f58b6cd
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/9f58b6cd

Branch: refs/heads/jira/solr-5944
Commit: 9f58b6cd177f72b226c83adbb965cfe08d61d2fb
Parents: 57934ba
Author: markrmiller <ma...@apache.org>
Authored: Wed Jan 18 21:23:36 2017 -0500
Committer: markrmiller <ma...@apache.org>
Committed: Wed Jan 18 21:23:36 2017 -0500

----------------------------------------------------------------------
 solr/CHANGES.txt                            | 2 ++
 solr/server/scripts/cloud-scripts/zkcli.bat | 2 +-
 solr/server/scripts/cloud-scripts/zkcli.sh  | 2 +-
 3 files changed, 4 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9f58b6cd/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 82c3d2b..aab5116 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -100,6 +100,8 @@ New Features
 * SOLR-9836: Add ability to recover from leader when index corruption is detected on SolrCore creation.
   (Mike Drob via Mark Miller)
 
+* SOLR-9926: Allow passing arbitrary java system properties to zkcli. (Hrishikesh Gadre via Mark Miller)
+
 Bug Fixes
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9f58b6cd/solr/server/scripts/cloud-scripts/zkcli.bat
----------------------------------------------------------------------
diff --git a/solr/server/scripts/cloud-scripts/zkcli.bat b/solr/server/scripts/cloud-scripts/zkcli.bat
index 0e4359c..c372685 100644
--- a/solr/server/scripts/cloud-scripts/zkcli.bat
+++ b/solr/server/scripts/cloud-scripts/zkcli.bat
@@ -21,5 +21,5 @@ REM  -DzkCredentialsProvider=org.apache.solr.common.cloud.VMParamsSingleSetCrede
 REM  -DzkDigestUsername=admin-user -DzkDigestPassword=CHANGEME-ADMIN-PASSWORD ^
 REM  -DzkDigestReadonlyUsername=readonly-user -DzkDigestReadonlyPassword=CHANGEME-READONLY-PASSWORD
 
-"%JVM%" %SOLR_ZK_CREDS_AND_ACLS% -Dlog4j.configuration="%LOG4J_CONFIG%" ^
+"%JVM%" %SOLR_ZK_CREDS_AND_ACLS% %ZKCLI_JVM_FLAGS% -Dlog4j.configuration="%LOG4J_CONFIG%" ^
 -classpath "%SDIR%\..\..\solr-webapp\webapp\WEB-INF\lib\*;%SDIR%\..\..\lib\ext\*" org.apache.solr.cloud.ZkCLI %*

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9f58b6cd/solr/server/scripts/cloud-scripts/zkcli.sh
----------------------------------------------------------------------
diff --git a/solr/server/scripts/cloud-scripts/zkcli.sh b/solr/server/scripts/cloud-scripts/zkcli.sh
index e37b6da..df43265 100755
--- a/solr/server/scripts/cloud-scripts/zkcli.sh
+++ b/solr/server/scripts/cloud-scripts/zkcli.sh
@@ -21,6 +21,6 @@ fi
 #  -DzkDigestUsername=admin-user -DzkDigestPassword=CHANGEME-ADMIN-PASSWORD \
 #  -DzkDigestReadonlyUsername=readonly-user -DzkDigestReadonlyPassword=CHANGEME-READONLY-PASSWORD"
 
-PATH=$JAVA_HOME/bin:$PATH $JVM $SOLR_ZK_CREDS_AND_ACLS  -Dlog4j.configuration=$log4j_config \
+PATH=$JAVA_HOME/bin:$PATH $JVM $SOLR_ZK_CREDS_AND_ACLS $ZKCLI_JVM_FLAGS -Dlog4j.configuration=$log4j_config \
 -classpath "$sdir/../../solr-webapp/webapp/WEB-INF/lib/*:$sdir/../../lib/ext/*" org.apache.solr.cloud.ZkCLI ${1+"$@"}
 


[13/14] lucene-solr:jira/solr-5944: SOLR-9996: Unstored IntPointField returns Long type

Posted by is...@apache.org.
SOLR-9996: Unstored IntPointField returns Long type


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/69055aa4
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/69055aa4
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/69055aa4

Branch: refs/heads/jira/solr-5944
Commit: 69055aa4a82d144dc04bf10547912ccc4a7011df
Parents: f57e017
Author: Ishan Chattopadhyaya <is...@apache.org>
Authored: Fri Jan 20 19:08:05 2017 +0530
Committer: Ishan Chattopadhyaya <is...@apache.org>
Committed: Fri Jan 20 19:08:05 2017 +0530

----------------------------------------------------------------------
 solr/CHANGES.txt                                         |  2 ++
 .../src/java/org/apache/solr/schema/IntPointField.java   |  2 +-
 .../src/test/org/apache/solr/schema/TestPointFields.java | 11 +++++++++++
 3 files changed, 14 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/69055aa4/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index c0fe505..748125a 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -74,6 +74,8 @@ Optimizations
 * SOLR-9584: Support Solr being proxied with another endpoint than default /solr, by using relative links
   in AdminUI javascripts (Yun Jie Zhou via janhoy)
 
+* SOLR-9996: Unstored IntPointField returns Long type (Ishan Chattopadhyaya)
+
 Other Changes
 ----------------------
 * SOLR-8396: Add support for PointFields in Solr (Ishan Chattopadhyaya, Tomás Fernández Löbbe)

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/69055aa4/solr/core/src/java/org/apache/solr/schema/IntPointField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/IntPointField.java b/solr/core/src/java/org/apache/solr/schema/IntPointField.java
index a7bab07..2271282 100644
--- a/solr/core/src/java/org/apache/solr/schema/IntPointField.java
+++ b/solr/core/src/java/org/apache/solr/schema/IntPointField.java
@@ -89,7 +89,7 @@ public class IntPointField extends PointField implements IntValueFieldType {
   public Object toObject(IndexableField f) {
     final Number val = f.numericValue();
     if (val != null) {
-      return val;
+      return val.intValue();
     } else {
       throw new AssertionError("Unexpected state. Field: '" + f + "'");
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/69055aa4/solr/core/src/test/org/apache/solr/schema/TestPointFields.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/TestPointFields.java b/solr/core/src/test/org/apache/solr/schema/TestPointFields.java
index 12f1504..8fb6926 100644
--- a/solr/core/src/test/org/apache/solr/schema/TestPointFields.java
+++ b/solr/core/src/test/org/apache/solr/schema/TestPointFields.java
@@ -785,6 +785,11 @@ public class TestPointFields extends SolrTestCaseJ4 {
     for (int i=0; i < values.length; i++) {
       assertU(adoc("id", String.valueOf(i), field, values[i]));
     }
+    // Check using RTG
+    for (int i = 0; i < values.length; i++) {
+      assertQ(req("qt", "/get", "id", String.valueOf(i)),
+      "//doc/" + type + "[@name='" + field + "'][.='" + values[i] + "']");
+    }
     assertU(commit());
     String[] expected = new String[values.length + 1];
     expected[0] = "//*[@numFound='" + values.length + "']"; 
@@ -792,6 +797,12 @@ public class TestPointFields extends SolrTestCaseJ4 {
       expected[i] = "//result/doc[" + i + "]/" + type + "[@name='" + field + "'][.='" + values[i-1] + "']";
     }
     assertQ(req("q", "*:*", "fl", "id, " + field, "rows", String.valueOf(values.length)), expected);
+
+    // Check using RTG
+    for (int i = 0; i < values.length; i++) {
+      assertQ(req("qt", "/get", "id", String.valueOf(i)),
+      "//doc/" + type + "[@name='" + field + "'][.='" + values[i] + "']");
+    }
   }
 
   private void doTestIntPointFieldRangeQuery(String fieldName, String type, boolean testLong) throws Exception {


[12/14] lucene-solr:jira/solr-5944: LUCENE-7643: Fix leftover.

Posted by is...@apache.org.
LUCENE-7643: Fix leftover.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/f57e0177
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/f57e0177
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/f57e0177

Branch: refs/heads/jira/solr-5944
Commit: f57e0177ffd3f367de81bdf7f2ad67ad0f94264a
Parents: 71ca2a8
Author: Adrien Grand <jp...@gmail.com>
Authored: Fri Jan 20 13:47:29 2017 +0100
Committer: Adrien Grand <jp...@gmail.com>
Committed: Fri Jan 20 13:47:29 2017 +0100

----------------------------------------------------------------------
 lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f57e0177/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java b/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java
index f1b8551..7c997ca 100644
--- a/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java
@@ -281,7 +281,7 @@ public abstract class PointRangeQuery extends Query {
 
             @Override
             public Scorer get(boolean randomAccess) throws IOException {
-              if (false && values.getDocCount() == reader.maxDoc()
+              if (values.getDocCount() == reader.maxDoc()
                   && values.getDocCount() == values.size()
                   && cost() > reader.maxDoc() / 2) {
                 // If all docs have exactly one value and the cost is greater


[09/14] lucene-solr:jira/solr-5944: LUCENE-7640: Fix test bug.

Posted by is...@apache.org.
LUCENE-7640: Fix test bug.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/a2131a9e
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/a2131a9e
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/a2131a9e

Branch: refs/heads/jira/solr-5944
Commit: a2131a9e1e3a22dec3ab2185c06999edac3e2f73
Parents: eba9390
Author: Adrien Grand <jp...@gmail.com>
Authored: Thu Jan 19 20:02:18 2017 +0100
Committer: Adrien Grand <jp...@gmail.com>
Committed: Thu Jan 19 20:02:18 2017 +0100

----------------------------------------------------------------------
 lucene/core/src/test/org/apache/lucene/util/bkd/TestBKD.java | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a2131a9e/lucene/core/src/test/org/apache/lucene/util/bkd/TestBKD.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/util/bkd/TestBKD.java b/lucene/core/src/test/org/apache/lucene/util/bkd/TestBKD.java
index f01f058..fecdaa5 100644
--- a/lucene/core/src/test/org/apache/lucene/util/bkd/TestBKD.java
+++ b/lucene/core/src/test/org/apache/lucene/util/bkd/TestBKD.java
@@ -1183,8 +1183,8 @@ public class TestBKD extends LuceneTestCase {
           
           @Override
           public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
-            if (StringHelper.compare(3, uniquePointValue, 0, maxPackedValue, 0) > 0 ||
-                StringHelper.compare(3, uniquePointValue, 0, minPackedValue, 0) < 0) {
+            if (StringHelper.compare(numBytesPerDim, uniquePointValue, 0, maxPackedValue, 0) > 0 ||
+                StringHelper.compare(numBytesPerDim, uniquePointValue, 0, minPackedValue, 0) < 0) {
               return Relation.CELL_OUTSIDE_QUERY;
             }
             return Relation.CELL_CROSSES_QUERY;


[08/14] lucene-solr:jira/solr-5944: SOLR-10001: Fix overseer-roles test bug

Posted by is...@apache.org.
SOLR-10001: Fix overseer-roles test bug


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/eba93909
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/eba93909
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/eba93909

Branch: refs/heads/jira/solr-5944
Commit: eba9390965bcf6b2422524a5628a160ce26c1226
Parents: a14d793
Author: Alan Woodward <ro...@apache.org>
Authored: Thu Jan 19 17:16:42 2017 +0000
Committer: Alan Woodward <ro...@apache.org>
Committed: Thu Jan 19 17:46:03 2017 +0000

----------------------------------------------------------------------
 .../solr/cloud/CollectionsAPISolrJTest.java     |  34 ----
 .../apache/solr/cloud/OverseerRolesTest.java    | 173 +++++++++----------
 2 files changed, 77 insertions(+), 130 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eba93909/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java
index 616b657..3e0d840 100644
--- a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java
@@ -21,7 +21,6 @@ import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Collections;
-import java.util.List;
 import java.util.Map;
 
 import org.apache.lucene.util.LuceneTestCase;
@@ -255,39 +254,6 @@ public class CollectionsAPISolrJTest extends SolrCloudTestCase {
   }
 
   @Test
-  @SuppressWarnings("unchecked")
-  public void testAddAndRemoveRole() throws InterruptedException, IOException, SolrServerException {
-
-    String node = cluster.getRandomJetty(random()).getNodeName();
-
-    CollectionAdminRequest.addRole(node, "overseer").process(cluster.getSolrClient());
-
-    CollectionAdminResponse response = CollectionAdminRequest.getClusterStatus().process(cluster.getSolrClient());
-
-    NamedList<Object> rsp = response.getResponse();
-    NamedList<Object> cs = (NamedList<Object>) rsp.get("cluster");
-    assertNotNull("Cluster state should not be null", cs);
-    Map<String, Object> roles = (Map<String, Object>) cs.get("roles");
-    assertNotNull("Role information should not be null", roles);
-    List<String> overseer = (List<String>) roles.get("overseer");
-    assertNotNull(overseer);
-    assertEquals(1, overseer.size());
-    assertTrue(overseer.contains(node));
-    
-    // Remove role
-    CollectionAdminRequest.removeRole(node, "overseer").process(cluster.getSolrClient());
-
-    response = CollectionAdminRequest.getClusterStatus().process(cluster.getSolrClient());
-    rsp = response.getResponse();
-    cs = (NamedList<Object>) rsp.get("cluster");
-    assertNotNull("Cluster state should not be null", cs);
-    roles = (Map<String, Object>) cs.get("roles");
-    assertNotNull("Role information should not be null", roles);
-    overseer = (List<String>) roles.get("overseer");
-    assertFalse(overseer.contains(node));
-  }
-
-  @Test
   public void testOverseerStatus() throws IOException, SolrServerException {
     CollectionAdminResponse response = new CollectionAdminRequest.OverseerStatus().process(cluster.getSolrClient());
     assertEquals(0, response.getStatus());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eba93909/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java
index 762bbeb..3c2ca87 100644
--- a/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java
@@ -17,28 +17,27 @@
 package org.apache.solr.cloud;
 
 import java.lang.invoke.MethodHandles;
+import java.net.URL;
 import java.util.Collections;
 import java.util.List;
-import java.util.Map;
+import java.util.Objects;
 import java.util.concurrent.TimeUnit;
+import java.util.function.Predicate;
 
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.cloud.overseer.OverseerAction;
-import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZkNodeProps;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.util.TimeOut;
-import org.apache.zookeeper.data.Stat;
-import org.junit.Before;
+import org.apache.zookeeper.KeeperException;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static org.apache.solr.cloud.OverseerCollectionConfigSetProcessor.getLeaderNode;
-import static org.apache.solr.cloud.OverseerCollectionConfigSetProcessor.getSortedOverseerNodeNames;
-import static org.hamcrest.CoreMatchers.not;
+import static org.apache.solr.cloud.OverseerTaskProcessor.getSortedElectionNodes;
 
 public class OverseerRolesTest extends SolrCloudTestCase {
 
@@ -51,117 +50,99 @@ public class OverseerRolesTest extends SolrCloudTestCase {
         .configure();
   }
 
-  @Before
-  public void clearAllOverseerRoles() throws Exception {
-    for (String node : OverseerCollectionConfigSetProcessor.getSortedOverseerNodeNames(zkClient())) {
-      CollectionAdminRequest.removeRole(node, "overseer").process(cluster.getSolrClient());
+  private void waitForNewOverseer(int seconds, Predicate<String> state) throws Exception {
+    TimeOut timeout = new TimeOut(seconds, TimeUnit.SECONDS);
+    String current = null;
+    while (timeout.hasTimedOut() == false) {
+      current = OverseerCollectionConfigSetProcessor.getLeaderNode(zkClient());
+      if (state.test(current))
+        return;
+      Thread.sleep(100);
     }
+    fail("Timed out waiting for overseer state change");
   }
 
-  @Test
-  public void testQuitCommand() throws Exception {
-
-    SolrZkClient zk = zkClient();
-    byte[] data = zk.getData("/overseer_elect/leader", null, new Stat(), true);
-    Map m = (Map) Utils.fromJSON(data);
-    String s = (String) m.get("id");
-    String leader = LeaderElector.getNodeName(s);
-    log.info("Current overseer: {}", leader);
-    Overseer.getStateUpdateQueue(zk)
-        .offer(Utils.toJSON(new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.QUIT.toLower(),
-                                            "id", s)));
-    final TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS);
-    String newLeader = null;
-    for(;! timeout.hasTimedOut();){
-      newLeader = OverseerCollectionConfigSetProcessor.getLeaderNode(zk);
-      if (newLeader != null && !newLeader.equals(leader))
-        break;
-      Thread.sleep(100);
+  private void waitForNewOverseer(int seconds, String expected) throws Exception {
+    waitForNewOverseer(seconds, s -> Objects.equals(s, expected));
+  }
+
+  private JettySolrRunner getOverseerJetty() throws Exception {
+    String overseer = getLeaderNode(zkClient());
+    URL overseerUrl = new URL("http://" + overseer.substring(0, overseer.indexOf('_')));
+    int hostPort = overseerUrl.getPort();
+    for (JettySolrRunner jetty : cluster.getJettySolrRunners()) {
+      if (jetty.getBaseUrl().getPort() == hostPort)
+        return jetty;
     }
-    assertThat("Leader not changed yet", newLeader, not(leader));
+    fail("Couldn't find overseer node " + overseer);
+    return null; // to keep the compiler happy
+  }
 
-    assertTrue("The old leader should have rejoined election",
-        OverseerCollectionConfigSetProcessor.getSortedOverseerNodeNames(zk).contains(leader));
+  private void logOverseerState() throws KeeperException, InterruptedException {
+    log.info("Overseer: {}", getLeaderNode(zkClient()));
+    log.info("Election queue: ", getSortedElectionNodes(zkClient(), "/overseer_elect/election"));
   }
 
   @Test
   public void testOverseerRole() throws Exception {
 
-    List<String> l = OverseerCollectionConfigSetProcessor.getSortedOverseerNodeNames(zkClient()) ;
+    logOverseerState();
+    List<String> nodes = OverseerCollectionConfigSetProcessor.getSortedOverseerNodeNames(zkClient());
+    String overseer1 = OverseerCollectionConfigSetProcessor.getLeaderNode(zkClient());
+    nodes.remove(overseer1);
 
-    log.info("All nodes {}", l);
-    String currentLeader = OverseerCollectionConfigSetProcessor.getLeaderNode(zkClient());
-    log.info("Current leader {} ", currentLeader);
-    l.remove(currentLeader);
+    Collections.shuffle(nodes, random());
+    String overseer2 = nodes.get(0);
+    log.info("### Setting overseer designate {}", overseer2);
 
-    Collections.shuffle(l, random());
-    String overseerDesignate = l.get(0);
-    log.info("overseerDesignate {}", overseerDesignate);
+    CollectionAdminRequest.addRole(overseer2, "overseer").process(cluster.getSolrClient());
 
-    CollectionAdminRequest.addRole(overseerDesignate, "overseer").process(cluster.getSolrClient());
-
-    TimeOut timeout = new TimeOut(15, TimeUnit.SECONDS);
-
-    boolean leaderchanged = false;
-    for (;!timeout.hasTimedOut();) {
-      if (overseerDesignate.equals(OverseerCollectionConfigSetProcessor.getLeaderNode(zkClient()))) {
-        log.info("overseer designate is the new overseer");
-        leaderchanged =true;
-        break;
-      }
-      Thread.sleep(100);
-    }
-    assertTrue("could not set the new overseer . expected "+
-        overseerDesignate + " current order : " +
-        getSortedOverseerNodeNames(zkClient()) +
-        " ldr :"+ OverseerCollectionConfigSetProcessor.getLeaderNode(zkClient()) ,leaderchanged);
+    waitForNewOverseer(15, overseer2);
 
     //add another node as overseer
-    l.remove(overseerDesignate);
-    Collections.shuffle(l, random());
-
-    String anotherOverseer = l.get(0);
-    log.info("Adding another overseer designate {}", anotherOverseer);
-    CollectionAdminRequest.addRole(anotherOverseer, "overseer").process(cluster.getSolrClient());
-
-    String currentOverseer = getLeaderNode(zkClient());
+    nodes.remove(overseer2);
+    Collections.shuffle(nodes, random());
 
-    log.info("Current Overseer {}", currentOverseer);
+    String overseer3 = nodes.get(0);
+    log.info("### Adding another overseer designate {}", overseer3);
+    CollectionAdminRequest.addRole(overseer3, "overseer").process(cluster.getSolrClient());
 
-    String hostPort = currentOverseer.substring(0, currentOverseer.indexOf('_'));
+    // kill the current overseer, and check that the new designate becomes the new overseer
+    JettySolrRunner leaderJetty = getOverseerJetty();
+    logOverseerState();
 
-    StringBuilder sb = new StringBuilder();
-    log.info("hostPort : {}", hostPort);
-
-    JettySolrRunner leaderJetty = null;
+    ChaosMonkey.stop(leaderJetty);
+    waitForNewOverseer(10, overseer3);
+
+    // add another node as overseer
+    nodes.remove(overseer3);
+    Collections.shuffle(nodes, random());
+    String overseer4 = nodes.get(0);
+    log.info("### Adding last overseer designate {}", overseer4);
+    CollectionAdminRequest.addRole(overseer4, "overseer").process(cluster.getSolrClient());
+    logOverseerState();
+
+    // remove the overseer role from the current overseer
+    CollectionAdminRequest.removeRole(overseer3, "overseer").process(cluster.getSolrClient());
+    waitForNewOverseer(15, overseer4);
+
+    // Add it back again - we now have two delegates, 4 and 3
+    CollectionAdminRequest.addRole(overseer3, "overseer").process(cluster.getSolrClient());
+
+    // explicitly tell the overseer to quit
+    String leaderId = OverseerCollectionConfigSetProcessor.getLeaderId(zkClient());
+    String leader = OverseerCollectionConfigSetProcessor.getLeaderNode(zkClient());
+    log.info("### Sending QUIT to overseer {}", leader);
+    Overseer.getStateUpdateQueue(zkClient())
+        .offer(Utils.toJSON(new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.QUIT.toLower(),
+            "id", leaderId)));
 
-    for (JettySolrRunner jetty : cluster.getJettySolrRunners()) {
-      String s = jetty.getBaseUrl().toString();
-      log.info("jetTy {}",s);
-      sb.append(s).append(" , ");
-      if (s.contains(hostPort)) {
-        leaderJetty = jetty;
-        break;
-      }
-    }
+    waitForNewOverseer(10, s -> Objects.equals(leader, s) == false);
 
-    assertNotNull("Could not find a jetty2 kill",  leaderJetty);
+    logOverseerState();
+    assertTrue("The old leader should have rejoined election",
+        OverseerCollectionConfigSetProcessor.getSortedOverseerNodeNames(zkClient()).contains(leader));
 
-    log.info("leader node {}", leaderJetty.getBaseUrl());
-    log.info("current election Queue",
-        OverseerCollectionConfigSetProcessor.getSortedElectionNodes(zkClient(), "/overseer_elect/election"));
-    ChaosMonkey.stop(leaderJetty);
-    timeout = new TimeOut(10, TimeUnit.SECONDS);
-    leaderchanged = false;
-    for (; !timeout.hasTimedOut(); ) {
-      currentOverseer = getLeaderNode(zkClient());
-      if (anotherOverseer.equals(currentOverseer)) {
-        leaderchanged = true;
-        break;
-      }
-      Thread.sleep(100);
-    }
-    assertTrue("New overseer designate has not become the overseer, expected : " + anotherOverseer + "actual : " + getLeaderNode(zkClient()), leaderchanged);
   }
 
 }


[10/14] lucene-solr:jira/solr-5944: LUCENE-7643: Move IndexOrDocValuesQuery to core.

Posted by is...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71ca2a84/solr/core/src/java/org/apache/solr/schema/EnumField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/EnumField.java b/solr/core/src/java/org/apache/solr/schema/EnumField.java
index 967070c..5723206 100644
--- a/solr/core/src/java/org/apache/solr/schema/EnumField.java
+++ b/solr/core/src/java/org/apache/solr/schema/EnumField.java
@@ -43,7 +43,6 @@ import org.apache.lucene.legacy.LegacyNumericUtils;
 import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.queries.function.valuesource.EnumFieldSource;
 import org.apache.lucene.search.ConstantScoreQuery;
-import org.apache.lucene.search.DocValuesRangeQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.util.BytesRef;
@@ -253,10 +252,21 @@ public class EnumField extends PrimitiveFieldType {
     Query query = null;
     final boolean matchOnly = field.hasDocValues() && !field.indexed();
     if (matchOnly) {
-      query = new ConstantScoreQuery(DocValuesRangeQuery.newLongRange(field.getName(),
-              min == null ? null : minValue.longValue(),
-              max == null ? null : maxValue.longValue(),
-              minInclusive, maxInclusive));
+      long lowerValue = Long.MIN_VALUE;
+      long upperValue = Long.MAX_VALUE;
+      if (minValue != null) {
+        lowerValue = minValue.longValue();
+        if (minInclusive == false) {
+          ++lowerValue;
+        }
+      }
+      if (maxValue != null) {
+        upperValue = maxValue.longValue();
+        if (maxInclusive == false) {
+          --upperValue;
+        }
+      }
+      query = new ConstantScoreQuery(NumericDocValuesField.newRangeQuery(field.getName(), lowerValue, upperValue));
     } else {
       query = LegacyNumericRangeQuery.newIntRange(field.getName(), DEFAULT_PRECISION_STEP,
           min == null ? null : minValue,

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71ca2a84/solr/core/src/java/org/apache/solr/schema/FieldType.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/FieldType.java b/solr/core/src/java/org/apache/solr/schema/FieldType.java
index 3922edc..54f882f 100644
--- a/solr/core/src/java/org/apache/solr/schema/FieldType.java
+++ b/solr/core/src/java/org/apache/solr/schema/FieldType.java
@@ -36,13 +36,13 @@ import org.apache.lucene.analysis.util.CharFilterFactory;
 import org.apache.lucene.analysis.util.TokenFilterFactory;
 import org.apache.lucene.analysis.util.TokenizerFactory;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.document.SortedSetDocValuesField;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.legacy.LegacyNumericType;
 import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.DocValuesRangeQuery;
 import org.apache.lucene.search.DocValuesRewriteMethod;
 import org.apache.lucene.search.MultiTermQuery;
 import org.apache.lucene.search.PrefixQuery;
@@ -720,17 +720,17 @@ public abstract class FieldType extends FieldProperties {
    */
   public Query getRangeQuery(QParser parser, SchemaField field, String part1, String part2, boolean minInclusive, boolean maxInclusive) {
     // TODO: change these all to use readableToIndexed/bytes instead (e.g. for unicode collation)
+    final BytesRef miValue = part1 == null ? null : new BytesRef(toInternal(part1));
+    final BytesRef maxValue = part2 == null ? null : new BytesRef(toInternal(part2));
     if (field.hasDocValues() && !field.indexed()) {
-      return DocValuesRangeQuery.newBytesRefRange(
-          field.getName(),
-          part1 == null ? null : new BytesRef(toInternal(part1)),
-          part2 == null ? null : new BytesRef(toInternal(part2)),
-          minInclusive, maxInclusive);
+      return SortedSetDocValuesField.newRangeQuery(
+            field.getName(),
+            miValue, maxValue,
+            minInclusive, maxInclusive);
     } else {
       SolrRangeQuery rangeQuery = new SolrRangeQuery(
             field.getName(),
-            part1 == null ? null : new BytesRef(toInternal(part1)),
-            part2 == null ? null : new BytesRef(toInternal(part2)),
+            miValue, maxValue,
             minInclusive, maxInclusive);
       return rangeQuery;
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71ca2a84/solr/core/src/java/org/apache/solr/schema/TrieField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/TrieField.java b/solr/core/src/java/org/apache/solr/schema/TrieField.java
index 0e8324c..57dbeff 100644
--- a/solr/core/src/java/org/apache/solr/schema/TrieField.java
+++ b/solr/core/src/java/org/apache/solr/schema/TrieField.java
@@ -43,7 +43,7 @@ import org.apache.lucene.queries.function.valuesource.DoubleFieldSource;
 import org.apache.lucene.queries.function.valuesource.FloatFieldSource;
 import org.apache.lucene.queries.function.valuesource.IntFieldSource;
 import org.apache.lucene.queries.function.valuesource.LongFieldSource;
-import org.apache.lucene.search.DocValuesRangeQuery;
+import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.SortedSetSelector;
@@ -376,9 +376,9 @@ public class TrieField extends PrimitiveFieldType {
     switch (type) {
       case INTEGER:
         if (matchOnly) {
-          query = DocValuesRangeQuery.newLongRange(field.getName(),
-                min == null ? null : (long) Integer.parseInt(min),
-                max == null ? null : (long) Integer.parseInt(max),
+          query = numericDocValuesRangeQuery(field.getName(),
+                min == null ? null : Integer.parseInt(min),
+                max == null ? null : Integer.parseInt(max),
                 minInclusive, maxInclusive);
         } else {
           query = LegacyNumericRangeQuery.newIntRange(field.getName(), ps,
@@ -399,7 +399,7 @@ public class TrieField extends PrimitiveFieldType {
         break;
       case LONG:
         if (matchOnly) {
-          query = DocValuesRangeQuery.newLongRange(field.getName(),
+          query = numericDocValuesRangeQuery(field.getName(),
                 min == null ? null : Long.parseLong(min),
                 max == null ? null : Long.parseLong(max),
                 minInclusive, maxInclusive);
@@ -422,7 +422,7 @@ public class TrieField extends PrimitiveFieldType {
         break;
       case DATE:
         if (matchOnly) {
-          query = DocValuesRangeQuery.newLongRange(field.getName(),
+          query = numericDocValuesRangeQuery(field.getName(),
                 min == null ? null : DateMathParser.parseMath(null, min).getTime(),
                 max == null ? null : DateMathParser.parseMath(null, max).getTime(),
                 minInclusive, maxInclusive);
@@ -440,6 +440,35 @@ public class TrieField extends PrimitiveFieldType {
     return query;
   }
 
+  private static Query numericDocValuesRangeQuery(
+      String field,
+      Number lowerValue, Number upperValue,
+      boolean lowerInclusive, boolean upperInclusive) {
+
+    long actualLowerValue = Long.MIN_VALUE;
+    if (lowerValue != null) {
+      actualLowerValue = lowerValue.longValue();
+      if (lowerInclusive == false) {
+        if (actualLowerValue == Long.MAX_VALUE) {
+          return new MatchNoDocsQuery();
+        }
+        ++actualLowerValue;
+      }
+    }
+
+    long actualUpperValue = Long.MAX_VALUE;
+    if (upperValue != null) {
+      actualUpperValue = upperValue.longValue();
+      if (upperInclusive == false) {
+        if (actualUpperValue == Long.MIN_VALUE) {
+          return new MatchNoDocsQuery();
+        }
+        --actualUpperValue;
+      }
+    }
+    return NumericDocValuesField.newRangeQuery(field, actualLowerValue, actualUpperValue);
+  }
+
   private static long FLOAT_NEGATIVE_INFINITY_BITS = (long)Float.floatToIntBits(Float.NEGATIVE_INFINITY);
   private static long DOUBLE_NEGATIVE_INFINITY_BITS = Double.doubleToLongBits(Double.NEGATIVE_INFINITY);
   private static long FLOAT_POSITIVE_INFINITY_BITS = (long)Float.floatToIntBits(Float.POSITIVE_INFINITY);
@@ -476,10 +505,10 @@ public class TrieField extends PrimitiveFieldType {
     } else { // If both max and min are negative (or -0d), then issue range query with max and min reversed
       if ((minVal == null || minVal.doubleValue() < 0d || minBits == minusZeroBits) &&
           (maxVal != null && (maxVal.doubleValue() < 0d || maxBits == minusZeroBits))) {
-        query = DocValuesRangeQuery.newLongRange
+        query = numericDocValuesRangeQuery
             (fieldName, maxBits, (min == null ? negativeInfinityBits : minBits), maxInclusive, minInclusive);
       } else { // If both max and min are positive, then issue range query
-        query = DocValuesRangeQuery.newLongRange
+        query = numericDocValuesRangeQuery
             (fieldName, minBits, (max == null ? positiveInfinityBits : maxBits), minInclusive, maxInclusive);
       }
     }
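
For docValues-only fields the range query is now built directly on NumericDocValuesField, and the private
helper above folds the nullable, possibly exclusive bounds of Solr's range-query API into the inclusive long
range that newRangeQuery expects, short-circuiting to MatchNoDocsQuery when an exclusive bound cannot be
tightened without overflow. A self-contained sketch of the same conversion, using the
NumericDocValuesField.newRangeQuery signature that appears in this diff; it is illustrative, not a drop-in
for TrieField:

    import org.apache.lucene.document.NumericDocValuesField;
    import org.apache.lucene.search.MatchNoDocsQuery;
    import org.apache.lucene.search.Query;

    public final class DocValuesRangeSketch {
      // Convert nullable, possibly exclusive bounds into the inclusive range newRangeQuery takes.
      public static Query rangeQuery(String field, Long lower, Long upper,
                                     boolean lowerInclusive, boolean upperInclusive) {
        long lo = Long.MIN_VALUE;
        if (lower != null) {
          lo = lower;
          if (!lowerInclusive) {
            if (lo == Long.MAX_VALUE) return new MatchNoDocsQuery(); // nothing above MAX_VALUE
            ++lo;
          }
        }
        long hi = Long.MAX_VALUE;
        if (upper != null) {
          hi = upper;
          if (!upperInclusive) {
            if (hi == Long.MIN_VALUE) return new MatchNoDocsQuery(); // nothing below MIN_VALUE
            --hi;
          }
        }
        return NumericDocValuesField.newRangeQuery(field, lo, hi);
      }
    }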


[05/14] lucene-solr:jira/solr-5944: LUCENE-7055: Make sure to use the same reader to create the weight and pull the scorers.

Posted by is...@apache.org.
LUCENE-7055: Make sure to use the same reader to create the weight and pull the scorers.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/e8fa5990
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/e8fa5990
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/e8fa5990

Branch: refs/heads/jira/solr-5944
Commit: e8fa59904c99b7c09a89a4b2f79699ff5a384115
Parents: 075aec9
Author: Adrien Grand <jp...@gmail.com>
Authored: Thu Jan 19 09:29:51 2017 +0100
Committer: Adrien Grand <jp...@gmail.com>
Committed: Thu Jan 19 09:30:34 2017 +0100

----------------------------------------------------------------------
 .../test/org/apache/lucene/search/TestIndexOrDocValuesQuery.java | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e8fa5990/lucene/sandbox/src/test/org/apache/lucene/search/TestIndexOrDocValuesQuery.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/search/TestIndexOrDocValuesQuery.java b/lucene/sandbox/src/test/org/apache/lucene/search/TestIndexOrDocValuesQuery.java
index 2a16e5d..de289e7 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/search/TestIndexOrDocValuesQuery.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/search/TestIndexOrDocValuesQuery.java
@@ -68,7 +68,7 @@ public class TestIndexOrDocValuesQuery extends LuceneTestCase {
         .build();
 
     final Weight w1 = searcher.createNormalizedWeight(q1, random().nextBoolean());
-    final Scorer s1 = w1.scorer(reader.leaves().get(0));
+    final Scorer s1 = w1.scorer(searcher.getIndexReader().leaves().get(0));
     assertNotNull(s1.twoPhaseIterator()); // means we use doc values
 
     // The term query is less selective, so the IndexOrDocValuesQuery should use points
@@ -78,7 +78,7 @@ public class TestIndexOrDocValuesQuery extends LuceneTestCase {
         .build();
 
     final Weight w2 = searcher.createNormalizedWeight(q2, random().nextBoolean());
-    final Scorer s2 = w2.scorer(reader.leaves().get(0));
+    final Scorer s2 = w2.scorer(searcher.getIndexReader().leaves().get(0));
     assertNull(s2.twoPhaseIterator()); // means we use points
 
     reader.close();
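
The point of the fix: test helpers such as newSearcher() may wrap the reader they are given, so a Weight
created through the searcher must have its Scorers pulled from searcher.getIndexReader(), not from the
original reader variable. A minimal sketch of that pattern with illustrative names, using the
createNormalizedWeight API this test already relies on:

    import java.io.IOException;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.Weight;

    // Hedged sketch, not the test itself: create the weight and pull the scorer
    // from the same reader view, namely the one the searcher exposes.
    final class SameReaderSketch {
      static Scorer firstLeafScorer(IndexSearcher searcher, Query query, boolean needsScores) throws IOException {
        Weight w = searcher.createNormalizedWeight(query, needsScores);
        LeafReaderContext ctx = searcher.getIndexReader().leaves().get(0); // same view the weight saw
        return w.scorer(ctx);
      }
    }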


[02/14] lucene-solr:jira/solr-5944: SOLR-9984: Deprecate GenericHadoopAuthPlugin in favor of HadoopAuthPlugin

Posted by is...@apache.org.
SOLR-9984: Deprecate GenericHadoopAuthPlugin in favor of HadoopAuthPlugin


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/1a05d6f4
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/1a05d6f4
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/1a05d6f4

Branch: refs/heads/jira/solr-5944
Commit: 1a05d6f4f1a6e7c99662549c8f24a11727d86b2f
Parents: 9f58b6c
Author: Ishan Chattopadhyaya <ic...@gmail.com>
Authored: Thu Jan 19 09:35:59 2017 +0530
Committer: Ishan Chattopadhyaya <ic...@gmail.com>
Committed: Thu Jan 19 09:35:59 2017 +0530

----------------------------------------------------------------------
 solr/CHANGES.txt                                |   8 +
 .../solr/security/GenericHadoopAuthPlugin.java  | 245 +------------------
 .../apache/solr/security/HadoopAuthPlugin.java  |   2 +-
 3 files changed, 14 insertions(+), 241 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1a05d6f4/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index aab5116..62b8818 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -94,6 +94,12 @@ Jetty 9.3.14.v20161028
 Detailed Change List
 ----------------------
 
+Upgrade Notes
+----------------------
+
+* SOLR-9984: GenericHadoopAuthPlugin is deprecated in favor of HadoopAuthPlugin. Simply changing the
+  name of the class in the security configurations should suffice while upgrading.
+
 New Features
 ----------------------
 
@@ -122,6 +128,8 @@ Other Changes
 ----------------------
 * SOLR-9980: Expose configVersion in core admin status (Jessica Cheng Mallet via Tomás Fernández Löbbe)
 
+* SOLR-9984: Deprecate GenericHadoopAuthPlugin in favor of HadoopAuthPlugin (Ishan Chattopadhyaya)
+
 ==================  6.4.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
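
Per the upgrade note above, migrating amounts to a class rename in security.json; the plugin parameters
(type, sysPropPrefix, authConfigs, defaultConfigs, enableDelegationToken, ...) keep their meaning since the
deprecated class now simply extends HadoopAuthPlugin. A hedged sketch of what such a configuration might
look like; the concrete parameter values are illustrative, not taken from this commit:

    {
      "authentication": {
        "class": "org.apache.solr.security.HadoopAuthPlugin",
        "type": "kerberos",
        "sysPropPrefix": "solr.",
        "authConfigs": ["kerberos.principal", "kerberos.keytab"],
        "enableDelegationToken": "false"
      }
    }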

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1a05d6f4/solr/core/src/java/org/apache/solr/security/GenericHadoopAuthPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/security/GenericHadoopAuthPlugin.java b/solr/core/src/java/org/apache/solr/security/GenericHadoopAuthPlugin.java
index e5fe349..3d63fd6 100644
--- a/solr/core/src/java/org/apache/solr/security/GenericHadoopAuthPlugin.java
+++ b/solr/core/src/java/org/apache/solr/security/GenericHadoopAuthPlugin.java
@@ -16,251 +16,16 @@
  */
 package org.apache.solr.security;
 
-import static org.apache.solr.security.RequestContinuesRecorderAuthenticationHandler.REQUEST_CONTINUES_ATTR;
-import static org.apache.solr.security.HadoopAuthFilter.DELEGATION_TOKEN_ZK_CLIENT;
-
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.lang.invoke.MethodHandles;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Optional;
-
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpServletResponseWrapper;
-
-import org.apache.commons.collections.iterators.IteratorEnumeration;
-import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
-import org.apache.solr.client.solrj.impl.HttpClientBuilderFactory;
-import org.apache.solr.client.solrj.impl.Krb5HttpClientBuilder;
-import org.apache.solr.client.solrj.impl.SolrHttpClientBuilder;
-import org.apache.solr.cloud.ZkController;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.SolrException.ErrorCode;
-import org.apache.solr.common.util.SuppressForbidden;
 import org.apache.solr.core.CoreContainer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
- * This class implements a generic plugin which can use authentication schemes exposed by the
- * Hadoop framework. This plugin supports following features
- * - integration with authentication mehcanisms (e.g. kerberos)
- * - Delegation token support
- * - Proxy users (or secure impersonation) support
- *
- * This plugin enables defining configuration parameters required by the undelying Hadoop authentication
- * mechanism. These configuration parameters can either be specified as a Java system property or the default
- * value can be specified as part of the plugin configuration.
- *
- * The proxy users are configured by specifying relevant Hadoop configuration parameters. Please note that
- * the delegation token support must be enabled for using the proxy users support.
- *
- * For Solr internal communication, this plugin enables configuring {@linkplain HttpClientBuilderFactory}
- * implementation (e.g. based on kerberos).
+ *  * @deprecated Use {@link HadoopAuthPlugin}. For backcompat against Solr 6.4.
  */
-public class GenericHadoopAuthPlugin extends AuthenticationPlugin implements HttpClientBuilderPlugin {
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
-  /**
-   * A property specifying the type of authentication scheme to be configured.
-   */
-  private static final String HADOOP_AUTH_TYPE = "type";
-
-  /**
-   * A property specifies the value of the prefix to be used to define Java system property
-   * for configuring the authentication mechanism. The name of the Java system property is
-   * defined by appending the configuration parmeter namne to this prefix value e.g. if prefix
-   * is 'solr' then the Java system property 'solr.kerberos.principal' defines the value of
-   * configuration parameter 'kerberos.principal'.
-   */
-  private static final String SYSPROP_PREFIX_PROPERTY = "sysPropPrefix";
-
-  /**
-   * A property specifying the configuration parameters required by the authentication scheme
-   * defined by {@linkplain #HADOOP_AUTH_TYPE} property.
-   */
-  private static final String AUTH_CONFIG_NAMES_PROPERTY = "authConfigs";
-
-  /**
-   * A property specifying the {@linkplain HttpClientBuilderFactory} used for the Solr internal
-   * communication.
-   */
-  private static final String HTTPCLIENT_BUILDER_FACTORY = "clientBuilderFactory";
-
-  /**
-   * A property specifying the default values for the configuration parameters specified by the
-   * {@linkplain #AUTH_CONFIG_NAMES_PROPERTY} property. The default values are specified as a
-   * collection of key-value pairs (i.e. property-name : default_value).
-   */
-  private static final String DEFAULT_AUTH_CONFIGS_PROPERTY = "defaultConfigs";
-
-  /**
-   * A property which enable (or disable) the delegation tokens functionality.
-   */
-  private static final String DELEGATION_TOKEN_ENABLED_PROPERTY = "enableDelegationToken";
-
-  /**
-   * A property which enables initialization of kerberos before connecting to Zookeeper.
-   */
-  private static final String INIT_KERBEROS_ZK = "initKerberosZk";
-
-  /**
-   * A property which configures proxy users for the underlying Hadoop authentication mechanism.
-   * This configuration is expressed as a collection of key-value pairs  (i.e. property-name : value).
-   */
-  public static final String PROXY_USER_CONFIGS = "proxyUserConfigs";
-
-  private AuthenticationFilter authFilter;
-  private HttpClientBuilderFactory factory = null;
-  private final CoreContainer coreContainer;
+@Deprecated
+public class GenericHadoopAuthPlugin extends HadoopAuthPlugin {
 
   public GenericHadoopAuthPlugin(CoreContainer coreContainer) {
-    this.coreContainer = coreContainer;
-  }
-
-  @SuppressWarnings("rawtypes")
-  @Override
-  public void init(Map<String,Object> pluginConfig) {
-    try {
-      String delegationTokenEnabled = (String)pluginConfig.getOrDefault(DELEGATION_TOKEN_ENABLED_PROPERTY, "false");
-      authFilter = (Boolean.parseBoolean(delegationTokenEnabled)) ? new HadoopAuthFilter() : new AuthenticationFilter();
-
-      // Initialize kerberos before initializing curator instance.
-      boolean initKerberosZk = Boolean.parseBoolean((String)pluginConfig.getOrDefault(INIT_KERBEROS_ZK, "false"));
-      if (initKerberosZk) {
-        (new Krb5HttpClientBuilder()).getBuilder();
-      }
-
-      FilterConfig conf = getInitFilterConfig(pluginConfig);
-      authFilter.init(conf);
-
-      String httpClientBuilderFactory = (String)pluginConfig.get(HTTPCLIENT_BUILDER_FACTORY);
-      if (httpClientBuilderFactory != null) {
-        Class c = Class.forName(httpClientBuilderFactory);
-        factory = (HttpClientBuilderFactory)c.newInstance();
-      }
-
-    } catch (ServletException | ClassNotFoundException | InstantiationException | IllegalAccessException e) {
-      throw new SolrException(ErrorCode.SERVER_ERROR, "Error initializing kerberos authentication plugin: "+e);
-    }
+    super(coreContainer);
   }
 
-  @SuppressWarnings("unchecked")
-  protected FilterConfig getInitFilterConfig(Map<String, Object> pluginConfig) {
-    Map<String, String> params = new HashMap<>();
-
-    String type = (String) Objects.requireNonNull(pluginConfig.get(HADOOP_AUTH_TYPE));
-    params.put(HADOOP_AUTH_TYPE, type);
-
-    String sysPropPrefix = (String) pluginConfig.getOrDefault(SYSPROP_PREFIX_PROPERTY, "solr.");
-    Collection<String> authConfigNames = (Collection<String>) pluginConfig.
-        getOrDefault(AUTH_CONFIG_NAMES_PROPERTY, Collections.emptyList());
-    Map<String,String> authConfigDefaults = (Map<String,String>) pluginConfig
-        .getOrDefault(DEFAULT_AUTH_CONFIGS_PROPERTY, Collections.emptyMap());
-    Map<String,String> proxyUserConfigs = (Map<String,String>) pluginConfig
-        .getOrDefault(PROXY_USER_CONFIGS, Collections.emptyMap());
-
-    for ( String configName : authConfigNames) {
-      String systemProperty = sysPropPrefix + configName;
-      String defaultConfigVal = authConfigDefaults.get(configName);
-      String configVal = System.getProperty(systemProperty, defaultConfigVal);
-      if (configVal != null) {
-        params.put(configName, configVal);
-      }
-    }
-
-    // Configure proxy user settings.
-    params.putAll(proxyUserConfigs);
-
-    final ServletContext servletContext = new AttributeOnlyServletContext();
-    log.info("Params: "+params);
-
-    ZkController controller = coreContainer.getZkController();
-    if (controller != null) {
-      servletContext.setAttribute(DELEGATION_TOKEN_ZK_CLIENT, controller.getZkClient());
-    }
-
-    FilterConfig conf = new FilterConfig() {
-      @Override
-      public ServletContext getServletContext() {
-        return servletContext;
-      }
-
-      @Override
-      public Enumeration<String> getInitParameterNames() {
-        return new IteratorEnumeration(params.keySet().iterator());
-      }
-
-      @Override
-      public String getInitParameter(String param) {
-        return params.get(param);
-      }
-
-      @Override
-      public String getFilterName() {
-        return "HadoopAuthFilter";
-      }
-    };
-
-    return conf;
-  }
-
-  @Override
-  public boolean doAuthenticate(ServletRequest request, ServletResponse response, FilterChain filterChain)
-      throws Exception {
-    final HttpServletResponse frsp = (HttpServletResponse)response;
-
-    // Workaround until HADOOP-13346 is fixed.
-    HttpServletResponse rspCloseShield = new HttpServletResponseWrapper(frsp) {
-      @SuppressForbidden(reason = "Hadoop DelegationTokenAuthenticationFilter uses response writer, this" +
-          "is providing a CloseShield on top of that")
-      @Override
-      public PrintWriter getWriter() throws IOException {
-        final PrintWriter pw = new PrintWriterWrapper(frsp.getWriter()) {
-          @Override
-          public void close() {};
-        };
-        return pw;
-      }
-    };
-    authFilter.doFilter(request, rspCloseShield, filterChain);
-
-    if (authFilter instanceof HadoopAuthFilter) { // delegation token mgmt.
-      String requestContinuesAttr = (String)request.getAttribute(REQUEST_CONTINUES_ATTR);
-      if (requestContinuesAttr == null) {
-        log.warn("Could not find " + REQUEST_CONTINUES_ATTR);
-        return false;
-      } else {
-        return Boolean.parseBoolean(requestContinuesAttr);
-      }
-    }
-
-    return true;
-  }
-
-  @Override
-  public SolrHttpClientBuilder getHttpClientBuilder(SolrHttpClientBuilder builder) {
-    return (factory != null) ? factory.getHttpClientBuilder(Optional.ofNullable(builder)) : builder;
-  }
-
-  @Override
-  public void close() throws IOException {
-    if (authFilter != null) {
-      authFilter.destroy();
-    }
-    if (factory != null) {
-      factory.close();
-    }
-  }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1a05d6f4/solr/core/src/java/org/apache/solr/security/HadoopAuthPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/security/HadoopAuthPlugin.java b/solr/core/src/java/org/apache/solr/security/HadoopAuthPlugin.java
index db0f639..1f0d5ad 100644
--- a/solr/core/src/java/org/apache/solr/security/HadoopAuthPlugin.java
+++ b/solr/core/src/java/org/apache/solr/security/HadoopAuthPlugin.java
@@ -135,7 +135,7 @@ public class HadoopAuthPlugin extends AuthenticationPlugin {
       authFilter.init(conf);
 
     } catch (ServletException e) {
-      throw new SolrException(ErrorCode.SERVER_ERROR, "Error initializing GenericHadoopAuthPlugin: "+e);
+      throw new SolrException(ErrorCode.SERVER_ERROR, "Error initializing " + getClass().getName() + ": "+e);
     }
   }
 


[14/14] lucene-solr:jira/solr-5944: Merge branch 'master' into jira/solr-5944

Posted by is...@apache.org.
Merge branch 'master' into jira/solr-5944


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/829f5293
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/829f5293
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/829f5293

Branch: refs/heads/jira/solr-5944
Commit: 829f5293bf1cc5eaa2f33b7674bb66de591c6b7c
Parents: 87c02d7 69055aa
Author: Ishan Chattopadhyaya <is...@apache.org>
Authored: Fri Jan 20 19:20:17 2017 +0530
Committer: Ishan Chattopadhyaya <is...@apache.org>
Committed: Fri Jan 20 19:20:17 2017 +0530

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |   7 +
 .../analysis/charfilter/BaseCharFilter.java     |  26 +-
 .../lucene/document/NumericDocValuesField.java  |  48 +++
 .../lucene/document/SortedDocValuesField.java   |  42 +++
 .../document/SortedNumericDocValuesField.java   |  54 ++++
 .../SortedNumericDocValuesRangeQuery.java       | 144 +++++++++
 .../document/SortedSetDocValuesField.java       |  43 +++
 .../document/SortedSetDocValuesRangeQuery.java  | 187 +++++++++++
 .../lucene/search/IndexOrDocValuesQuery.java    | 166 ++++++++++
 .../lucene60/TestLucene60PointsFormat.java      |  10 +-
 .../lucene/search/TestDocValuesQueries.java     | 238 ++++++++++++++
 .../search/TestIndexOrDocValuesQuery.java       |  89 ++++++
 .../org/apache/lucene/util/bkd/TestBKD.java     |   4 +-
 .../lucene/search/DocValuesRangeQuery.java      | 276 -----------------
 .../lucene/search/IndexOrDocValuesQuery.java    | 116 -------
 .../lucene/search/TestDocValuesRangeQuery.java  | 307 -------------------
 .../search/TestIndexOrDocValuesQuery.java       |  89 ------
 solr/CHANGES.txt                                |   6 +
 solr/bin/solr                                   |  11 +-
 solr/bin/solr.cmd                               |  13 +-
 solr/bin/solr.in.cmd                            |   5 +
 solr/bin/solr.in.sh                             |   5 +
 .../apache/solr/schema/ICUCollationField.java   |  10 +-
 .../org/apache/solr/schema/CollationField.java  |   3 +-
 .../java/org/apache/solr/schema/EnumField.java  |  20 +-
 .../java/org/apache/solr/schema/FieldType.java  |  16 +-
 .../java/org/apache/solr/schema/TrieField.java  |  45 ++-
 .../solr/security/GenericHadoopAuthPlugin.java  | 266 ----------------
 .../apache/solr/security/HadoopAuthPlugin.java  |   2 +-
 .../solr/cloud/CollectionsAPISolrJTest.java     |  34 --
 .../apache/solr/cloud/OverseerRolesTest.java    | 173 +++++------
 .../org/apache/solr/schema/TestPointFields.java |  11 +
 solr/server/scripts/cloud-scripts/zkcli.bat     |   2 +-
 solr/server/scripts/cloud-scripts/zkcli.sh      |   2 +-
 34 files changed, 1220 insertions(+), 1250 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/829f5293/solr/core/src/test/org/apache/solr/schema/TestPointFields.java
----------------------------------------------------------------------


[07/14] lucene-solr:jira/solr-5944: LUCENE-7645: Use JDK's Arrays.binarySearch in BaseCharFilter.

Posted by is...@apache.org.
LUCENE-7645: Use JDK's Arrays.binarySearch in BaseCharFilter.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/a14d7936
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/a14d7936
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/a14d7936

Branch: refs/heads/jira/solr-5944
Commit: a14d79366f97ffde61b56aee2e2d9123ccadc8a7
Parents: 85a05b5
Author: Adrien Grand <jp...@gmail.com>
Authored: Thu Jan 19 11:27:24 2017 +0100
Committer: Adrien Grand <jp...@gmail.com>
Committed: Thu Jan 19 11:27:24 2017 +0100

----------------------------------------------------------------------
 .../analysis/charfilter/BaseCharFilter.java     | 26 +++++---------------
 1 file changed, 6 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a14d7936/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/BaseCharFilter.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/BaseCharFilter.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/BaseCharFilter.java
index 48ffa48..4fba9fe 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/BaseCharFilter.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/BaseCharFilter.java
@@ -41,31 +41,17 @@ public abstract class BaseCharFilter extends CharFilter {
   /** Retrieve the corrected offset. */
   @Override
   protected int correct(int currentOff) {
-    if (offsets == null || currentOff < offsets[0]) {
+    if (offsets == null) {
       return currentOff;
     }
-    
-    int hi = size - 1;
-    if(currentOff >= offsets[hi])
-      return currentOff + diffs[hi];
 
-    int lo = 0;
-    int mid = -1;
-    
-    while (hi >= lo) {
-      mid = (lo + hi) >>> 1;
-      if (currentOff < offsets[mid])
-        hi = mid - 1;
-      else if (currentOff > offsets[mid])
-        lo = mid + 1;
-      else
-        return currentOff + diffs[mid];
+    int index = Arrays.binarySearch(offsets, 0, size, currentOff);
+    if (index < -1) {
+      index = -2 - index;
     }
 
-    if (currentOff < offsets[mid])
-      return mid == 0 ? currentOff : currentOff + diffs[mid-1];
-    else
-      return currentOff + diffs[mid];
+    final int diff = index < 0 ? 0 : diffs[index];
+    return currentOff + diff;
   }
   
   protected int getLastCumulativeDiff() {
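
For context, Arrays.binarySearch encodes a missed lookup as -(insertionPoint) - 1, which is why the new correct() maps a negative result through -2 - index to reach the last offset strictly below currentOff. A standalone sketch of that arithmetic, using made-up offsets/diffs tables rather than anything from the commit:

  import java.util.Arrays;

  public class BinarySearchCorrectSketch {
    public static void main(String[] args) {
      // Hypothetical correction tables: offsets are sorted ascending and
      // diffs[i] applies to any input offset in [offsets[i], offsets[i+1]).
      int[] offsets = {5, 10, 20};
      int[] diffs   = {1,  3,  6};
      int size = offsets.length;

      for (int currentOff : new int[] {3, 5, 7, 15, 25}) {
        int index = Arrays.binarySearch(offsets, 0, size, currentOff);
        if (index < -1) {
          // Miss: binarySearch returned -(insertionPoint) - 1, so -2 - index
          // is the index of the largest offset that is < currentOff.
          index = -2 - index;
        }
        // index == -1 means currentOff sorts before offsets[0]: no correction.
        int diff = index < 0 ? 0 : diffs[index];
        System.out.println(currentOff + " -> " + (currentOff + diff));
      }
    }
  }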


[06/14] lucene-solr:jira/solr-5944: LUCENE-7640: Fix test.

Posted by is...@apache.org.
LUCENE-7640: Fix test.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/85a05b54
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/85a05b54
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/85a05b54

Branch: refs/heads/jira/solr-5944
Commit: 85a05b546bee9ff7484372c44854d4fd66d63b36
Parents: e8fa599
Author: Adrien Grand <jp...@gmail.com>
Authored: Thu Jan 19 09:54:23 2017 +0100
Committer: Adrien Grand <jp...@gmail.com>
Committed: Thu Jan 19 09:54:50 2017 +0100

----------------------------------------------------------------------
 .../lucene/codecs/lucene60/TestLucene60PointsFormat.java  | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/85a05b54/lucene/core/src/test/org/apache/lucene/codecs/lucene60/TestLucene60PointsFormat.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene60/TestLucene60PointsFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene60/TestLucene60PointsFormat.java
index 4287273..08dc6c6 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/lucene60/TestLucene60PointsFormat.java
+++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene60/TestLucene60PointsFormat.java
@@ -31,7 +31,9 @@ import org.apache.lucene.index.BasePointsFormatTestCase;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.MockRandomMergePolicy;
 import org.apache.lucene.index.PointValues;
 import org.apache.lucene.index.SegmentReadState;
 import org.apache.lucene.index.SegmentWriteState;
@@ -97,7 +99,13 @@ public class TestLucene60PointsFormat extends BasePointsFormatTestCase {
 
   public void testEstimatePointCount() throws IOException {
     Directory dir = newDirectory();
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
+    IndexWriterConfig iwc = newIndexWriterConfig();
+    // Avoid mockRandomMP since it may cause non-optimal merges that make the
+    // number of points per leaf hard to predict
+    while (iwc.getMergePolicy() instanceof MockRandomMergePolicy) {
+      iwc.setMergePolicy(newMergePolicy());
+    }
+    IndexWriter w = new IndexWriter(dir, iwc);
     byte[] pointValue = new byte[3];
     byte[] uniquePointValue = new byte[3];
     random().nextBytes(uniquePointValue);


[11/14] lucene-solr:jira/solr-5944: LUCENE-7643: Move IndexOrDocValuesQuery to core.

Posted by is...@apache.org.
LUCENE-7643: Move IndexOrDocValuesQuery to core.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/71ca2a84
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/71ca2a84
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/71ca2a84

Branch: refs/heads/jira/solr-5944
Commit: 71ca2a84bad2495eff3b0b15dc445f3f013ea4af
Parents: a2131a9
Author: Adrien Grand <jp...@gmail.com>
Authored: Thu Jan 19 18:12:04 2017 +0100
Committer: Adrien Grand <jp...@gmail.com>
Committed: Fri Jan 20 13:42:31 2017 +0100

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |   7 +
 .../lucene/document/NumericDocValuesField.java  |  48 +++
 .../lucene/document/SortedDocValuesField.java   |  42 +++
 .../document/SortedNumericDocValuesField.java   |  54 ++++
 .../SortedNumericDocValuesRangeQuery.java       | 144 +++++++++
 .../document/SortedSetDocValuesField.java       |  43 +++
 .../document/SortedSetDocValuesRangeQuery.java  | 187 +++++++++++
 .../lucene/search/IndexOrDocValuesQuery.java    | 166 ++++++++++
 .../apache/lucene/search/PointRangeQuery.java   |   2 +-
 .../lucene/search/TestDocValuesQueries.java     | 238 ++++++++++++++
 .../search/TestIndexOrDocValuesQuery.java       |  89 ++++++
 .../lucene/search/DocValuesRangeQuery.java      | 276 -----------------
 .../lucene/search/IndexOrDocValuesQuery.java    | 116 -------
 .../lucene/search/TestDocValuesRangeQuery.java  | 307 -------------------
 .../search/TestIndexOrDocValuesQuery.java       |  89 ------
 .../apache/solr/schema/ICUCollationField.java   |  10 +-
 .../org/apache/solr/schema/CollationField.java  |   3 +-
 .../java/org/apache/solr/schema/EnumField.java  |  20 +-
 .../java/org/apache/solr/schema/FieldType.java  |  16 +-
 .../java/org/apache/solr/schema/TrieField.java  |  45 ++-
 20 files changed, 1082 insertions(+), 820 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71ca2a84/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 9d1cbb7..147b0e0 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -74,6 +74,9 @@ API Changes
 * LUCENE-7644: FieldComparatorSource.newComparator() and
   SortField.getComparator() no longer throw IOException (Alan Woodward)
 
+* LUCENE-7643: Replaced doc-values queries in lucene/sandbox with factory
+  methods on the *DocValuesField classes. (Adrien Grand)
+
 New Features
 
 * LUCENE-7623: Add FunctionScoreQuery and FunctionMatchQuery (Alan Woodward,
@@ -96,6 +99,10 @@ Improvements
   should be run, eg. using points or doc values depending on costs of other
   parts of the query. (Adrien Grand)
 
+* LUCENE-7643: IndexOrDocValuesQuery allows executing range queries using
+  either points or doc values depending on which one is more efficient.
+  (Adrien Grand)
+
 Optimizations
 
 * LUCENE-7641: Optimized point range queries to compute documents that do not

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71ca2a84/lucene/core/src/java/org/apache/lucene/document/NumericDocValuesField.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/document/NumericDocValuesField.java b/lucene/core/src/java/org/apache/lucene/document/NumericDocValuesField.java
index 5b6dcc8..6d84492 100644
--- a/lucene/core/src/java/org/apache/lucene/document/NumericDocValuesField.java
+++ b/lucene/core/src/java/org/apache/lucene/document/NumericDocValuesField.java
@@ -17,7 +17,15 @@
 package org.apache.lucene.document;
 
 
+import java.io.IOException;
+
+import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.DocValuesType;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.index.SortedNumericDocValues;
+import org.apache.lucene.search.IndexOrDocValuesQuery;
+import org.apache.lucene.search.Query;
 
 /**
  * <p>
@@ -54,4 +62,44 @@ public class NumericDocValuesField extends Field {
     super(name, TYPE);
     fieldsData = Long.valueOf(value);
   }
+
+  /**
+   * Create a range query that matches all documents whose value is between
+   * {@code lowerValue} and {@code upperValue} included.
+   * <p>
+   * You can have half-open ranges (which are in fact &lt;/&le; or &gt;/&ge; queries)
+   * by setting {@code lowerValue = Long.MIN_VALUE} or {@code upperValue = Long.MAX_VALUE}. 
+   * <p>
+   * Ranges are inclusive. For exclusive ranges, pass {@code Math.addExact(lowerValue, 1)}
+   * or {@code Math.addExact(upperValue, -1)}.
+   * <p><b>NOTE</b>: Such queries cannot efficiently advance to the next match,
+   * which makes them slow if they are not ANDed with a selective query. As a
+   * consequence, they are best used wrapped in an {@link IndexOrDocValuesQuery},
+   * alongside a range query that executes on points, such as
+   * {@link LongPoint#newRangeQuery}.
+   */
+  public static Query newRangeQuery(String field, long lowerValue, long upperValue) {
+    return new SortedNumericDocValuesRangeQuery(field, lowerValue, upperValue) {
+      @Override
+      SortedNumericDocValues getValues(LeafReader reader, String field) throws IOException {
+        NumericDocValues values = reader.getNumericDocValues(field);
+        if (values == null) {
+          return null;
+        }
+        return DocValues.singleton(values);
+      }
+    };
+  }
+
+  /** 
+   * Create a query for matching an exact long value.
+   * <p><b>NOTE</b>: Such queries cannot efficiently advance to the next match,
+   * which makes them slow if they are not ANDed with a selective query. As a
+   * consequence, they are best used wrapped in an {@link IndexOrDocValuesQuery},
+   * alongside a range query that executes on points, such as
+   * {@link LongPoint#newExactQuery}.
+   */
+  public static Query newExactQuery(String field, long value) {
+    return newRangeQuery(field, value, value);
+  }
 }
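
As the javadoc above suggests, this factory is meant to be paired with an equivalent points query inside an IndexOrDocValuesQuery. A minimal sketch, assuming a hypothetical "timestamp" field that was indexed with both a LongPoint and a NumericDocValuesField carrying the same value:

  import org.apache.lucene.document.LongPoint;
  import org.apache.lucene.document.NumericDocValuesField;
  import org.apache.lucene.search.IndexOrDocValuesQuery;
  import org.apache.lucene.search.Query;

  public class TimestampRangeQueryFactory {
    /** Range query over a hypothetical "timestamp" field indexed with both
     *  LongPoint and NumericDocValuesField holding the same values. */
    public static Query newTimestampRange(long lower, long upper) {
      Query pointQuery = LongPoint.newRangeQuery("timestamp", lower, upper);
      Query dvQuery = NumericDocValuesField.newRangeQuery("timestamp", lower, upper);
      // Points are used when this clause leads iteration; doc values are used
      // when another clause is already leading and matches only need verifying.
      return new IndexOrDocValuesQuery(pointQuery, dvQuery);
    }
  }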

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71ca2a84/lucene/core/src/java/org/apache/lucene/document/SortedDocValuesField.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/document/SortedDocValuesField.java b/lucene/core/src/java/org/apache/lucene/document/SortedDocValuesField.java
index bbfb467..feb7725 100644
--- a/lucene/core/src/java/org/apache/lucene/document/SortedDocValuesField.java
+++ b/lucene/core/src/java/org/apache/lucene/document/SortedDocValuesField.java
@@ -17,7 +17,14 @@
 package org.apache.lucene.document;
 
 
+import java.io.IOException;
+
+import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.DocValuesType;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.lucene.search.IndexOrDocValuesQuery;
+import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
 
 /**
@@ -59,4 +66,39 @@ public class SortedDocValuesField extends Field {
     super(name, TYPE);
     fieldsData = bytes;
   }
+
+  /**
+   * Create a range query that matches all documents whose value is between
+   * {@code lowerValue} and {@code upperValue} included.
+   * <p>
+   * You can have half-open ranges by setting {@code lowerValue = null}
+   * or {@code upperValue = null}.
+   * <p><b>NOTE</b>: Such queries cannot efficiently advance to the next match,
+   * which makes them slow if they are not ANDed with a selective query. As a
+   * consequence, they are best used wrapped in an {@link IndexOrDocValuesQuery},
+   * alongside a range query that executes on points, such as
+   * {@link BinaryPoint#newRangeQuery}.
+   */
+  public static Query newRangeQuery(String field,
+      BytesRef lowerValue, BytesRef upperValue,
+      boolean lowerInclusive, boolean upperInclusive) {
+    return new SortedSetDocValuesRangeQuery(field, lowerValue, upperValue, lowerInclusive, upperInclusive) {
+      @Override
+      SortedSetDocValues getValues(LeafReader reader, String field) throws IOException {
+        return DocValues.singleton(DocValues.getSorted(reader, field));
+      }
+    };
+  }
+
+  /** 
+   * Create a query for matching an exact {@link BytesRef} value.
+   * <p><b>NOTE</b>: Such queries cannot efficiently advance to the next match,
+   * which makes them slow if they are not ANDed with a selective query. As a
+   * consequence, they are best used wrapped in an {@link IndexOrDocValuesQuery},
+   * alongside a range query that executes on points, such as
+   * {@link BinaryPoint#newExactQuery}.
+   */
+  public static Query newExactQuery(String field, BytesRef value) {
+    return newRangeQuery(field, value, value, true, true);
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71ca2a84/lucene/core/src/java/org/apache/lucene/document/SortedNumericDocValuesField.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/document/SortedNumericDocValuesField.java b/lucene/core/src/java/org/apache/lucene/document/SortedNumericDocValuesField.java
index cbba218..6f9a271 100644
--- a/lucene/core/src/java/org/apache/lucene/document/SortedNumericDocValuesField.java
+++ b/lucene/core/src/java/org/apache/lucene/document/SortedNumericDocValuesField.java
@@ -17,7 +17,15 @@
 package org.apache.lucene.document;
 
 
+import java.io.IOException;
+
+import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.DocValuesType;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.SortedNumericDocValues;
+import org.apache.lucene.search.IndexOrDocValuesQuery;
+import org.apache.lucene.search.Query;
 
 /**
  * <p>
@@ -63,4 +71,50 @@ public class SortedNumericDocValuesField extends Field {
     super(name, TYPE);
     fieldsData = Long.valueOf(value);
   }
+
+  /**
+   * Create a range query that matches all documents whose value is between
+   * {@code lowerValue} and {@code upperValue} included.
+   * <p>
+   * You can have half-open ranges (which are in fact &lt;/&le; or &gt;/&ge; queries)
+   * by setting {@code lowerValue = Long.MIN_VALUE} or {@code upperValue = Long.MAX_VALUE}. 
+   * <p>
+   * Ranges are inclusive. For exclusive ranges, pass {@code Math.addExact(lowerValue, 1)}
+   * or {@code Math.addExact(upperValue, -1)}.
+   * <p>This query also works with fields that have indexed
+   * {@link NumericDocValuesField}s.
+   * <p><b>NOTE</b>: Such queries cannot efficiently advance to the next match,
+   * which makes them slow if they are not ANDed with a selective query. As a
+   * consequence, they are best used wrapped in an {@link IndexOrDocValuesQuery},
+   * alongside a range query that executes on points, such as
+   * {@link LongPoint#newRangeQuery}.
+   */
+  public static Query newRangeQuery(String field, long lowerValue, long upperValue) {
+    return new SortedNumericDocValuesRangeQuery(field, lowerValue, upperValue) {
+      @Override
+      SortedNumericDocValues getValues(LeafReader reader, String field) throws IOException {
+        FieldInfo info = reader.getFieldInfos().fieldInfo(field);
+        if (info == null) {
+          // Queries have some optimizations when one sub scorer returns null rather
+          // than a scorer that does not match any documents
+          return null;
+        }
+        return DocValues.getSortedNumeric(reader, field);
+      }
+    };
+  }
+
+  /** 
+   * Create a query for matching an exact long value.
+   * <p>This query also works with fields that have indexed
+   * {@link NumericDocValuesField}s.
+   * <p><b>NOTE</b>: Such queries cannot efficiently advance to the next match,
+   * which makes them slow if they are not ANDed with a selective query. As a
+   * consequence, they are best used wrapped in an {@link IndexOrDocValuesQuery},
+   * alongside a range query that executes on points, such as
+   * {@link LongPoint#newExactQuery}.
+   */
+  public static Query newExactQuery(String field, long value) {
+    return newRangeQuery(field, value, value);
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71ca2a84/lucene/core/src/java/org/apache/lucene/document/SortedNumericDocValuesRangeQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/document/SortedNumericDocValuesRangeQuery.java b/lucene/core/src/java/org/apache/lucene/document/SortedNumericDocValuesRangeQuery.java
new file mode 100644
index 0000000..18805b2
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/document/SortedNumericDocValuesRangeQuery.java
@@ -0,0 +1,144 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.document;
+
+import java.io.IOException;
+import java.util.Objects;
+
+import org.apache.lucene.index.DocValues;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.index.SortedNumericDocValues;
+import org.apache.lucene.search.ConstantScoreScorer;
+import org.apache.lucene.search.ConstantScoreWeight;
+import org.apache.lucene.search.FieldValueQuery;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.TwoPhaseIterator;
+import org.apache.lucene.search.Weight;
+
+abstract class SortedNumericDocValuesRangeQuery extends Query {
+
+  private final String field;
+  private final long lowerValue;
+  private final long upperValue;
+
+  SortedNumericDocValuesRangeQuery(String field, long lowerValue, long upperValue) {
+    this.field = Objects.requireNonNull(field);
+    this.lowerValue = lowerValue;
+    this.upperValue = upperValue;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (sameClassAs(obj) == false) {
+      return false;
+    }
+    SortedNumericDocValuesRangeQuery that = (SortedNumericDocValuesRangeQuery) obj;
+    return Objects.equals(field, that.field)
+        && lowerValue == that.lowerValue
+        && upperValue == that.upperValue;
+  }
+
+  @Override
+  public int hashCode() {
+    int h = classHash();
+    h = 31 * h + field.hashCode();
+    h = 31 * h + Long.hashCode(lowerValue);
+    h = 31 * h + Long.hashCode(upperValue);
+    return h;
+  }
+
+  @Override
+  public String toString(String field) {
+    StringBuilder b = new StringBuilder();
+    if (this.field.equals(field) == false) {
+      b.append(this.field).append(":");
+    }
+    return b
+        .append("[")
+        .append(lowerValue)
+        .append(" TO ")
+        .append(upperValue)
+        .append("]")
+        .toString();
+  }
+
+  @Override
+  public Query rewrite(IndexReader reader) throws IOException {
+    if (lowerValue == Long.MIN_VALUE && upperValue == Long.MAX_VALUE) {
+      return new FieldValueQuery(field);
+    }
+    return super.rewrite(reader);
+  }
+
+  abstract SortedNumericDocValues getValues(LeafReader reader, String field) throws IOException;
+
+  @Override
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new ConstantScoreWeight(this, boost) {
+      @Override
+      public Scorer scorer(LeafReaderContext context) throws IOException {
+        SortedNumericDocValues values = getValues(context.reader(), field);
+        if (values == null) {
+          return null;
+        }
+        final NumericDocValues singleton = DocValues.unwrapSingleton(values);
+        final TwoPhaseIterator iterator;
+        if (singleton != null) {
+          iterator = new TwoPhaseIterator(singleton) {
+            @Override
+            public boolean matches() throws IOException {
+              final long value = singleton.longValue();
+              return value >= lowerValue && value <= upperValue;
+            }
+
+            @Override
+            public float matchCost() {
+              return 2; // 2 comparisons
+            }
+          };
+        } else {
+          iterator = new TwoPhaseIterator(values) {
+            @Override
+            public boolean matches() throws IOException {
+              for (int i = 0, count = values.docValueCount(); i < count; ++i) {
+                final long value = values.nextValue();
+                if (value < lowerValue) {
+                  continue;
+                }
+                // Values are sorted, so the first value that is >= lowerValue is our best candidate
+                return value <= upperValue;
+              }
+              return false; // all values were < lowerValue
+            }
+
+            @Override
+            public float matchCost() {
+              return 2; // 2 comparisons
+            }
+          };
+        }
+        return new ConstantScoreScorer(this, score(), iterator);
+      }
+    };
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71ca2a84/lucene/core/src/java/org/apache/lucene/document/SortedSetDocValuesField.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/document/SortedSetDocValuesField.java b/lucene/core/src/java/org/apache/lucene/document/SortedSetDocValuesField.java
index 7a273ac..26b1907 100644
--- a/lucene/core/src/java/org/apache/lucene/document/SortedSetDocValuesField.java
+++ b/lucene/core/src/java/org/apache/lucene/document/SortedSetDocValuesField.java
@@ -17,7 +17,14 @@
 package org.apache.lucene.document;
 
 
+import java.io.IOException;
+
+import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.DocValuesType;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.lucene.search.IndexOrDocValuesQuery;
+import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
 
 /**
@@ -60,4 +67,40 @@ public class SortedSetDocValuesField extends Field {
     super(name, TYPE);
     fieldsData = bytes;
   }
+
+  /**
+   * Create a range query that matches all documents whose value is between
+   * {@code lowerValue} and {@code upperValue}.
+   * <p>This query also works with fields that have indexed
+   * {@link SortedDocValuesField}s.
+   * <p><b>NOTE</b>: Such queries cannot efficiently advance to the next match,
+   * which makes them slow if they are not ANDed with a selective query. As a
+   * consequence, they are best used wrapped in an {@link IndexOrDocValuesQuery},
+   * alongside a range query that executes on points, such as
+   * {@link BinaryPoint#newRangeQuery}.
+   */
+  public static Query newRangeQuery(String field,
+      BytesRef lowerValue, BytesRef upperValue,
+      boolean lowerInclusive, boolean upperInclusive) {
+    return new SortedSetDocValuesRangeQuery(field, lowerValue, upperValue, lowerInclusive, upperInclusive) {
+      @Override
+      SortedSetDocValues getValues(LeafReader reader, String field) throws IOException {
+        return DocValues.getSortedSet(reader, field);
+      }
+    };
+  }
+
+  /** 
+   * Create a query for matching an exact {@link BytesRef} value.
+   * <p>This query also works with fields that have indexed
+   * {@link SortedDocValuesField}s.
+   * <p><b>NOTE</b>: Such queries cannot efficiently advance to the next match,
+   * which makes them slow if they are not ANDed with a selective query. As a
+   * consequence, they are best used wrapped in an {@link IndexOrDocValuesQuery},
+   * alongside a range query that executes on points, such as
+   * {@link BinaryPoint#newExactQuery}.
+   */
+  public static Query newExactQuery(String field, BytesRef value) {
+    return newRangeQuery(field, value, value, true, true);
+  }
 }
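
The same pairing works for binary terms. A sketch, assuming a hypothetical fixed-width "id" field indexed with both a one-dimension BinaryPoint and a SortedSetDocValuesField over the same bytes:

  import org.apache.lucene.document.BinaryPoint;
  import org.apache.lucene.document.SortedSetDocValuesField;
  import org.apache.lucene.search.IndexOrDocValuesQuery;
  import org.apache.lucene.search.Query;
  import org.apache.lucene.util.BytesRef;

  public class IdRangeQueryFactory {
    /** Range query over a hypothetical "id" field indexed with both
     *  BinaryPoint and SortedSetDocValuesField holding the same bytes. */
    public static Query newIdRange(byte[] lower, byte[] upper) {
      Query pointQuery = BinaryPoint.newRangeQuery("id", lower, upper);
      // BinaryPoint ranges are inclusive on both ends, so mirror that here.
      Query dvQuery = SortedSetDocValuesField.newRangeQuery(
          "id", new BytesRef(lower), new BytesRef(upper), true, true);
      return new IndexOrDocValuesQuery(pointQuery, dvQuery);
    }
  }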

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71ca2a84/lucene/core/src/java/org/apache/lucene/document/SortedSetDocValuesRangeQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/document/SortedSetDocValuesRangeQuery.java b/lucene/core/src/java/org/apache/lucene/document/SortedSetDocValuesRangeQuery.java
new file mode 100644
index 0000000..30af45f
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/document/SortedSetDocValuesRangeQuery.java
@@ -0,0 +1,187 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.document;
+
+import java.io.IOException;
+import java.util.Objects;
+
+import org.apache.lucene.index.DocValues;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.SortedDocValues;
+import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.lucene.search.ConstantScoreScorer;
+import org.apache.lucene.search.ConstantScoreWeight;
+import org.apache.lucene.search.FieldValueQuery;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.TwoPhaseIterator;
+import org.apache.lucene.search.Weight;
+import org.apache.lucene.util.BytesRef;
+
+abstract class SortedSetDocValuesRangeQuery extends Query {
+
+  private final String field;
+  private final BytesRef lowerValue;
+  private final BytesRef upperValue;
+  private final boolean lowerInclusive;
+  private final boolean upperInclusive;
+
+  SortedSetDocValuesRangeQuery(String field,
+      BytesRef lowerValue, BytesRef upperValue,
+      boolean lowerInclusive, boolean upperInclusive) {
+    this.field = Objects.requireNonNull(field);
+    this.lowerValue = lowerValue;
+    this.upperValue = upperValue;
+    this.lowerInclusive = lowerInclusive && lowerValue != null;
+    this.upperInclusive = upperInclusive && upperValue != null;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (sameClassAs(obj) == false) {
+      return false;
+    }
+    SortedSetDocValuesRangeQuery that = (SortedSetDocValuesRangeQuery) obj;
+    return Objects.equals(field, that.field)
+        && Objects.equals(lowerValue, that.lowerValue)
+        && Objects.equals(upperValue, that.upperValue)
+        && lowerInclusive == that.lowerInclusive
+        && upperInclusive == that.upperInclusive;
+  }
+
+  @Override
+  public int hashCode() {
+    int h = classHash();
+    h = 31 * h + field.hashCode();
+    h = 31 * h + Objects.hashCode(lowerValue);
+    h = 31 * h + Objects.hashCode(upperValue);
+    h = 31 * h + Boolean.hashCode(lowerInclusive);
+    h = 31 * h + Boolean.hashCode(upperInclusive);
+    return h;
+  }
+
+  @Override
+  public String toString(String field) {
+    StringBuilder b = new StringBuilder();
+    if (this.field.equals(field) == false) {
+      b.append(this.field).append(":");
+    }
+    return b
+        .append(lowerInclusive ? "[" : "{")
+        .append(lowerValue == null ? "*" : lowerValue)
+        .append(" TO ")
+        .append(upperValue == null ? "*" : upperValue)
+        .append(upperInclusive ? "]" : "}")
+        .toString();
+  }
+
+  @Override
+  public Query rewrite(IndexReader reader) throws IOException {
+    if (lowerValue == null && upperValue == null) {
+      return new FieldValueQuery(field);
+    }
+    return super.rewrite(reader);
+  }
+
+  abstract SortedSetDocValues getValues(LeafReader reader, String field) throws IOException;
+
+  @Override
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new ConstantScoreWeight(this, boost) {
+      @Override
+      public Scorer scorer(LeafReaderContext context) throws IOException {
+        SortedSetDocValues values = getValues(context.reader(), field);
+        if (values == null) {
+          return null;
+        }
+
+        final long minOrd;
+        if (lowerValue == null) {
+          minOrd = 0;
+        } else {
+          final long ord = values.lookupTerm(lowerValue);
+          if (ord < 0) {
+            minOrd = -1 - ord;
+          } else if (lowerInclusive) {
+            minOrd = ord;
+          } else {
+            minOrd = ord + 1;
+          }
+        }
+
+        final long maxOrd;
+        if (upperValue == null) {
+          maxOrd = values.getValueCount() - 1;
+        } else {
+          final long ord = values.lookupTerm(upperValue);
+          if (ord < 0) {
+            maxOrd = -2 - ord;
+          } else if (upperInclusive) {
+            maxOrd = ord;
+          } else {
+            maxOrd = ord - 1;
+          }
+        }
+
+        if (minOrd > maxOrd) {
+          return null;
+        }
+
+        final SortedDocValues singleton = DocValues.unwrapSingleton(values);
+        final TwoPhaseIterator iterator;
+        if (singleton != null) {
+          iterator = new TwoPhaseIterator(singleton) {
+            @Override
+            public boolean matches() throws IOException {
+              final long ord = singleton.ordValue();
+              return ord >= minOrd && ord <= maxOrd;
+            }
+
+            @Override
+            public float matchCost() {
+              return 2; // 2 comparisons
+            }
+          };
+        } else {
+          iterator = new TwoPhaseIterator(values) {
+            @Override
+            public boolean matches() throws IOException {
+              for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) {
+                if (ord < minOrd) {
+                  continue;
+                }
+                // Values are sorted, so the first ord that is >= minOrd is our best candidate
+                return ord <= maxOrd;
+              }
+              return false; // all ords were < minOrd
+            }
+
+            @Override
+            public float matchCost() {
+              return 2; // 2 comparisons
+            }
+          };
+        }
+        return new ConstantScoreScorer(this, score(), iterator);
+      }
+    };
+  }
+
+}
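
The minOrd/maxOrd computation above relies on SortedSetDocValues.lookupTerm returning -(insertionPoint) - 1 for absent terms. A tiny arithmetic sketch with made-up ordinals (not tied to any real index):

  public class LookupTermOrdMath {
    public static void main(String[] args) {
      // Suppose a segment's sorted terms are {"b", "d", "f"} with ords 0, 1, 2.
      long lowerLookup = -2;            // lookupTerm("c"): absent, insertion point 1
      long minOrd = -1 - lowerLookup;   // = 1, the first term >= "c" is "d"
      long upperLookup = -3;            // lookupTerm("e"): absent, insertion point 2
      long maxOrd = -2 - upperLookup;   // = 1, the last term <= "e" is "d"
      System.out.println("[" + minOrd + ", " + maxOrd + "]"); // prints [1, 1]
    }
  }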

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71ca2a84/lucene/core/src/java/org/apache/lucene/search/IndexOrDocValuesQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/IndexOrDocValuesQuery.java b/lucene/core/src/java/org/apache/lucene/search/IndexOrDocValuesQuery.java
new file mode 100644
index 0000000..35067d2
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/search/IndexOrDocValuesQuery.java
@@ -0,0 +1,166 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.search;
+
+import java.io.IOException;
+import java.util.Set;
+
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.document.SortedNumericDocValuesField;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.Term;
+
+/**
+ * A query that uses either an index structure (points or terms) or doc values
+ * in order to run a query, depending which one is more efficient. This is
+ * typically useful for range queries, whose {@link Weight#scorer} is costly
+ * to create since it usually needs to sort large lists of doc ids. For
+ * instance, for a field that both indexed {@link LongPoint}s and
+ * {@link SortedNumericDocValuesField}s with the same values, an efficient
+ * range query could be created by doing:
+ * <pre class="prettyprint">
+ *   String field;
+ *   long minValue, maxValue;
+ *   Query pointQuery = LongPoint.newRangeQuery(field, minValue, maxValue);
+ *   Query dvQuery = SortedNumericDocValuesField.newRangeQuery(field, minValue, maxValue);
+ *   Query query = new IndexOrDocValuesQuery(pointQuery, dvQuery);
+ * </pre>
+ * The above query will be efficient as it will use points in the case that they
+ * perform better, ie. when we need a good lead iterator that will be almost
+ * entirely consumed; and doc values otherwise, ie. in the case that another
+ * part of the query is already leading iteration but we still need the ability
+ * to verify that some documents match.
+ * <p><b>NOTE</b>: This query currently only works well with point range/exact
+ * queries and their equivalent doc values queries.
+ * @lucene.experimental
+ */
+public final class IndexOrDocValuesQuery extends Query {
+
+  private final Query indexQuery, dvQuery;
+
+  /**
+   * Create an {@link IndexOrDocValuesQuery}. Both provided queries must match
+   * the same documents and give the same scores.
+   * @param indexQuery a query that has a good iterator but whose scorer may be costly to create
+   * @param dvQuery a query whose scorer is cheap to create that can quickly check whether a given document matches
+   */
+  public IndexOrDocValuesQuery(Query indexQuery, Query dvQuery) {
+    this.indexQuery = indexQuery;
+    this.dvQuery = dvQuery;
+  }
+
+  /** Return the wrapped query that may be costly to initialize but has a good
+   *  iterator. */
+  public Query getIndexQuery() {
+    return indexQuery;
+  }
+
+  /** Return the wrapped query that may be slow at identifying all matching
+   *  documents, but which is cheap to initialize and can efficiently
+   *  verify that some documents match. */
+  public Query getRandomAccessQuery() {
+    return dvQuery;
+  }
+
+  @Override
+  public String toString(String field) {
+    return indexQuery.toString(field);
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (sameClassAs(obj) == false) {
+      return false;
+    }
+    IndexOrDocValuesQuery that = (IndexOrDocValuesQuery) obj;
+    return indexQuery.equals(that.indexQuery) && dvQuery.equals(that.dvQuery);
+  }
+
+  @Override
+  public int hashCode() {
+    int h = classHash();
+    h = 31 * h + indexQuery.hashCode();
+    h = 31 * h + dvQuery.hashCode();
+    return h;
+  }
+
+  @Override
+  public Query rewrite(IndexReader reader) throws IOException {
+    Query indexRewrite = indexQuery.rewrite(reader);
+    Query dvRewrite = dvQuery.rewrite(reader);
+    if (indexQuery != indexRewrite || dvQuery != dvRewrite) {
+      return new IndexOrDocValuesQuery(indexRewrite, dvRewrite);
+    }
+    return this;
+  }
+
+  @Override
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    final Weight indexWeight = indexQuery.createWeight(searcher, needsScores, boost);
+    final Weight dvWeight = dvQuery.createWeight(searcher, needsScores, boost);
+    return new Weight(this) {
+      @Override
+      public void extractTerms(Set<Term> terms) {
+        indexWeight.extractTerms(terms);
+      }
+
+      @Override
+      public Explanation explain(LeafReaderContext context, int doc) throws IOException {
+        // We need to check a single doc, so the dv query should perform better
+        return dvWeight.explain(context, doc);
+      }
+
+      @Override
+      public BulkScorer bulkScorer(LeafReaderContext context) throws IOException {
+        // Bulk scorers need to consume the entire set of docs, so using an
+        // index structure should perform better
+        return indexWeight.bulkScorer(context);
+      }
+
+      @Override
+      public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
+        final ScorerSupplier indexScorerSupplier = indexWeight.scorerSupplier(context);
+        final ScorerSupplier dvScorerSupplier = dvWeight.scorerSupplier(context);
+        if (indexScorerSupplier == null || dvScorerSupplier == null) {
+          return null;
+        }
+        return new ScorerSupplier() {
+          @Override
+          public Scorer get(boolean randomAccess) throws IOException {
+            return (randomAccess ? dvScorerSupplier : indexScorerSupplier).get(randomAccess);
+          }
+
+          @Override
+          public long cost() {
+            return Math.min(indexScorerSupplier.cost(), dvScorerSupplier.cost());
+          }
+        };
+      }
+
+      @Override
+      public Scorer scorer(LeafReaderContext context) throws IOException {
+        ScorerSupplier scorerSupplier = scorerSupplier(context);
+        if (scorerSupplier == null) {
+          return null;
+        }
+        return scorerSupplier.get(false);
+      }
+    };
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71ca2a84/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java b/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java
index 7c997ca..f1b8551 100644
--- a/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java
@@ -281,7 +281,7 @@ public abstract class PointRangeQuery extends Query {
 
             @Override
             public Scorer get(boolean randomAccess) throws IOException {
-              if (values.getDocCount() == reader.maxDoc()
+              if (false && values.getDocCount() == reader.maxDoc()
                   && values.getDocCount() == values.size()
                   && cost() > reader.maxDoc() / 2) {
                 // If all docs have exactly one value and the cost is greater

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71ca2a84/lucene/core/src/test/org/apache/lucene/search/TestDocValuesQueries.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestDocValuesQueries.java b/lucene/core/src/test/org/apache/lucene/search/TestDocValuesQueries.java
new file mode 100644
index 0000000..501538f
--- /dev/null
+++ b/lucene/core/src/test/org/apache/lucene/search/TestDocValuesQueries.java
@@ -0,0 +1,238 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.search;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.SortedDocValuesField;
+import org.apache.lucene.document.SortedNumericDocValuesField;
+import org.apache.lucene.document.SortedSetDocValuesField;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.TestUtil;
+
+public class TestDocValuesQueries extends LuceneTestCase {
+
+  public void testDuelPointRangeSortedNumericRangeQuery() throws IOException {
+    doTestDuelPointRangeNumericRangeQuery(true, 1);
+  }
+
+  public void testDuelPointRangeMultivaluedSortedNumericRangeQuery() throws IOException {
+    doTestDuelPointRangeNumericRangeQuery(true, 3);
+  }
+
+  public void testDuelPointRangeNumericRangeQuery() throws IOException {
+    doTestDuelPointRangeNumericRangeQuery(false, 1);
+  }
+
+  private void doTestDuelPointRangeNumericRangeQuery(boolean sortedNumeric, int maxValuesPerDoc) throws IOException {
+    final int iters = atLeast(10);
+    for (int iter = 0; iter < iters; ++iter) {
+      Directory dir = newDirectory();
+      RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
+      final int numDocs = atLeast(100);
+      for (int i = 0; i < numDocs; ++i) {
+        Document doc = new Document();
+        final int numValues = TestUtil.nextInt(random(), 0, maxValuesPerDoc);
+        for (int j = 0; j < numValues; ++j) {
+          final long value = TestUtil.nextLong(random(), -100, 10000);
+          if (sortedNumeric) {
+            doc.add(new SortedNumericDocValuesField("dv", value));
+          } else {
+            doc.add(new NumericDocValuesField("dv", value));
+          }
+          doc.add(new LongPoint("idx", value));
+        }
+        iw.addDocument(doc);
+      }
+      if (random().nextBoolean()) {
+        iw.deleteDocuments(LongPoint.newRangeQuery("idx", 0L, 10L));
+      }
+      final IndexReader reader = iw.getReader();
+      final IndexSearcher searcher = newSearcher(reader, false);
+      iw.close();
+
+      for (int i = 0; i < 100; ++i) {
+        final long min = random().nextBoolean() ? Long.MIN_VALUE : TestUtil.nextLong(random(), -100, 10000);
+        final long max = random().nextBoolean() ? Long.MAX_VALUE : TestUtil.nextLong(random(), -100, 10000);
+        final Query q1 = LongPoint.newRangeQuery("idx", min, max);
+        final Query q2;
+        if (sortedNumeric) {
+          q2 = SortedNumericDocValuesField.newRangeQuery("dv", min, max);
+        } else {
+          q2 = NumericDocValuesField.newRangeQuery("dv", min, max);
+        }
+        assertSameMatches(searcher, q1, q2, false);
+      }
+
+      reader.close();
+      dir.close();
+    }
+  }
+
+  private void doTestDuelPointRangeSortedRangeQuery(boolean sortedSet, int maxValuesPerDoc) throws IOException {
+    final int iters = atLeast(10);
+    for (int iter = 0; iter < iters; ++iter) {
+      Directory dir = newDirectory();
+      RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
+      final int numDocs = atLeast(100);
+      for (int i = 0; i < numDocs; ++i) {
+        Document doc = new Document();
+        final int numValues = TestUtil.nextInt(random(), 0, maxValuesPerDoc);
+        for (int j = 0; j < numValues; ++j) {
+          final long value = TestUtil.nextLong(random(), -100, 10000);
+          byte[] encoded = new byte[Long.BYTES];
+          LongPoint.encodeDimension(value, encoded, 0);
+          if (sortedSet) {
+            doc.add(new SortedSetDocValuesField("dv", new BytesRef(encoded)));
+          } else {
+            doc.add(new SortedDocValuesField("dv", new BytesRef(encoded)));
+          }
+          doc.add(new LongPoint("idx", value));
+        }
+        iw.addDocument(doc);
+      }
+      if (random().nextBoolean()) {
+        iw.deleteDocuments(LongPoint.newRangeQuery("idx", 0L, 10L));
+      }
+      final IndexReader reader = iw.getReader();
+      final IndexSearcher searcher = newSearcher(reader, false);
+      iw.close();
+
+      for (int i = 0; i < 100; ++i) {
+        long min = random().nextBoolean() ? Long.MIN_VALUE : TestUtil.nextLong(random(), -100, 10000);
+        long max = random().nextBoolean() ? Long.MAX_VALUE : TestUtil.nextLong(random(), -100, 10000);
+        byte[] encodedMin = new byte[Long.BYTES];
+        byte[] encodedMax = new byte[Long.BYTES];
+        LongPoint.encodeDimension(min, encodedMin, 0);
+        LongPoint.encodeDimension(max, encodedMax, 0);
+        boolean includeMin = true;
+        boolean includeMax = true;
+        if (random().nextBoolean()) {
+          includeMin = false;
+          min++;
+        }
+        if (random().nextBoolean()) {
+          includeMax = false;
+          max--;
+        }
+        final Query q1 = LongPoint.newRangeQuery("idx", min, max);
+        final Query q2;
+        if (sortedSet) {
+          q2 = SortedSetDocValuesField.newRangeQuery("dv",
+              min == Long.MIN_VALUE && random().nextBoolean() ? null : new BytesRef(encodedMin),
+              max == Long.MAX_VALUE && random().nextBoolean() ? null : new BytesRef(encodedMax),
+              includeMin, includeMax);
+        } else {
+          q2 = SortedDocValuesField.newRangeQuery("dv",
+              min == Long.MIN_VALUE && random().nextBoolean() ? null : new BytesRef(encodedMin),
+              max == Long.MAX_VALUE && random().nextBoolean() ? null : new BytesRef(encodedMax),
+              includeMin, includeMax);
+        }
+        assertSameMatches(searcher, q1, q2, false);
+      }
+
+      reader.close();
+      dir.close();
+    }
+  }
+
+  public void testDuelPointRangeSortedSetRangeQuery() throws IOException {
+    doTestDuelPointRangeSortedRangeQuery(true, 1);
+  }
+
+  public void testDuelPointRangeMultivaluedSortedSetRangeQuery() throws IOException {
+    doTestDuelPointRangeSortedRangeQuery(true, 3);
+  }
+
+  public void testDuelPointRangeSortedRangeQuery() throws IOException {
+    doTestDuelPointRangeSortedRangeQuery(false, 1);
+  }
+
+  private void assertSameMatches(IndexSearcher searcher, Query q1, Query q2, boolean scores) throws IOException {
+    final int maxDoc = searcher.getIndexReader().maxDoc();
+    final TopDocs td1 = searcher.search(q1, maxDoc, scores ? Sort.RELEVANCE : Sort.INDEXORDER);
+    final TopDocs td2 = searcher.search(q2, maxDoc, scores ? Sort.RELEVANCE : Sort.INDEXORDER);
+    assertEquals(td1.totalHits, td2.totalHits);
+    for (int i = 0; i < td1.scoreDocs.length; ++i) {
+      assertEquals(td1.scoreDocs[i].doc, td2.scoreDocs[i].doc);
+      if (scores) {
+        assertEquals(td1.scoreDocs[i].score, td2.scoreDocs[i].score, 10e-7);
+      }
+    }
+  }
+
+  public void testEquals() {
+    Query q1 = SortedNumericDocValuesField.newRangeQuery("foo", 3, 5);
+    QueryUtils.checkEqual(q1, SortedNumericDocValuesField.newRangeQuery("foo", 3, 5));
+    QueryUtils.checkUnequal(q1, SortedNumericDocValuesField.newRangeQuery("foo", 3, 6));
+    QueryUtils.checkUnequal(q1, SortedNumericDocValuesField.newRangeQuery("foo", 4, 5));
+    QueryUtils.checkUnequal(q1, SortedNumericDocValuesField.newRangeQuery("bar", 3, 5));
+
+    Query q2 = SortedSetDocValuesField.newRangeQuery("foo", new BytesRef("bar"), new BytesRef("baz"), true, true);
+    QueryUtils.checkEqual(q2, SortedSetDocValuesField.newRangeQuery("foo", new BytesRef("bar"), new BytesRef("baz"), true, true));
+    QueryUtils.checkUnequal(q2, SortedSetDocValuesField.newRangeQuery("foo", new BytesRef("baz"), new BytesRef("baz"), true, true));
+    QueryUtils.checkUnequal(q2, SortedSetDocValuesField.newRangeQuery("foo", new BytesRef("bar"), new BytesRef("bar"), true, true));
+    QueryUtils.checkUnequal(q2, SortedSetDocValuesField.newRangeQuery("quux", new BytesRef("bar"), new BytesRef("baz"), true, true));
+  }
+
+  public void testToString() {
+    Query q1 = SortedNumericDocValuesField.newRangeQuery("foo", 3, 5);
+    assertEquals("foo:[3 TO 5]", q1.toString());
+    assertEquals("[3 TO 5]", q1.toString("foo"));
+    assertEquals("foo:[3 TO 5]", q1.toString("bar"));
+
+    Query q2 = SortedSetDocValuesField.newRangeQuery("foo", new BytesRef("bar"), new BytesRef("baz"), true, true);
+    assertEquals("foo:[[62 61 72] TO [62 61 7a]]", q2.toString());
+    q2 = SortedSetDocValuesField.newRangeQuery("foo", new BytesRef("bar"), new BytesRef("baz"), false, true);
+    assertEquals("foo:{[62 61 72] TO [62 61 7a]]", q2.toString());
+    q2 = SortedSetDocValuesField.newRangeQuery("foo", new BytesRef("bar"), new BytesRef("baz"), false, false);
+    assertEquals("foo:{[62 61 72] TO [62 61 7a]}", q2.toString());
+    q2 = SortedSetDocValuesField.newRangeQuery("foo", new BytesRef("bar"), null, true, true);
+    assertEquals("foo:[[62 61 72] TO *}", q2.toString());
+    q2 = SortedSetDocValuesField.newRangeQuery("foo", null, new BytesRef("baz"), true, true);
+    assertEquals("foo:{* TO [62 61 7a]]", q2.toString());
+    assertEquals("{* TO [62 61 7a]]", q2.toString("foo"));
+    assertEquals("foo:{* TO [62 61 7a]]", q2.toString("bar"));
+  }
+
+  public void testMissingField() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
+    iw.addDocument(new Document());
+    IndexReader reader = iw.getReader();
+    iw.close();
+    IndexSearcher searcher = newSearcher(reader);
+    for (Query query : Arrays.asList(
+        NumericDocValuesField.newRangeQuery("foo", 2, 4),
+        SortedNumericDocValuesField.newRangeQuery("foo", 2, 4),
+        SortedDocValuesField.newRangeQuery("foo", new BytesRef("abc"), new BytesRef("bcd"), random().nextBoolean(), random().nextBoolean()),
+        SortedSetDocValuesField.newRangeQuery("foo", new BytesRef("abc"), new BytesRef("bcd"), random().nextBoolean(), random().nextBoolean()))) {
+      Weight w = searcher.createNormalizedWeight(query, random().nextBoolean());
+      assertNull(w.scorer(searcher.getIndexReader().leaves().get(0)));
+    }
+    reader.close();
+    dir.close();
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71ca2a84/lucene/core/src/test/org/apache/lucene/search/TestIndexOrDocValuesQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestIndexOrDocValuesQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestIndexOrDocValuesQuery.java
new file mode 100644
index 0000000..8b81822
--- /dev/null
+++ b/lucene/core/src/test/org/apache/lucene/search/TestIndexOrDocValuesQuery.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.search;
+
+import java.io.IOException;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BooleanClause.Occur;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.TestUtil;
+
+public class TestIndexOrDocValuesQuery extends LuceneTestCase {
+
+  public void testUseIndexForSelectiveQueries() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig()
+        // relies on costs and PointValues.estimateCost so we need the default codec
+        .setCodec(TestUtil.getDefaultCodec()));
+    for (int i = 0; i < 2000; ++i) {
+      Document doc = new Document();
+      if (i == 42) {
+        doc.add(new StringField("f1", "bar", Store.NO));
+        doc.add(new LongPoint("f2", 42L));
+        doc.add(new NumericDocValuesField("f2", 42L));
+      } else if (i == 100) {
+        doc.add(new StringField("f1", "foo", Store.NO));
+        doc.add(new LongPoint("f2", 2L));
+        doc.add(new NumericDocValuesField("f2", 2L));
+      } else {
+        doc.add(new StringField("f1", "bar", Store.NO));
+        doc.add(new LongPoint("f2", 2L));
+        doc.add(new NumericDocValuesField("f2", 2L));
+      }
+      w.addDocument(doc);
+    }
+    w.forceMerge(1);
+    IndexReader reader = DirectoryReader.open(w);
+    IndexSearcher searcher = newSearcher(reader);
+    searcher.setQueryCache(null);
+
+    // The term query is more selective, so the IndexOrDocValuesQuery should use doc values
+    final Query q1 = new BooleanQuery.Builder()
+        .add(new TermQuery(new Term("f1", "foo")), Occur.MUST)
+        .add(new IndexOrDocValuesQuery(LongPoint.newExactQuery("f2", 2), NumericDocValuesField.newRangeQuery("f2", 2L, 2L)), Occur.MUST)
+        .build();
+
+    final Weight w1 = searcher.createNormalizedWeight(q1, random().nextBoolean());
+    final Scorer s1 = w1.scorer(searcher.getIndexReader().leaves().get(0));
+    assertNotNull(s1.twoPhaseIterator()); // means we use doc values
+
+    // The term query is less selective, so the IndexOrDocValuesQuery should use points
+    final Query q2 = new BooleanQuery.Builder()
+        .add(new TermQuery(new Term("f1", "bar")), Occur.MUST)
+        .add(new IndexOrDocValuesQuery(LongPoint.newExactQuery("f2", 42), NumericDocValuesField.newRangeQuery("f2", 42L, 42L)), Occur.MUST)
+        .build();
+
+    final Weight w2 = searcher.createNormalizedWeight(q2, random().nextBoolean());
+    final Scorer s2 = w2.scorer(searcher.getIndexReader().leaves().get(0));
+    assertNull(s2.twoPhaseIterator()); // means we use points
+
+    reader.close();
+    w.close();
+    dir.close();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71ca2a84/lucene/sandbox/src/java/org/apache/lucene/search/DocValuesRangeQuery.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/java/org/apache/lucene/search/DocValuesRangeQuery.java b/lucene/sandbox/src/java/org/apache/lucene/search/DocValuesRangeQuery.java
deleted file mode 100644
index 3d4feb9..0000000
--- a/lucene/sandbox/src/java/org/apache/lucene/search/DocValuesRangeQuery.java
+++ /dev/null
@@ -1,276 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.search;
-
-import java.io.IOException;
-import java.util.Objects;
-
-import org.apache.lucene.index.DocValues;
-import org.apache.lucene.index.DocValuesType;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.PointValues;
-import org.apache.lucene.index.SortedNumericDocValues;
-import org.apache.lucene.index.SortedSetDocValues;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.util.BytesRef;
-
-/**
- * A range query that works on top of the doc values APIs. Such queries are
- * usually slow since they do not use an inverted index. However, in the
- * dense case where most documents match this query, it <b>might</b> be as
- * fast or faster than a regular {@link PointRangeQuery}.
- *
- * <b>NOTE:</b> This query is typically best used within a
- * {@link IndexOrDocValuesQuery} alongside a query that uses an indexed
- * structure such as {@link PointValues points} or {@link Terms terms},
- * which allows to run the query on doc values when that would be more
- * efficient, and using an index otherwise.
- *
- * @lucene.experimental
- */
-public final class DocValuesRangeQuery extends Query {
-
-  /** Create a new numeric range query on a numeric doc-values field. The field
-   *  must has been indexed with either {@link DocValuesType#NUMERIC} or
-   *  {@link DocValuesType#SORTED_NUMERIC} doc values. */
-  public static Query newLongRange(String field, Long lowerVal, Long upperVal, boolean includeLower, boolean includeUpper) {
-    return new DocValuesRangeQuery(field, lowerVal, upperVal, includeLower, includeUpper);
-  }
-
-  /** Create a new numeric range query on a numeric doc-values field. The field
-   *  must has been indexed with {@link DocValuesType#SORTED} or
-   *  {@link DocValuesType#SORTED_SET} doc values. */
-  public static Query newBytesRefRange(String field, BytesRef lowerVal, BytesRef upperVal, boolean includeLower, boolean includeUpper) {
-    return new DocValuesRangeQuery(field, deepCopyOf(lowerVal), deepCopyOf(upperVal), includeLower, includeUpper);
-  }
-
-  private static BytesRef deepCopyOf(BytesRef b) {
-    if (b == null) {
-      return null;
-    } else {
-      return BytesRef.deepCopyOf(b);
-    }
-  }
-
-  private final String field;
-  private final Object lowerVal, upperVal;
-  private final boolean includeLower, includeUpper;
-
-  private DocValuesRangeQuery(String field, Object lowerVal, Object upperVal, boolean includeLower, boolean includeUpper) {
-    this.field = Objects.requireNonNull(field);
-    this.lowerVal = lowerVal;
-    this.upperVal = upperVal;
-    this.includeLower = includeLower;
-    this.includeUpper = includeUpper;
-  }
-
-  @Override
-  public boolean equals(Object other) {
-    return sameClassAs(other) &&
-           equalsTo(getClass().cast(other));
-  }
-
-  private boolean equalsTo(DocValuesRangeQuery other) {
-    return field.equals(other.field) && 
-           Objects.equals(lowerVal, other.lowerVal) && 
-           Objects.equals(upperVal, other.upperVal) && 
-           includeLower == other.includeLower && 
-           includeUpper == other.includeUpper;
-  }
-
-  @Override
-  public int hashCode() {
-    return 31 * classHash() + Objects.hash(field, lowerVal, upperVal, includeLower, includeUpper);
-  }
-
-  public String getField() {
-    return field;
-  }
-
-  public Object getLowerVal() {
-    return lowerVal;
-  }
-
-  public Object getUpperVal() {
-    return upperVal;
-  }
-
-  public boolean isIncludeLower() {
-    return includeLower;
-  }
-
-  public boolean isIncludeUpper() {
-    return includeUpper;
-  }
-
-  @Override
-  public String toString(String field) {
-    StringBuilder sb = new StringBuilder();
-    if (this.field.equals(field) == false) {
-      sb.append(this.field).append(':');
-    }
-    sb.append(includeLower ? '[' : '{');
-    sb.append(lowerVal == null ? "*" : lowerVal.toString());
-    sb.append(" TO ");
-    sb.append(upperVal == null ? "*" : upperVal.toString());
-    sb.append(includeUpper ? ']' : '}');
-    return sb.toString();
-  }
-
-  @Override
-  public Query rewrite(IndexReader reader) throws IOException {
-    if (lowerVal == null && upperVal == null) {
-      return new FieldValueQuery(field);
-    }
-    return super.rewrite(reader);
-  }
-
-  @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
-    if (lowerVal == null && upperVal == null) {
-      throw new IllegalStateException("Both min and max values must not be null, call rewrite first");
-    }
-
-    return new ConstantScoreWeight(DocValuesRangeQuery.this, boost) {
-
-      @Override
-      public Scorer scorer(LeafReaderContext context) throws IOException {
-        final TwoPhaseIterator iterator = createTwoPhaseIterator(context);
-        if (iterator == null) {
-          return null;
-        }
-        return new ConstantScoreScorer(this, score(), iterator);
-      }
-
-      private TwoPhaseIterator createTwoPhaseIterator(LeafReaderContext context) throws IOException {
-        if (lowerVal instanceof Long || upperVal instanceof Long) {
-
-          final SortedNumericDocValues values = DocValues.getSortedNumeric(context.reader(), field);
-
-          final long min;
-          if (lowerVal == null) {
-            min = Long.MIN_VALUE;
-          } else if (includeLower) {
-            min = (long) lowerVal;
-          } else {
-            if ((long) lowerVal == Long.MAX_VALUE) {
-              return null;
-            }
-            min = 1 + (long) lowerVal;
-          }
-
-          final long max;
-          if (upperVal == null) {
-            max = Long.MAX_VALUE;
-          } else if (includeUpper) {
-            max = (long) upperVal;
-          } else {
-            if ((long) upperVal == Long.MIN_VALUE) {
-              return null;
-            }
-            max = -1 + (long) upperVal;
-          }
-
-          if (min > max) {
-            return null;
-          }
-
-          return new TwoPhaseIterator(values) {
-
-            @Override
-            public boolean matches() throws IOException {
-              final int count = values.docValueCount();
-              assert count > 0;
-              for (int i = 0; i < count; ++i) {
-                final long value = values.nextValue();
-                if (value >= min && value <= max) {
-                  return true;
-                }
-              }
-              return false;
-            }
-
-            @Override
-            public float matchCost() {
-              return 2; // 2 comparisons
-            }
-
-          };
-
-        } else if (lowerVal instanceof BytesRef || upperVal instanceof BytesRef) {
-
-          final SortedSetDocValues values = DocValues.getSortedSet(context.reader(), field);
-
-          final long minOrd;
-          if (lowerVal == null) {
-            minOrd = 0;
-          } else {
-            final long ord = values.lookupTerm((BytesRef) lowerVal);
-            if (ord < 0) {
-              minOrd = -1 - ord;
-            } else if (includeLower) {
-              minOrd = ord;
-            } else {
-              minOrd = ord + 1;
-            }
-          }
-
-          final long maxOrd;
-          if (upperVal == null) {
-            maxOrd = values.getValueCount() - 1;
-          } else {
-            final long ord = values.lookupTerm((BytesRef) upperVal);
-            if (ord < 0) {
-              maxOrd = -2 - ord;
-            } else if (includeUpper) {
-              maxOrd = ord;
-            } else {
-              maxOrd = ord - 1;
-            }
-          }
-
-          if (minOrd > maxOrd) {
-            return null;
-          }
-
-          return new TwoPhaseIterator(values) {
-
-            @Override
-            public boolean matches() throws IOException {
-              for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) {
-                if (ord >= minOrd && ord <= maxOrd) {
-                  return true;
-                }
-              }
-              return false;
-            }
-
-            @Override
-            public float matchCost() {
-              return 2; // 2 comparisons
-            }
-          };
-
-        } else {
-          throw new AssertionError();
-        }
-      }
-    };
-  }
-
-}
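
For reference, a minimal sketch of the pattern that replaces DocValuesRangeQuery for numeric fields: the doc-values field factory methods supply the slow, doc-values-backed query, and an IndexOrDocValuesQuery chooses between that and the points-backed query. This is not part of the patch; the class name, the "price" field, and the bounds are hypothetical, while the calls (LongPoint.newRangeQuery, NumericDocValuesField.newRangeQuery, the IndexOrDocValuesQuery constructor) are the ones used elsewhere in this change.

import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.search.IndexOrDocValuesQuery;
import org.apache.lucene.search.Query;

public class RangeQueryMigrationSketch {
  // "price" and the min/max bounds are illustrative; any long field indexed
  // with both LongPoint and NumericDocValuesField would work the same way.
  static Query newPriceRange(long min, long max) {
    Query indexQuery = LongPoint.newRangeQuery("price", min, max);           // runs on the points index
    Query dvQuery = NumericDocValuesField.newRangeQuery("price", min, max);  // runs on doc values
    // The wrapper defers the choice to whichever side is estimated to be
    // cheaper where the query is consumed.
    return new IndexOrDocValuesQuery(indexQuery, dvQuery);
  }
}

Unlike DocValuesRangeQuery, which had to be chosen up front, this pattern defers the index-versus-doc-values decision until the query is executed.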

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71ca2a84/lucene/sandbox/src/java/org/apache/lucene/search/IndexOrDocValuesQuery.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/java/org/apache/lucene/search/IndexOrDocValuesQuery.java b/lucene/sandbox/src/java/org/apache/lucene/search/IndexOrDocValuesQuery.java
deleted file mode 100644
index 0f9e8e3..0000000
--- a/lucene/sandbox/src/java/org/apache/lucene/search/IndexOrDocValuesQuery.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.search;
-
-import java.io.IOException;
-
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.LeafReaderContext;
-
-/**
- * A query that uses either an index (points or terms) or doc values in order
- * to run a range query, depending which one is more efficient.
- */
-public final class IndexOrDocValuesQuery extends Query {
-
-  private final Query indexQuery, dvQuery;
-
-  /**
-   * Constructor that takes both a query that executes on an index structure
-   * like the inverted index or the points tree, and another query that
-   * executes on doc values. Both queries must match the same documents and
-   * attribute constant scores.
-   */
-  public IndexOrDocValuesQuery(Query indexQuery, Query dvQuery) {
-    this.indexQuery = indexQuery;
-    this.dvQuery = dvQuery;
-  }
-
-  @Override
-  public String toString(String field) {
-    return indexQuery.toString(field);
-  }
-
-  @Override
-  public boolean equals(Object obj) {
-    if (sameClassAs(obj) == false) {
-      return false;
-    }
-    IndexOrDocValuesQuery that = (IndexOrDocValuesQuery) obj;
-    return indexQuery.equals(that.indexQuery) && dvQuery.equals(that.dvQuery);
-  }
-
-  @Override
-  public int hashCode() {
-    int h = classHash();
-    h = 31 * h + indexQuery.hashCode();
-    h = 31 * h + dvQuery.hashCode();
-    return h;
-  }
-
-  @Override
-  public Query rewrite(IndexReader reader) throws IOException {
-    Query indexRewrite = indexQuery.rewrite(reader);
-    Query dvRewrite = dvQuery.rewrite(reader);
-    if (indexQuery != indexRewrite || dvQuery != dvRewrite) {
-      return new IndexOrDocValuesQuery(indexRewrite, dvRewrite);
-    }
-    return this;
-  }
-
-  @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
-    final Weight indexWeight = indexQuery.createWeight(searcher, needsScores, boost);
-    final Weight dvWeight = dvQuery.createWeight(searcher, needsScores, boost);
-    return new ConstantScoreWeight(this, boost) {
-      @Override
-      public BulkScorer bulkScorer(LeafReaderContext context) throws IOException {
-        return indexWeight.bulkScorer(context);
-      }
-
-      @Override
-      public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
-        final ScorerSupplier indexScorerSupplier = indexWeight.scorerSupplier(context);
-        final ScorerSupplier dvScorerSupplier = dvWeight.scorerSupplier(context); 
-        if (indexScorerSupplier == null || dvScorerSupplier == null) {
-          return null;
-        }
-        return new ScorerSupplier() {
-          @Override
-          public Scorer get(boolean randomAccess) throws IOException {
-            return (randomAccess ? dvScorerSupplier : indexScorerSupplier).get(randomAccess);
-          }
-
-          @Override
-          public long cost() {
-            return Math.min(indexScorerSupplier.cost(), dvScorerSupplier.cost());
-          }
-        };
-      }
-
-      @Override
-      public Scorer scorer(LeafReaderContext context) throws IOException {
-        ScorerSupplier scorerSupplier = scorerSupplier(context);
-        if (scorerSupplier == null) {
-          return null;
-        }
-        return scorerSupplier.get(false);
-      }
-    };
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71ca2a84/lucene/sandbox/src/test/org/apache/lucene/search/TestDocValuesRangeQuery.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/search/TestDocValuesRangeQuery.java b/lucene/sandbox/src/test/org/apache/lucene/search/TestDocValuesRangeQuery.java
deleted file mode 100644
index c5ca64f..0000000
--- a/lucene/sandbox/src/test/org/apache/lucene/search/TestDocValuesRangeQuery.java
+++ /dev/null
@@ -1,307 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.search;
-
-import java.io.IOException;
-
-import org.apache.lucene.document.LongPoint;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field.Store;
-import org.apache.lucene.document.NumericDocValuesField;
-import org.apache.lucene.document.SortedDocValuesField;
-import org.apache.lucene.document.SortedNumericDocValuesField;
-import org.apache.lucene.document.SortedSetDocValuesField;
-import org.apache.lucene.document.StringField;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.search.BooleanClause.Occur;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.NumericUtils;
-import org.apache.lucene.util.TestUtil;
-
-public class TestDocValuesRangeQuery extends LuceneTestCase {
-
-  public void testDuelNumericRangeQuery() throws IOException {
-    final int iters = atLeast(10);
-      for (int iter = 0; iter < iters; ++iter) {
-      Directory dir = newDirectory();
-      RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
-      final int numDocs = atLeast(100);
-      for (int i = 0; i < numDocs; ++i) {
-        Document doc = new Document();
-        final int numValues = random().nextInt(2);
-        for (int j = 0; j < numValues; ++j) {
-          final long value = TestUtil.nextLong(random(), -100, 10000);
-          doc.add(new SortedNumericDocValuesField("dv", value));
-          doc.add(new LongPoint("idx", value));
-        }
-        iw.addDocument(doc);
-      }
-      if (random().nextBoolean()) {
-        iw.deleteDocuments(LongPoint.newRangeQuery("idx", 0L, 10L));
-      }
-      iw.commit();
-      final IndexReader reader = iw.getReader();
-      final IndexSearcher searcher = newSearcher(reader, false);
-      iw.close();
-
-      for (int i = 0; i < 100; ++i) {
-        final Long min = TestUtil.nextLong(random(), -100, 1000);
-        final Long max = TestUtil.nextLong(random(), -100, 1000);
-        final Query q1 = LongPoint.newRangeQuery("idx", min, max);
-        final Query q2 = DocValuesRangeQuery.newLongRange("dv", min, max, true, true);
-        assertSameMatches(searcher, q1, q2, false);
-      }
-
-      reader.close();
-      dir.close();
-    }
-  }
-
-  private static BytesRef toSortableBytes(Long l) {
-    if (l == null) {
-      return null;
-    } else {
-      byte[] bytes = new byte[Long.BYTES];
-      NumericUtils.longToSortableBytes(l, bytes, 0);
-      return new BytesRef(bytes);
-    }
-  }
-
-  public void testDuelNumericSorted() throws IOException {
-    Directory dir = newDirectory();
-    RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
-    final int numDocs = atLeast(100);
-    for (int i = 0; i < numDocs; ++i) {
-      Document doc = new Document();
-      final int numValues = random().nextInt(3);
-      for (int j = 0; j < numValues; ++j) {
-        final long value = TestUtil.nextLong(random(), -100, 10000);
-        doc.add(new SortedNumericDocValuesField("dv1", value));
-        doc.add(new SortedSetDocValuesField("dv2", toSortableBytes(value)));
-      }
-      iw.addDocument(doc);
-    }
-    if (random().nextBoolean()) {
-      iw.deleteDocuments(DocValuesRangeQuery.newLongRange("dv1", 0L, 10L, true, true));
-    }
-    iw.commit();
-    final IndexReader reader = iw.getReader();
-    final IndexSearcher searcher = newSearcher(reader);
-    iw.close();
-
-    for (int i = 0; i < 100; ++i) {
-      final Long min = random().nextBoolean() ? null : TestUtil.nextLong(random(), -100, 1000);
-      final Long max = random().nextBoolean() ? null : TestUtil.nextLong(random(), -100, 1000);
-      final boolean minInclusive = random().nextBoolean();
-      final boolean maxInclusive = random().nextBoolean();
-      final Query q1 = DocValuesRangeQuery.newLongRange("dv1", min, max, minInclusive, maxInclusive);
-      final Query q2 = DocValuesRangeQuery.newBytesRefRange("dv2", toSortableBytes(min), toSortableBytes(max), minInclusive, maxInclusive);
-      assertSameMatches(searcher, q1, q2, true);
-    }
-
-    reader.close();
-    dir.close();
-  }
-
-  public void testScore() throws IOException {
-    Directory dir = newDirectory();
-    RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
-    final int numDocs = atLeast(100);
-    for (int i = 0; i < numDocs; ++i) {
-      Document doc = new Document();
-      final int numValues = random().nextInt(3);
-      for (int j = 0; j < numValues; ++j) {
-        final long value = TestUtil.nextLong(random(), -100, 10000);
-        doc.add(new SortedNumericDocValuesField("dv1", value));
-        doc.add(new SortedSetDocValuesField("dv2", toSortableBytes(value)));
-      }
-      iw.addDocument(doc);
-    }
-    if (random().nextBoolean()) {
-      iw.deleteDocuments(DocValuesRangeQuery.newLongRange("dv1", 0L, 10L, true, true));
-    }
-    iw.commit();
-    final IndexReader reader = iw.getReader();
-    final IndexSearcher searcher = newSearcher(reader);
-    iw.close();
-
-    for (int i = 0; i < 100; ++i) {
-      final Long min = random().nextBoolean() ? null : TestUtil.nextLong(random(), -100, 1000);
-      final Long max = random().nextBoolean() ? null : TestUtil.nextLong(random(), -100, 1000);
-      final boolean minInclusive = random().nextBoolean();
-      final boolean maxInclusive = random().nextBoolean();
-
-      final float boost = random().nextFloat() * 10;
-
-      final Query q1 = new BoostQuery(DocValuesRangeQuery.newLongRange("dv1", min, max, minInclusive, maxInclusive), boost);
-      final Query csq1 = new BoostQuery(new ConstantScoreQuery(DocValuesRangeQuery.newLongRange("dv1", min, max, minInclusive, maxInclusive)), boost);
-      assertSameMatches(searcher, q1, csq1, true);
-
-      final Query q2 = new BoostQuery(DocValuesRangeQuery.newBytesRefRange("dv2", toSortableBytes(min), toSortableBytes(max), minInclusive, maxInclusive), boost);
-      final Query csq2 = new BoostQuery(new ConstantScoreQuery(DocValuesRangeQuery.newBytesRefRange("dv2", toSortableBytes(min), toSortableBytes(max), minInclusive, maxInclusive)), boost);
-      assertSameMatches(searcher, q2, csq2, true);
-    }
-
-    reader.close();
-    dir.close();
-  }
-
-  public void testApproximation() throws IOException {
-    Directory dir = newDirectory();
-    RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
-    final int numDocs = atLeast(100);
-    for (int i = 0; i < numDocs; ++i) {
-      Document doc = new Document();
-      final int numValues = random().nextInt(3);
-      for (int j = 0; j < numValues; ++j) {
-        final long value = TestUtil.nextLong(random(), -100, 10000);
-        doc.add(new SortedNumericDocValuesField("dv1", value));
-        doc.add(new SortedSetDocValuesField("dv2", toSortableBytes(value)));
-        doc.add(new LongPoint("idx", value));
-        doc.add(new StringField("f", random().nextBoolean() ? "a" : "b", Store.NO));
-      }
-      iw.addDocument(doc);
-    }
-    if (random().nextBoolean()) {
-      iw.deleteDocuments(LongPoint.newRangeQuery("idx", 0L, 10L));
-    }
-    iw.commit();
-    final IndexReader reader = iw.getReader();
-    final IndexSearcher searcher = newSearcher(reader, false);
-    iw.close();
-
-    for (int i = 0; i < 100; ++i) {
-      final Long min = TestUtil.nextLong(random(), -100, 1000);
-      final Long max = TestUtil.nextLong(random(), -100, 1000);
-
-      BooleanQuery.Builder ref = new BooleanQuery.Builder();
-      ref.add(LongPoint.newRangeQuery("idx", min, max), Occur.FILTER);
-      ref.add(new TermQuery(new Term("f", "a")), Occur.MUST);
-
-      BooleanQuery.Builder bq1 = new BooleanQuery.Builder();
-      bq1.add(DocValuesRangeQuery.newLongRange("dv1", min, max, true, true), Occur.FILTER);
-      bq1.add(new TermQuery(new Term("f", "a")), Occur.MUST);
-
-      assertSameMatches(searcher, ref.build(), bq1.build(), true);
-
-      BooleanQuery.Builder bq2 = new BooleanQuery.Builder();
-      bq2.add(DocValuesRangeQuery.newBytesRefRange("dv2", toSortableBytes(min), toSortableBytes(max), true, true), Occur.FILTER);
-      bq2.add(new TermQuery(new Term("f", "a")), Occur.MUST);
-
-      assertSameMatches(searcher, ref.build(), bq2.build(), true);
-    }
-
-    reader.close();
-    dir.close();
-  }
-
-  private void assertSameMatches(IndexSearcher searcher, Query q1, Query q2, boolean scores) throws IOException {
-    final int maxDoc = searcher.getIndexReader().maxDoc();
-    final TopDocs td1 = searcher.search(q1, maxDoc, scores ? Sort.RELEVANCE : Sort.INDEXORDER);
-    final TopDocs td2 = searcher.search(q2, maxDoc, scores ? Sort.RELEVANCE : Sort.INDEXORDER);
-    assertEquals(td1.totalHits, td2.totalHits);
-    for (int i = 0; i < td1.scoreDocs.length; ++i) {
-      assertEquals(td1.scoreDocs[i].doc, td2.scoreDocs[i].doc);
-      if (scores) {
-        assertEquals(td1.scoreDocs[i].score, td2.scoreDocs[i].score, 10e-7);
-      }
-    }
-  }
-
-  public void testToString() {
-    assertEquals("f:[2 TO 5]", DocValuesRangeQuery.newLongRange("f", 2L, 5L, true, true).toString());
-    assertEquals("f:{2 TO 5]", DocValuesRangeQuery.newLongRange("f", 2L, 5L, false, true).toString());
-    assertEquals("f:{2 TO 5}", DocValuesRangeQuery.newLongRange("f", 2L, 5L, false, false).toString());
-    assertEquals("f:{* TO 5}", DocValuesRangeQuery.newLongRange("f", null, 5L, false, false).toString());
-    assertEquals("f:[2 TO *}", DocValuesRangeQuery.newLongRange("f", 2L, null, true, false).toString());
-
-    BytesRef min = new BytesRef("a");
-    BytesRef max = new BytesRef("b");
-    assertEquals("f:[[61] TO [62]]", DocValuesRangeQuery.newBytesRefRange("f", min, max, true, true).toString());
-    assertEquals("f:{[61] TO [62]]", DocValuesRangeQuery.newBytesRefRange("f", min, max, false, true).toString());
-    assertEquals("f:{[61] TO [62]}", DocValuesRangeQuery.newBytesRefRange("f", min, max, false, false).toString());
-    assertEquals("f:{* TO [62]}", DocValuesRangeQuery.newBytesRefRange("f", null, max, false, false).toString());
-    assertEquals("f:[[61] TO *}", DocValuesRangeQuery.newBytesRefRange("f", min, null, true, false).toString());
-  }
-
-  public void testDocValuesRangeSupportsApproximation() throws IOException {
-    Directory dir = newDirectory();
-    RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
-    Document doc = new Document();
-    doc.add(new NumericDocValuesField("dv1", 5L));
-    doc.add(new SortedDocValuesField("dv2", toSortableBytes(42L)));
-    iw.addDocument(doc);
-    iw.commit();
-    final IndexReader reader = iw.getReader();
-    final LeafReaderContext ctx = reader.leaves().get(0);
-    final IndexSearcher searcher = newSearcher(reader);
-    iw.close();
-
-    Query q1 = DocValuesRangeQuery.newLongRange("dv1", 0L, 100L, random().nextBoolean(), random().nextBoolean());
-    Weight w = searcher.createNormalizedWeight(q1, true);
-    Scorer s = w.scorer(ctx);
-    assertNotNull(s.twoPhaseIterator());
-
-    Query q2 = DocValuesRangeQuery.newBytesRefRange("dv2", toSortableBytes(0L), toSortableBytes(100L), random().nextBoolean(), random().nextBoolean());
-    w = searcher.createNormalizedWeight(q2, true);
-    s = w.scorer(ctx);
-    assertNotNull(s.twoPhaseIterator());
-
-    reader.close();
-    dir.close();
-  }
-
-  public void testLongRangeBoundaryValues() throws IOException {
-    Directory dir = newDirectory();
-    RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
-
-    Document doc = new Document();
-    doc.add(new SortedNumericDocValuesField("dv", 100l));
-    iw.addDocument(doc);
-
-    doc = new Document();
-    doc.add(new SortedNumericDocValuesField("dv", 200l));
-    iw.addDocument(doc);
-
-    iw.commit();
-
-    final IndexReader reader = iw.getReader();
-    final IndexSearcher searcher = newSearcher(reader, false);
-    iw.close();
-
-    Long min = Long.MIN_VALUE;
-    Long max = Long.MIN_VALUE;
-    Query query = DocValuesRangeQuery.newLongRange("dv", min, max, true, false);
-    TopDocs td = searcher.search(query, searcher.reader.maxDoc(), Sort.INDEXORDER);
-    assertEquals(0, td.totalHits);
-
-    min = Long.MAX_VALUE;
-    max = Long.MAX_VALUE;
-    query = DocValuesRangeQuery.newLongRange("dv", min, max, false, true);
-    td = searcher.search(query, searcher.reader.maxDoc(), Sort.INDEXORDER);
-    assertEquals(0, td.totalHits);
-
-    reader.close();
-    dir.close();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71ca2a84/lucene/sandbox/src/test/org/apache/lucene/search/TestIndexOrDocValuesQuery.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/search/TestIndexOrDocValuesQuery.java b/lucene/sandbox/src/test/org/apache/lucene/search/TestIndexOrDocValuesQuery.java
deleted file mode 100644
index de289e7..0000000
--- a/lucene/sandbox/src/test/org/apache/lucene/search/TestIndexOrDocValuesQuery.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.search;
-
-import java.io.IOException;
-
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field.Store;
-import org.apache.lucene.document.LongPoint;
-import org.apache.lucene.document.NumericDocValuesField;
-import org.apache.lucene.document.StringField;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.search.BooleanClause.Occur;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.TestUtil;
-
-public class TestIndexOrDocValuesQuery extends LuceneTestCase {
-
-  public void testUseIndexForSelectiveQueries() throws IOException {
-    Directory dir = newDirectory();
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig()
-        // relies on costs and PointValues.estimateCost so we need the default codec
-        .setCodec(TestUtil.getDefaultCodec()));
-    for (int i = 0; i < 2000; ++i) {
-      Document doc = new Document();
-      if (i == 42) {
-        doc.add(new StringField("f1", "bar", Store.NO));
-        doc.add(new LongPoint("f2", 42L));
-        doc.add(new NumericDocValuesField("f2", 42L));
-      } else if (i == 100) {
-        doc.add(new StringField("f1", "foo", Store.NO));
-        doc.add(new LongPoint("f2", 2L));
-        doc.add(new NumericDocValuesField("f2", 2L));
-      } else {
-        doc.add(new StringField("f1", "bar", Store.NO));
-        doc.add(new LongPoint("f2", 2L));
-        doc.add(new NumericDocValuesField("f2", 2L));
-      }
-      w.addDocument(doc);
-    }
-    w.forceMerge(1);
-    IndexReader reader = DirectoryReader.open(w);
-    IndexSearcher searcher = newSearcher(reader);
-    searcher.setQueryCache(null);
-
-    // The term query is more selective, so the IndexOrDocValuesQuery should use doc values
-    final Query q1 = new BooleanQuery.Builder()
-        .add(new TermQuery(new Term("f1", "foo")), Occur.MUST)
-        .add(new IndexOrDocValuesQuery(LongPoint.newExactQuery("f2", 2), new DocValuesNumbersQuery("f2", 2L)), Occur.MUST)
-        .build();
-
-    final Weight w1 = searcher.createNormalizedWeight(q1, random().nextBoolean());
-    final Scorer s1 = w1.scorer(searcher.getIndexReader().leaves().get(0));
-    assertNotNull(s1.twoPhaseIterator()); // means we use doc values
-
-    // The term query is less selective, so the IndexOrDocValuesQuery should use points
-    final Query q2 = new BooleanQuery.Builder()
-        .add(new TermQuery(new Term("f1", "bar")), Occur.MUST)
-        .add(new IndexOrDocValuesQuery(LongPoint.newExactQuery("f2", 42), new DocValuesNumbersQuery("f2", 42L)), Occur.MUST)
-        .build();
-
-    final Weight w2 = searcher.createNormalizedWeight(q2, random().nextBoolean());
-    final Scorer s2 = w2.scorer(searcher.getIndexReader().leaves().get(0));
-    assertNull(s2.twoPhaseIterator()); // means we use points
-
-    reader.close();
-    w.close();
-    dir.close();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71ca2a84/solr/contrib/analysis-extras/src/java/org/apache/solr/schema/ICUCollationField.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analysis-extras/src/java/org/apache/solr/schema/ICUCollationField.java b/solr/contrib/analysis-extras/src/java/org/apache/solr/schema/ICUCollationField.java
index 2071163..5152768 100644
--- a/solr/contrib/analysis-extras/src/java/org/apache/solr/schema/ICUCollationField.java
+++ b/solr/contrib/analysis-extras/src/java/org/apache/solr/schema/ICUCollationField.java
@@ -32,7 +32,6 @@ import org.apache.lucene.collation.ICUCollationKeyAnalyzer;
 import org.apache.lucene.document.SortedDocValuesField;
 import org.apache.lucene.document.SortedSetDocValuesField;
 import org.apache.lucene.index.IndexableField;
-import org.apache.lucene.search.DocValuesRangeQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.TermRangeQuery;
@@ -272,13 +271,8 @@ public class ICUCollationField extends FieldType {
     BytesRef low = part1 == null ? null : getCollationKey(f, part1);
     BytesRef high = part2 == null ? null : getCollationKey(f, part2);
     if (!field.indexed() && field.hasDocValues()) {
-      if (field.multiValued()) {
-          return DocValuesRangeQuery.newBytesRefRange(
-              field.getName(), low, high, minInclusive, maxInclusive);
-        } else {
-          return DocValuesRangeQuery.newBytesRefRange(
-              field.getName(), low, high, minInclusive, maxInclusive);
-        } 
+      return SortedSetDocValuesField.newRangeQuery(
+          field.getName(), low, high, minInclusive, maxInclusive);
     } else {
       return new TermRangeQuery(field.getName(), low, high, minInclusive, maxInclusive);
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71ca2a84/solr/core/src/java/org/apache/solr/schema/CollationField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/CollationField.java b/solr/core/src/java/org/apache/solr/schema/CollationField.java
index 998db2a..805e204 100644
--- a/solr/core/src/java/org/apache/solr/schema/CollationField.java
+++ b/solr/core/src/java/org/apache/solr/schema/CollationField.java
@@ -36,7 +36,6 @@ import org.apache.lucene.collation.CollationKeyAnalyzer;
 import org.apache.lucene.document.SortedDocValuesField;
 import org.apache.lucene.document.SortedSetDocValuesField;
 import org.apache.lucene.index.IndexableField;
-import org.apache.lucene.search.DocValuesRangeQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.TermRangeQuery;
@@ -242,7 +241,7 @@ public class CollationField extends FieldType {
     BytesRef low = part1 == null ? null : getCollationKey(f, part1);
     BytesRef high = part2 == null ? null : getCollationKey(f, part2);
     if (!field.indexed() && field.hasDocValues()) {
-      return DocValuesRangeQuery.newBytesRefRange(
+      return SortedSetDocValuesField.newRangeQuery(
           field.getName(), low, high, minInclusive, maxInclusive);
     } else {
       return new TermRangeQuery(field.getName(), low, high, minInclusive, maxInclusive);
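
Both collation fields now build their doc-values-only range query the same way. A minimal standalone sketch of that call follows; the class and field names are hypothetical, and in the Solr collation fields the BytesRef bounds would be collation keys rather than raw terms.

import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;

public class DocValuesOnlyRangeSketch {
  // "collated" stands in for a SORTED_SET doc-values field.
  static Query newKeyRange(BytesRef low, BytesRef high) {
    // Runs entirely on doc values, which is why the field types only take this
    // branch in the !indexed() && hasDocValues() case shown in the hunk above.
    return SortedSetDocValuesField.newRangeQuery("collated", low, high, true, true);
  }
}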


[04/14] lucene-solr:jira/solr-5944: SOLR-9885: Allow pre-startup Solr log management in Solr bin scripts to be disabled.

Posted by is...@apache.org.
SOLR-9885: Allow pre-startup Solr log management in Solr bin scripts to be disabled.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/075aec91
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/075aec91
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/075aec91

Branch: refs/heads/jira/solr-5944
Commit: 075aec91cd2c10e3f9a62adcf0feadc705c205ec
Parents: bb35732
Author: markrmiller <ma...@apache.org>
Authored: Thu Jan 19 03:07:09 2017 -0500
Committer: markrmiller <ma...@apache.org>
Committed: Thu Jan 19 03:07:09 2017 -0500

----------------------------------------------------------------------
 solr/CHANGES.txt     |  2 ++
 solr/bin/solr        | 11 ++++++-----
 solr/bin/solr.cmd    | 13 +++++++++----
 solr/bin/solr.in.cmd |  5 +++++
 solr/bin/solr.in.sh  |  5 +++++
 5 files changed, 27 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/075aec91/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index aab5116..c0fe505 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -102,6 +102,8 @@ New Features
 
 * SOLR-9926: Allow passing arbitrary java system properties to zkcli. (Hrishikesh Gadre via Mark Miller)
 
+* SOLR-9885: Allow pre-startup Solr log management in Solr bin scripts to be disabled. (Mano Kovacs via Mark Miller)
+
 Bug Fixes
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/075aec91/solr/bin/solr
----------------------------------------------------------------------
diff --git a/solr/bin/solr b/solr/bin/solr
index fcf864b..c2d0feb 100755
--- a/solr/bin/solr
+++ b/solr/bin/solr
@@ -1480,11 +1480,12 @@ if [ ! -e "$SOLR_HOME" ]; then
   echo -e "\nSolr home directory $SOLR_HOME not found!\n"
   exit 1
 fi
-
-run_tool utils -s "$DEFAULT_SERVER_DIR" -l "$SOLR_LOGS_DIR" -remove_old_solr_logs 7 || echo "Failed removing old solr logs"
-run_tool utils -s "$DEFAULT_SERVER_DIR" -l "$SOLR_LOGS_DIR" -archive_gc_logs        || echo "Failed archiving old GC logs"
-run_tool utils -s "$DEFAULT_SERVER_DIR" -l "$SOLR_LOGS_DIR" -archive_console_logs   || echo "Failed archiving old console logs"
-run_tool utils -s "$DEFAULT_SERVER_DIR" -l "$SOLR_LOGS_DIR" -rotate_solr_logs 9     || echo "Failed rotating old solr logs"
+if [ "${SOLR_LOG_PRESTART_ROTATION:=true}" == "true" ]; then
+  run_tool utils -s "$DEFAULT_SERVER_DIR" -l "$SOLR_LOGS_DIR" -remove_old_solr_logs 7 || echo "Failed removing old solr logs"
+  run_tool utils -s "$DEFAULT_SERVER_DIR" -l "$SOLR_LOGS_DIR" -archive_gc_logs        || echo "Failed archiving old GC logs"
+  run_tool utils -s "$DEFAULT_SERVER_DIR" -l "$SOLR_LOGS_DIR" -archive_console_logs   || echo "Failed archiving old console logs"
+  run_tool utils -s "$DEFAULT_SERVER_DIR" -l "$SOLR_LOGS_DIR" -rotate_solr_logs 9     || echo "Failed rotating old solr logs"
+fi
 
 java_ver_out=`echo "$("$JAVA" -version 2>&1)"`
 JAVA_VERSION=`echo $java_ver_out | grep "java version" | awk '{ print substr($3, 2, length($3)-2); }'`

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/075aec91/solr/bin/solr.cmd
----------------------------------------------------------------------
diff --git a/solr/bin/solr.cmd b/solr/bin/solr.cmd
index 04398bc..732c2de 100644
--- a/solr/bin/solr.cmd
+++ b/solr/bin/solr.cmd
@@ -939,10 +939,15 @@ IF ERRORLEVEL 1 (
 )
 
 REM Clean up and rotate logs
-call :run_utils "-remove_old_solr_logs 7" || echo "Failed removing old solr logs"
-call :run_utils "-archive_gc_logs"        || echo "Failed archiving old GC logs"
-call :run_utils "-archive_console_logs"   || echo "Failed archiving old console logs"
-call :run_utils "-rotate_solr_logs 9"     || echo "Failed rotating old solr logs"
+IF [%SOLR_LOG_PRESTART_ROTATION%] == [] (
+  set SOLR_LOG_PRESTART_ROTATION=true
+)
+IF [%SOLR_LOG_PRESTART_ROTATION%] == [true] (
+  call :run_utils "-remove_old_solr_logs 7" || echo "Failed removing old solr logs"
+  call :run_utils "-archive_gc_logs"        || echo "Failed archiving old GC logs"
+  call :run_utils "-archive_console_logs"   || echo "Failed archiving old console logs"
+  call :run_utils "-rotate_solr_logs 9"     || echo "Failed rotating old solr logs"
+)
 
 IF NOT "%ZK_HOST%"=="" set SOLR_MODE=solrcloud
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/075aec91/solr/bin/solr.in.cmd
----------------------------------------------------------------------
diff --git a/solr/bin/solr.in.cmd b/solr/bin/solr.in.cmd
index d323434..e565c02 100644
--- a/solr/bin/solr.in.cmd
+++ b/solr/bin/solr.in.cmd
@@ -75,6 +75,11 @@ REM set SOLR_LOG_LEVEL=INFO
 REM Location where Solr should write logs to. Absolute or relative to solr start dir
 REM set SOLR_LOGS_DIR=logs
 
+REM Enables log rotation, cleanup, and archiving before starting Solr. Setting SOLR_LOG_PRESTART_ROTATION=false will skip start
+REM time rotation of logs, and the archiving of the last GC and console log files. It does not affect Log4j configuration. This
+REM pre-startup rotation may need to be disabled depending how much you customize the default logging setup.
+REM set SOLR_LOG_PRESTART_ROTATION=true
+
 REM Set the host interface to listen on. Jetty will listen on all interfaces (0.0.0.0) by default.
 REM This must be an IPv4 ("a.b.c.d") or bracketed IPv6 ("[x::y]") address, not a hostname!
 REM set SOLR_JETTY_HOST=0.0.0.0

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/075aec91/solr/bin/solr.in.sh
----------------------------------------------------------------------
diff --git a/solr/bin/solr.in.sh b/solr/bin/solr.in.sh
index e5dd0c9..5a9f807 100644
--- a/solr/bin/solr.in.sh
+++ b/solr/bin/solr.in.sh
@@ -91,6 +91,11 @@
 # Location where Solr should write logs to. Absolute or relative to solr start dir
 #SOLR_LOGS_DIR=logs
 
+# Enables log rotation, cleanup, and archiving during start. Setting SOLR_LOG_PRESTART_ROTATION=false will skip start
+# time rotation of logs, and the archiving of the last GC and console log files. It does not affect Log4j configuration.
+# This pre-startup rotation may need to be disabled depending how much you customize the default logging setup.
+#SOLR_LOG_PRESTART_ROTATION=true
+
 # Sets the port Solr binds to, default is 8983
 #SOLR_PORT=8983
 


[03/14] lucene-solr:jira/solr-5944: SOLR-9984: Remove GenericHadoopAuthPlugin (HadoopAuthPlugin is there)

Posted by is...@apache.org.
SOLR-9984: Remove GenericHadoopAuthPlugin (HadoopAuthPlugin is there)


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/bb35732e
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/bb35732e
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/bb35732e

Branch: refs/heads/jira/solr-5944
Commit: bb35732eef90fc0ba7862d2c123c7e16356d2a0b
Parents: 1a05d6f
Author: Ishan Chattopadhyaya <ic...@gmail.com>
Authored: Thu Jan 19 10:02:13 2017 +0530
Committer: Ishan Chattopadhyaya <ic...@gmail.com>
Committed: Thu Jan 19 10:02:13 2017 +0530

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  8 -----
 .../solr/security/GenericHadoopAuthPlugin.java  | 31 --------------------
 2 files changed, 39 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/bb35732e/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 62b8818..aab5116 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -94,12 +94,6 @@ Jetty 9.3.14.v20161028
 Detailed Change List
 ----------------------
 
-Upgrade Notes
-----------------------
-
-* SOLR-9984: GenericHadoopAuthPlugin is deprecated in favor of HadoopAuthPlugin. Simply changing the
-  name of the class in the security configurations should suffice while upgrading.
-
 New Features
 ----------------------
 
@@ -128,8 +122,6 @@ Other Changes
 ----------------------
 * SOLR-9980: Expose configVersion in core admin status (Jessica Cheng Mallet via Tomás Fernández Löbbe)
 
-* SOLR-9984: Deprecate GenericHadoopAuthPlugin in favor of HadoopAuthPlugin (Ishan Chattopadhyaya)
-
 ==================  6.4.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/bb35732e/solr/core/src/java/org/apache/solr/security/GenericHadoopAuthPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/security/GenericHadoopAuthPlugin.java b/solr/core/src/java/org/apache/solr/security/GenericHadoopAuthPlugin.java
deleted file mode 100644
index 3d63fd6..0000000
--- a/solr/core/src/java/org/apache/solr/security/GenericHadoopAuthPlugin.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.security;
-
-import org.apache.solr.core.CoreContainer;
-
-/**
- *  * @deprecated Use {@link HadoopAuthPlugin}. For backcompat against Solr 6.4.
- */
-@Deprecated
-public class GenericHadoopAuthPlugin extends HadoopAuthPlugin {
-
-  public GenericHadoopAuthPlugin(CoreContainer coreContainer) {
-    super(coreContainer);
-  }
-
-}
\ No newline at end of file