Posted to commits@lucene.apache.org by ho...@apache.org on 2016/03/12 01:26:42 UTC

[01/50] [abbrv] lucene-solr git commit: SOLR-8799: Improve error message when tuple can't be read by SolrJ JDBC

Repository: lucene-solr
Updated Branches:
  refs/heads/jira/SOLR-445 2401c9495 -> d3e0bdd4a


SOLR-8799: Improve error message when tuple can't be read by SolrJ JDBC


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/56ad6e5d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/56ad6e5d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/56ad6e5d

Branch: refs/heads/jira/SOLR-445
Commit: 56ad6e5d8a3d92f7ea496c598c2097aa572263cc
Parents: 4384627
Author: jbernste <jb...@apache.org>
Authored: Tue Mar 8 15:21:19 2016 -0500
Committer: jbernste <jb...@apache.org>
Committed: Tue Mar 8 15:22:42 2016 -0500

----------------------------------------------------------------------
 .../solr/client/solrj/io/sql/ResultSetImpl.java      |  2 +-
 .../apache/solr/client/solrj/io/sql/JdbcTest.java    | 15 +++++++++++++++
 2 files changed, 16 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/56ad6e5d/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/ResultSetImpl.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/ResultSetImpl.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/ResultSetImpl.java
index e2f8cf0..0aa3a4b 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/ResultSetImpl.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/ResultSetImpl.java
@@ -78,7 +78,7 @@ class ResultSetImpl implements ResultSet {
       this.firstTuple = this.solrStream.read();
       this.solrStream.pushBack(firstTuple);
     } catch (IOException e) {
-      throw new SQLException("Couldn't read first tuple", e);
+      throw new SQLException(e);
     }
 
     this.resultSetMetaData = new ResultSetMetaDataImpl(this);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/56ad6e5d/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java
index e1e9739..572491e 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java
@@ -396,6 +396,21 @@ public class JdbcTest extends AbstractFullDistribZkTestBase {
       }
     }
 
+
+    //Test error propagation
+    props = new Properties();
+    props.put("aggregationMode", "facet");
+    try (Connection con = DriverManager.getConnection("jdbc:solr://" + zkHost + "?collection=collection1", props)) {
+      try (Statement stmt = con.createStatement()) {
+        try (ResultSet rs = stmt.executeQuery("select crap from collection1 group by a_s " +
+            "order by sum(a_f) desc")) {
+        } catch (Exception e) {
+          String errorMessage = e.getMessage();
+          assertTrue(errorMessage.contains("Group by queries must include atleast one aggregate function"));
+        }
+      }
+    }
+
     testDriverMetadata();
   }
 

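The practical effect of the one-line change above: SQLException(Throwable) adopts cause.toString() as its own message, so SolrJ JDBC callers now see the underlying stream error directly from getMessage() instead of the fixed "Couldn't read first tuple" text (which forced them to unwrap getCause()). A minimal standalone sketch of the difference, using only the JDK (not Solr code):

    import java.io.IOException;
    import java.sql.SQLException;

    public class WrapDemo {
      public static void main(String[] args) {
        IOException cause = new IOException("Group by queries must include atleast one aggregate function");
        // Old: fixed prefix as the message; the interesting detail hides in getCause().
        SQLException before = new SQLException("Couldn't read first tuple", cause);
        // New: SQLException(Throwable) uses cause.toString() as the message.
        SQLException after = new SQLException(cause);
        System.out.println(before.getMessage()); // Couldn't read first tuple
        System.out.println(after.getMessage());  // java.io.IOException: Group by queries must include ...
      }
    }

This is why the new JdbcTest can assert on a substring of the server-side error straight from e.getMessage().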

[09/50] [abbrv] lucene-solr git commit: LUCENE-7085: PointRangeQuery.equals sometimes returns false even if queries were in fact equal

Posted by ho...@apache.org.
LUCENE-7085: PointRangeQuery.equals sometimes returns false even if queries were in fact equal


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/770e508f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/770e508f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/770e508f

Branch: refs/heads/jira/SOLR-445
Commit: 770e508fd3d908e9bf7997361299af96aa437b75
Parents: 004e83b
Author: Mike McCandless <mi...@apache.org>
Authored: Wed Mar 9 10:07:15 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Wed Mar 9 10:07:15 2016 -0500

----------------------------------------------------------------------
 .../apache/lucene/search/PointInSetQuery.java   | 10 +--
 .../apache/lucene/search/PointRangeQuery.java   | 38 +++++++---
 .../apache/lucene/search/TestPointQueries.java  | 80 ++++++++++++++++++++
 .../lucene/document/InetAddressPoint.java       | 23 +++++-
 .../lucene/document/TestBigIntegerPoint.java    | 15 ++++
 .../lucene/document/TestInetAddressPoint.java   | 21 +++++
 .../apache/lucene/document/TestLatLonPoint.java | 20 +++++
 .../spatial3d/PointInGeo3DShapeQuery.java       |  4 +-
 .../apache/lucene/spatial3d/TestGeo3DPoint.java | 14 ++++
 9 files changed, 202 insertions(+), 23 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/770e508f/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java b/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java
index 944fadf..bee864f 100644
--- a/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java
@@ -103,7 +103,7 @@ public abstract class PointInSetQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public final Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
 
     // We don't use RandomAccessWeight here: it's no good to approximate with "match all docs".
     // This is an inverted structure and should be used in the first pass:
@@ -161,14 +161,12 @@ public abstract class PointInSetQuery extends Query {
     private final DocIdSetBuilder result;
     private TermIterator iterator;
     private BytesRef nextQueryPoint;
-    private final byte[] lastMaxPackedValue;
     private final BytesRef scratch = new BytesRef();
     private final PrefixCodedTerms sortedPackedPoints;
 
     public MergePointVisitor(PrefixCodedTerms sortedPackedPoints, DocIdSetBuilder result) throws IOException {
       this.result = result;
       this.sortedPackedPoints = sortedPackedPoints;
-      lastMaxPackedValue = new byte[bytesPerDim];
       scratch.length = bytesPerDim;
       this.iterator = sortedPackedPoints.iterator();
       nextQueryPoint = iterator.next();
@@ -304,7 +302,7 @@ public abstract class PointInSetQuery extends Query {
   }
 
   @Override
-  public int hashCode() {
+  public final int hashCode() {
     int hash = super.hashCode();
     hash = 31 * hash + sortedPackedPointsHashCode;
     hash = 31 * hash + numDims;
@@ -313,7 +311,7 @@ public abstract class PointInSetQuery extends Query {
   }
 
   @Override
-  public boolean equals(Object other) {
+  public final boolean equals(Object other) {
     if (super.equals(other)) {
       final PointInSetQuery q = (PointInSetQuery) other;
       return q.numDims == numDims &&
@@ -326,7 +324,7 @@ public abstract class PointInSetQuery extends Query {
   }
 
   @Override
-  public String toString(String field) {
+  public final String toString(String field) {
     final StringBuilder sb = new StringBuilder();
     if (this.field.equals(field) == false) {
       sb.append(this.field);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/770e508f/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java b/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java
index ebbe7e2..9384d23 100644
--- a/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java
@@ -109,7 +109,7 @@ public abstract class PointRangeQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public final Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
 
     // We don't use RandomAccessWeight here: it's no good to approximate with "match all docs".
     // This is an inverted structure and should be used in the first pass:
@@ -239,7 +239,7 @@ public abstract class PointRangeQuery extends Query {
   }
 
   @Override
-  public int hashCode() {
+  public final int hashCode() {
     int hash = super.hashCode();
     hash = 31 * hash + Arrays.hashCode(lowerPoint);
     hash = 31 * hash + Arrays.hashCode(upperPoint);
@@ -249,20 +249,36 @@ public abstract class PointRangeQuery extends Query {
   }
 
   @Override
-  public boolean equals(Object other) {
-    if (super.equals(other)) {
-      final PointRangeQuery q = (PointRangeQuery) other;
-      return q.numDims == numDims &&
-        q.bytesPerDim == bytesPerDim &&
-        Arrays.equals(lowerPoint, q.lowerPoint) &&
-        Arrays.equals(upperPoint, q.upperPoint);
+  public final boolean equals(Object other) {
+    if (super.equals(other) == false) {
+      return false;
     }
 
-    return false;
+    final PointRangeQuery q = (PointRangeQuery) other;
+    if (q.numDims != numDims) {
+      return false;
+    }
+
+    if (q.bytesPerDim != bytesPerDim) {
+      return false;
+    }
+
+    // Cannot use Arrays.equals here, because it in turn uses byte[].equals
+    // to compare each value, which only uses "=="
+    for(int dim=0;dim<numDims;dim++) {
+      if (Arrays.equals(lowerPoint[dim], q.lowerPoint[dim]) == false) {
+        return false;
+      }
+      if (Arrays.equals(upperPoint[dim], q.upperPoint[dim]) == false) {
+        return false;
+      }
+    }
+
+    return true;
   }
 
   @Override
-  public String toString(String field) {
+  public final String toString(String field) {
     final StringBuilder sb = new StringBuilder();
     if (this.field.equals(field) == false) {
       sb.append(this.field);

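The comment in the rewritten equals() is the core of the LUCENE-7085 bug: lowerPoint and upperPoint hold one byte[] per dimension, and comparing the outer arrays wholesale delegates to byte[].equals, which is reference equality. A small JDK-only illustration of why the per-dimension loop is needed:

    import java.util.Arrays;

    public class DeepEqualsDemo {
      public static void main(String[] args) {
        byte[][] a = { {1, 2}, {3, 4} };
        byte[][] b = { {1, 2}, {3, 4} };
        // Arrays.equals(Object[], Object[]) compares elements with byte[].equals,
        // i.e. identity, so equal contents still report false:
        System.out.println(Arrays.equals(a, b));     // false
        // Comparing each inner array by content (what the fix does per dimension):
        System.out.println(Arrays.deepEquals(a, b)); // true
      }
    }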
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/770e508f/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java b/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java
index 500bb8f..4d9aa59 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java
@@ -1886,4 +1886,84 @@ public class TestPointQueries extends LuceneTestCase {
     w.close();
     dir.close();
   }
+
+  public void testPointRangeEquals() {
+    Query q = IntPoint.newRangeQuery("a", 0, 1000);
+    assertEquals(q, IntPoint.newRangeQuery("a", 0, 1000));
+    assertFalse(q.equals(IntPoint.newRangeQuery("a", 1, 1000)));
+
+    q = LongPoint.newRangeQuery("a", 0, 1000);
+    assertEquals(q, LongPoint.newRangeQuery("a", 0, 1000));
+    assertFalse(q.equals(LongPoint.newRangeQuery("a", 1, 1000)));
+
+    q = FloatPoint.newRangeQuery("a", 0, 1000);
+    assertEquals(q, FloatPoint.newRangeQuery("a", 0, 1000));
+    assertFalse(q.equals(FloatPoint.newRangeQuery("a", 1, 1000)));
+
+    q = DoublePoint.newRangeQuery("a", 0, 1000);
+    assertEquals(q, DoublePoint.newRangeQuery("a", 0, 1000));
+    assertFalse(q.equals(DoublePoint.newRangeQuery("a", 1, 1000)));
+
+    byte[] zeros = new byte[5];
+    byte[] ones = new byte[5];
+    Arrays.fill(ones, (byte) 0xff);
+    q = BinaryPoint.newRangeQuery("a", new byte[][] {zeros}, new byte[][] {ones});
+    assertEquals(q, BinaryPoint.newRangeQuery("a", new byte[][] {zeros}, new byte[][] {ones}));
+    byte[] other = ones.clone();
+    other[2] = (byte) 5;
+    assertFalse(q.equals(BinaryPoint.newRangeQuery("a", new byte[][] {zeros}, new byte[][] {other})));
+  }
+
+  public void testPointExactEquals() {
+    Query q = IntPoint.newExactQuery("a", 1000);
+    assertEquals(q, IntPoint.newExactQuery("a", 1000));
+    assertFalse(q.equals(IntPoint.newExactQuery("a", 1)));
+
+    q = LongPoint.newExactQuery("a", 1000);
+    assertEquals(q, LongPoint.newExactQuery("a", 1000));
+    assertFalse(q.equals(LongPoint.newExactQuery("a", 1)));
+
+    q = FloatPoint.newExactQuery("a", 1000);
+    assertEquals(q, FloatPoint.newExactQuery("a", 1000));
+    assertFalse(q.equals(FloatPoint.newExactQuery("a", 1)));
+
+    q = DoublePoint.newExactQuery("a", 1000);
+    assertEquals(q, DoublePoint.newExactQuery("a", 1000));
+    assertFalse(q.equals(DoublePoint.newExactQuery("a", 1)));
+
+    byte[] ones = new byte[5];
+    Arrays.fill(ones, (byte) 0xff);
+    q = BinaryPoint.newExactQuery("a", ones);
+    assertEquals(q, BinaryPoint.newExactQuery("a", ones));
+    byte[] other = ones.clone();
+    other[2] = (byte) 5;
+    assertFalse(q.equals(BinaryPoint.newExactQuery("a", other)));
+  }
+
+  public void testPointInSetEquals() {
+    Query q = IntPoint.newSetQuery("a", 0, 1000, 17);
+    assertEquals(q, IntPoint.newSetQuery("a", 17, 0, 1000));
+    assertFalse(q.equals(IntPoint.newSetQuery("a", 1, 17, 1000)));
+
+    q = LongPoint.newSetQuery("a", 0, 1000, 17);
+    assertEquals(q, LongPoint.newSetQuery("a", 17, 0, 1000));
+    assertFalse(q.equals(LongPoint.newSetQuery("a", 1, 17, 1000)));
+
+    q = FloatPoint.newSetQuery("a", 0, 1000, 17);
+    assertEquals(q, FloatPoint.newSetQuery("a", 17, 0, 1000));
+    assertFalse(q.equals(FloatPoint.newSetQuery("a", 1, 17, 1000)));
+
+    q = DoublePoint.newSetQuery("a", 0, 1000, 17);
+    assertEquals(q, DoublePoint.newSetQuery("a", 17, 0, 1000));
+    assertFalse(q.equals(DoublePoint.newSetQuery("a", 1, 17, 1000)));
+
+    byte[] zeros = new byte[5];
+    byte[] ones = new byte[5];
+    Arrays.fill(ones, (byte) 0xff);
+    q = BinaryPoint.newSetQuery("a", new byte[][] {zeros, ones});
+    assertEquals(q, BinaryPoint.newSetQuery("a", new byte[][] {zeros, ones}));
+    byte[] other = ones.clone();
+    other[2] = (byte) 5;
+    assertFalse(q.equals(BinaryPoint.newSetQuery("a", new byte[][] {zeros, other})));
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/770e508f/lucene/sandbox/src/java/org/apache/lucene/document/InetAddressPoint.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/InetAddressPoint.java b/lucene/sandbox/src/java/org/apache/lucene/document/InetAddressPoint.java
index f0df6ff..7abc1b6 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/document/InetAddressPoint.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/document/InetAddressPoint.java
@@ -19,12 +19,14 @@ package org.apache.lucene.document;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.Arrays;
+import java.util.Comparator;
 
 import org.apache.lucene.index.PointValues;
 import org.apache.lucene.search.PointInSetQuery;
 import org.apache.lucene.search.PointRangeQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.StringHelper;
 
 /** 
  * An indexed 128-bit {@code InetAddress} field.
@@ -214,9 +216,22 @@ public class InetAddressPoint extends Field {
    */
   public static Query newSetQuery(String field, InetAddress... values) {
 
-    // Don't unexpectedly change the user's incoming values array:
-    InetAddress[] sortedValues = values.clone();
-    Arrays.sort(sortedValues);
+    // We must compare the encoded form (InetAddress doesn't implement Comparable, and even if it
+    // did, we do our own thing with ipv4 addresses):
+
+    // NOTE: we could instead convert-per-comparison and save this extra array, at cost of slower sort:
+    byte[][] sortedValues = new byte[values.length][];
+    for(int i=0;i<values.length;i++) {
+      sortedValues[i] = encode(values[i]);
+    }
+
+    Arrays.sort(sortedValues,
+                new Comparator<byte[]>() {
+                  @Override
+                  public int compare(byte[] a, byte[] b) {
+                    return StringHelper.compare(BYTES, a, 0, b, 0);
+                  }
+                });
 
     final BytesRef encoded = new BytesRef(new byte[BYTES]);
 
@@ -230,7 +245,7 @@ public class InetAddressPoint extends Field {
                                    if (upto == sortedValues.length) {
                                      return null;
                                    } else {
-                                     encoded.bytes = encode(sortedValues[upto]);
+                                     encoded.bytes = sortedValues[upto];
                                      assert encoded.bytes.length == encoded.length;
                                      upto++;
                                      return encoded;

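The rewritten newSetQuery sorts the encoded byte forms rather than the InetAddress objects, because InetAddress implements no Comparable and the class maps IPv4 addresses into its own 16-byte encoding. A rough JDK-only sketch of the same idea, sorting raw address bytes with an unsigned byte-wise comparator (StringHelper.compare in the patch does the equivalent fixed-length unsigned compare; unlike encode(), getAddress() below does not pad IPv4 to 16 bytes):

    import java.net.InetAddress;
    import java.util.Arrays;
    import java.util.Comparator;

    public class EncodedSortDemo {
      public static void main(String[] args) throws Exception {
        byte[][] encoded = {
            InetAddress.getByName("1.2.3.5").getAddress(),
            InetAddress.getByName("1.2.3.3").getAddress(),
        };
        Arrays.sort(encoded, new Comparator<byte[]>() {
          @Override
          public int compare(byte[] a, byte[] b) {
            for (int i = 0; i < Math.min(a.length, b.length); i++) {
              int cmp = (a[i] & 0xff) - (b[i] & 0xff); // unsigned byte compare
              if (cmp != 0) {
                return cmp;
              }
            }
            return a.length - b.length;
          }
        });
        System.out.println(Arrays.toString(encoded[0])); // [1, 2, 3, 3]
      }
    }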
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/770e508f/lucene/sandbox/src/test/org/apache/lucene/document/TestBigIntegerPoint.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestBigIntegerPoint.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestBigIntegerPoint.java
index 500c2a3..8f38bcd 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/document/TestBigIntegerPoint.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestBigIntegerPoint.java
@@ -21,6 +21,7 @@ import java.math.BigInteger;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.LuceneTestCase;
 
@@ -93,4 +94,18 @@ public class TestBigIntegerPoint extends LuceneTestCase {
                                                                                  new BigInteger[] {BigInteger.valueOf(17), BigInteger.valueOf(42)}).toString());
     assertEquals("field:{1}", BigIntegerPoint.newSetQuery("field", BigInteger.ONE).toString());
   }
+
+  public void testQueryEquals() throws Exception {
+    Query q = BigIntegerPoint.newRangeQuery("a", BigInteger.valueOf(0), BigInteger.valueOf(1000));
+    assertEquals(q, BigIntegerPoint.newRangeQuery("a", BigInteger.valueOf(0), BigInteger.valueOf(1000)));
+    assertFalse(q.equals(BigIntegerPoint.newRangeQuery("a", BigInteger.valueOf(1), BigInteger.valueOf(1000))));
+
+    q = BigIntegerPoint.newExactQuery("a", BigInteger.valueOf(1000));
+    assertEquals(q, BigIntegerPoint.newExactQuery("a", BigInteger.valueOf(1000)));
+    assertFalse(q.equals(BigIntegerPoint.newExactQuery("a", BigInteger.valueOf(1))));
+
+    q = BigIntegerPoint.newSetQuery("a", BigInteger.valueOf(0), BigInteger.valueOf(1000), BigInteger.valueOf(17));
+    assertEquals(q, BigIntegerPoint.newSetQuery("a", BigInteger.valueOf(17), BigInteger.valueOf(0), BigInteger.valueOf(1000)));
+    assertFalse(q.equals(BigIntegerPoint.newSetQuery("a", BigInteger.valueOf(1), BigInteger.valueOf(17), BigInteger.valueOf(1000))));
+  }     
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/770e508f/lucene/sandbox/src/test/org/apache/lucene/document/TestInetAddressPoint.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestInetAddressPoint.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestInetAddressPoint.java
index d4ddb3a..c91b52b 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/document/TestInetAddressPoint.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestInetAddressPoint.java
@@ -21,6 +21,7 @@ import java.net.InetAddress;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.LuceneTestCase;
 
@@ -45,6 +46,7 @@ public class TestInetAddressPoint extends LuceneTestCase {
     assertEquals(1, searcher.count(InetAddressPoint.newPrefixQuery("field", address, 24)));
     assertEquals(1, searcher.count(InetAddressPoint.newRangeQuery("field", InetAddress.getByName("1.2.3.3"), InetAddress.getByName("1.2.3.5"))));
     assertEquals(1, searcher.count(InetAddressPoint.newSetQuery("field", InetAddress.getByName("1.2.3.4"))));
+    assertEquals(1, searcher.count(InetAddressPoint.newSetQuery("field", InetAddress.getByName("1.2.3.4"), InetAddress.getByName("1.2.3.5"))));
     assertEquals(0, searcher.count(InetAddressPoint.newSetQuery("field", InetAddress.getByName("1.2.3.3"))));
     assertEquals(0, searcher.count(InetAddressPoint.newSetQuery("field")));
 
@@ -88,4 +90,23 @@ public class TestInetAddressPoint extends LuceneTestCase {
     assertEquals("field:[fdc8:57ed:f042:ad1:0:0:0:0 TO fdc8:57ed:f042:ad1:ffff:ffff:ffff:ffff]", InetAddressPoint.newPrefixQuery("field", InetAddress.getByName("fdc8:57ed:f042:0ad1:f66d:4ff:fe90:ce0c"), 64).toString());
     assertEquals("field:{fdc8:57ed:f042:ad1:f66d:4ff:fe90:ce0c}", InetAddressPoint.newSetQuery("field", InetAddress.getByName("fdc8:57ed:f042:0ad1:f66d:4ff:fe90:ce0c")).toString());
   }
+
+  public void testQueryEquals() throws Exception {
+    Query q = InetAddressPoint.newRangeQuery("a", InetAddress.getByName("1.2.3.3"), InetAddress.getByName("1.2.3.5"));
+    assertEquals(q, InetAddressPoint.newRangeQuery("a", InetAddress.getByName("1.2.3.3"), InetAddress.getByName("1.2.3.5")));
+    assertFalse(q.equals(InetAddressPoint.newRangeQuery("a", InetAddress.getByName("1.2.3.3"), InetAddress.getByName("1.2.3.7"))));
+
+    q = InetAddressPoint.newPrefixQuery("a", InetAddress.getByName("1.2.3.3"), 16);
+    assertEquals(q, InetAddressPoint.newPrefixQuery("a", InetAddress.getByName("1.2.3.3"), 16));
+    assertFalse(q.equals(InetAddressPoint.newPrefixQuery("a", InetAddress.getByName("1.1.3.5"), 16)));
+    assertFalse(q.equals(InetAddressPoint.newPrefixQuery("a", InetAddress.getByName("1.2.3.5"), 24)));
+
+    q = InetAddressPoint.newExactQuery("a", InetAddress.getByName("1.2.3.3"));
+    assertEquals(q, InetAddressPoint.newExactQuery("a", InetAddress.getByName("1.2.3.3")));
+    assertFalse(q.equals(InetAddressPoint.newExactQuery("a", InetAddress.getByName("1.2.3.5"))));
+
+    q = InetAddressPoint.newSetQuery("a", InetAddress.getByName("1.2.3.3"), InetAddress.getByName("1.2.3.5"));
+    assertEquals(q, InetAddressPoint.newSetQuery("a", InetAddress.getByName("1.2.3.3"), InetAddress.getByName("1.2.3.5")));
+    assertFalse(q.equals(InetAddressPoint.newSetQuery("a", InetAddress.getByName("1.2.3.3"), InetAddress.getByName("1.2.3.7"))));
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/770e508f/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPoint.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPoint.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPoint.java
index 0ef948d..61c6754 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPoint.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPoint.java
@@ -19,6 +19,7 @@ package org.apache.lucene.document;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.LuceneTestCase;
 
@@ -170,4 +171,23 @@ public class TestLatLonPoint extends LuceneTestCase {
       assertEquals(lonEnc, lonEnc2, 0.0);
     }
   }
+
+  public void testQueryEquals() throws Exception {
+    Query q = LatLonPoint.newBoxQuery("field", 50, 70, -40, 20);
+    assertEquals(q, LatLonPoint.newBoxQuery("field", 50, 70, -40, 20));
+    assertFalse(q.equals(LatLonPoint.newBoxQuery("field", 50, 70, -40, 10)));
+
+    q = LatLonPoint.newDistanceQuery("field", 50, 70, 10000);
+    assertEquals(q, LatLonPoint.newDistanceQuery("field", 50, 70, 10000));
+    assertFalse(q.equals(LatLonPoint.newDistanceQuery("field", 50, 70, 11000)));
+    assertFalse(q.equals(LatLonPoint.newDistanceQuery("field", 50, 60, 10000)));
+
+                
+    double[] polyLats1 = new double[] {30, 40, 40, 30, 30};
+    double[] polyLons1 = new double[] {90, 90, -40, -40, 90};
+    double[] polyLats2 = new double[] {20, 40, 40, 20, 20};
+    q = LatLonPoint.newPolygonQuery("field", polyLats1, polyLons1);
+    assertEquals(q, LatLonPoint.newPolygonQuery("field", polyLats1, polyLons1));
+    assertFalse(q.equals(LatLonPoint.newPolygonQuery("field", polyLats2, polyLons1)));
+  }     
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/770e508f/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/PointInGeo3DShapeQuery.java
----------------------------------------------------------------------
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/PointInGeo3DShapeQuery.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/PointInGeo3DShapeQuery.java
index 9df8752..c9b5e4e 100644
--- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/PointInGeo3DShapeQuery.java
+++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/PointInGeo3DShapeQuery.java
@@ -43,7 +43,7 @@ import org.apache.lucene.util.NumericUtils;
  *
  * @lucene.experimental */
 
-class PointInGeo3DShapeQuery extends Query {
+final class PointInGeo3DShapeQuery extends Query {
   final String field;
   final GeoShape shape;
 
@@ -192,7 +192,7 @@ class PointInGeo3DShapeQuery extends Query {
   }
 
   @Override
-  public final int hashCode() {
+  public int hashCode() {
     int result = super.hashCode();
     result = 31 * result + shape.hashCode();
     return result;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/770e508f/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java
----------------------------------------------------------------------
diff --git a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java
index a4d8ed1..3061b76 100644
--- a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java
+++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java
@@ -807,4 +807,18 @@ public class TestGeo3DPoint extends LuceneTestCase {
   private static Directory getDirectory() {     
     return newDirectory();
   }
+
+  public void testEquals() {
+    GeoShape shape = randomShape(PlanetModel.WGS84);
+    Query q = Geo3DPoint.newShapeQuery("point", shape);
+    assertEquals(q, Geo3DPoint.newShapeQuery("point", shape));
+    
+    // make a different random shape:
+    GeoShape shape2;
+    do {
+      shape2 = randomShape(PlanetModel.WGS84);
+    } while (shape.equals(shape2));
+
+    assertFalse(q.equals(Geo3DPoint.newShapeQuery("point", shape2)));
+  }
 }


[20/50] [abbrv] lucene-solr git commit: SOLR-445: added test asserts of client exception metadata, and hacky kludge to get it working in CloudSolrClient

Posted by ho...@apache.org.
SOLR-445: added test asserts of client exception metadata, and hacky kludge to get it working in CloudSolrClient

need to refactor KnownErr into solr-common (and rename it to something clearer out of context) so client code can use it


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/5b405b61
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/5b405b61
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/5b405b61

Branch: refs/heads/jira/SOLR-445
Commit: 5b405b614417b1bcdbafe933b2e8308bf4e9fbeb
Parents: 92f81fb
Author: Chris Hostetter <ho...@apache.org>
Authored: Wed Mar 9 17:45:28 2016 -0700
Committer: Chris Hostetter <ho...@apache.org>
Committed: Wed Mar 9 17:45:28 2016 -0700

----------------------------------------------------------------------
 .../cloud/TestTolerantUpdateProcessorCloud.java | 67 ++++++++++++++++++--
 .../solr/client/solrj/impl/CloudSolrClient.java | 27 +++++++-
 2 files changed, 85 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5b405b61/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java
index aef0385..bee23a7 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java
@@ -22,7 +22,7 @@ import java.net.URL;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
-import java.util.HashSet;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -45,9 +45,13 @@ import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.util.RevertDefaultThreadHandlerRule;
 
+import org.apache.solr.update.processor.TolerantUpdateProcessor.KnownErr;
+import org.apache.solr.update.processor.TolerantUpdateProcessor.CmdType;
+
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -523,14 +527,37 @@ public class TestTolerantUpdateProcessorCloud extends SolrCloudTestCase {
     } catch (SolrException e) {
       // we can't make any reliable assertions about the error message, because
       // it varies based on how the request was routed
-      // nocommit: can we tighten this any more? substring check?
+      // nocommit: verify that we can't do an e.getMessage() substring check
       assertEquals("not the type of error we were expecting ("+e.code()+"): " + e.toString(),
                    // NOTE: we always expect a 400 because we know that's what we would get from these types of errors
                    // on a single node setup -- a 5xx type error isn't something we should have triggered
                    400, e.code());
 
-      // nocommit: is there a way to inspect the response body anyway?
-      // nocommit: look for the correct "errors" ?  .... check e's metatata
+      // verify that the Exceptions metadata can tell us what failed.
+      NamedList<String> remoteErrMetadata = e.getMetadata();
+      assertNotNull("no metadata in: " + e.toString(), remoteErrMetadata);
+      Set<KnownErr> actualKnownErrs = new LinkedHashSet<KnownErr>(remoteErrMetadata.size());
+      int actualKnownErrsCount = 0;
+      for (int i = 0; i < remoteErrMetadata.size(); i++) {
+        KnownErr err = KnownErr.parseMetadataIfKnownErr(remoteErrMetadata.getName(i),
+                                                        remoteErrMetadata.getVal(i));
+        if (null == err) {
+          // some metadata unrelated to this update processor
+          continue;
+        }
+        actualKnownErrsCount++;
+        actualKnownErrs.add(err);
+      }
+      assertEquals("wrong number of errors in metadata: " + remoteErrMetadata.toString(),
+                   11, actualKnownErrsCount);
+      assertEquals("at least one dup error in metadata: " + remoteErrMetadata.toString(),
+                   actualKnownErrsCount, actualKnownErrs.size());
+      for (KnownErr err : actualKnownErrs) {
+        assertEquals("only expected type of error is ADD: " + err,
+                     CmdType.ADD, err.type);
+        assertTrue("failed err msg didn't match expected value: " + err,
+                   err.errorValue.contains("bogus_val"));
+      }
     }
     assertEquals(0, client.commit().getStatus()); // need to force since update didn't finish
     assertQueryDocIds(client, false
@@ -573,14 +600,40 @@ public class TestTolerantUpdateProcessorCloud extends SolrCloudTestCase {
     } catch (SolrException e) {
       // we can't make any reliable assertions about the error message, because
       // it varies based on how the request was routed
-      // nocommit: can we tighten this any more? substring check?
+      // nocommit: verify that we can't do an e.getMessage() substring check
       assertEquals("not the type of error we were expecting ("+e.code()+"): " + e.toString(),
                    // NOTE: we always expect a 400 because we know that's what we would get from these types of errors
                    // on a single node setup -- a 5xx type error isn't something we should have triggered
                    400, e.code());
 
-      // nocommit: is there a way to inspect the response body anyway?
-      // nocommit: look for the correct "errors" ?  .... check e's metatata
+      // verify that the Exceptions metadata can tell us what failed.
+      NamedList<String> remoteErrMetadata = e.getMetadata();
+      assertNotNull("no metadata in: " + e.toString(), remoteErrMetadata);
+      Set<KnownErr> actualKnownErrs = new LinkedHashSet<KnownErr>(remoteErrMetadata.size());
+      int actualKnownErrsCount = 0;
+      for (int i = 0; i < remoteErrMetadata.size(); i++) {
+        KnownErr err = KnownErr.parseMetadataIfKnownErr(remoteErrMetadata.getName(i),
+                                                        remoteErrMetadata.getVal(i));
+        if (null == err) {
+          // some metadata unrelated to this update processor
+          continue;
+        }
+        actualKnownErrsCount++;
+        actualKnownErrs.add(err);
+      }
+      assertEquals("wrong number of errors in metadata: " + remoteErrMetadata.toString(),
+                   11, actualKnownErrsCount);
+      assertEquals("at least one dup error in metadata: " + remoteErrMetadata.toString(),
+                   actualKnownErrsCount, actualKnownErrs.size());
+      for (KnownErr err : actualKnownErrs) {
+        assertEquals("only expected type of error is ADD: " + err,
+                     CmdType.ADD, err.type);
+        assertTrue("failed id had unexpected prefix: " + err,
+                   err.id.startsWith(S_TWO_PRE));
+        assertTrue("failed err msg didn't match expected value: " + err,
+                   err.errorValue.contains("bogus_val"));
+      }
+           
     }
     assertEquals(0, client.commit().getStatus()); // need to force since update didn't finish
     assertQueryDocIds(client, true

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5b405b61/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
index 0248a7d..655844f 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
@@ -776,8 +776,15 @@ public class CloudSolrClient extends SolrClient {
     if (null != toleratedErrors) {
       cheader.add("errors", toleratedErrors);
       if (maxToleratedErrors < toleratedErrors.size()) {
-        // nocommit: populate metadata based on the toleratedErrors
-        throw new SolrException(ErrorCode.BAD_REQUEST, "nocommit: need better msg");
+        NamedList metadata = new NamedList<String>();
+        SolrException toThrow = new SolrException(ErrorCode.BAD_REQUEST, "nocommit: need better msg");
+        toThrow.setMetadata(metadata);
+        for (SimpleOrderedMap<String> err : toleratedErrors) {
+          // nocommit: hack, refactor KnownErr into solr-common and re-use here...
+          metadata.add("org.apache.solr.update.processor.TolerantUpdateProcessor--" +
+                       err.get("type") + ":" + err.get("id"), err.get("message"));
+        }
+        throw toThrow;
       }
     }
     condensed.add("responseHeader", cheader);
@@ -815,6 +822,22 @@ public class CloudSolrClient extends SolrClient {
       super(errorCode, throwables.getVal(0).getMessage(), throwables.getVal(0));
       this.throwables = throwables;
       this.routes = routes;
+
+      // create a merged copy of the metadata from all wrapped exceptions
+      NamedList<String> metadata = new NamedList<String>();
+      for (int i = 0; i < throwables.size(); i++) {
+        Throwable t = throwables.getVal(i);
+        if (t instanceof SolrException) {
+          SolrException e = (SolrException) t;
+          NamedList<String> eMeta = e.getMetadata();
+          if (null != eMeta) {
+            metadata.addAll(eMeta);
+          }
+        }
+      }
+      if (0 < metadata.size()) {
+        this.setMetadata(metadata);
+      }
     }
 
     public NamedList<Throwable> getThrowables() {

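The kludge above encodes each tolerated error into exception metadata under the key "org.apache.solr.update.processor.TolerantUpdateProcessor--" + type + ":" + id, with the error message as the value, and the merged-metadata constructor makes those entries visible through CloudSolrClient's wrapper exception. A hedged sketch of splitting such a key back apart on the client side; the real parseMetadataIfKnownErr referenced by the tests lives in TolerantUpdateProcessor, and the names and values below are illustrative only:

    public class KnownErrKeyDemo {
      static final String PREFIX =
          "org.apache.solr.update.processor.TolerantUpdateProcessor--";

      /** Returns { type, id, message }, or null for unrelated metadata. */
      static String[] parse(String key, String value) {
        if (key == null || !key.startsWith(PREFIX)) {
          return null; // some metadata unrelated to this update processor
        }
        String rest = key.substring(PREFIX.length());
        int colon = rest.indexOf(':');
        if (colon < 0) {
          return null;
        }
        return new String[] { rest.substring(0, colon), rest.substring(colon + 1), value };
      }

      public static void main(String[] args) {
        String[] err = parse(PREFIX + "ADD:doc42", "ERROR: [doc=doc42] bogus_val");
        System.out.println(err[0] + " / " + err[1] + " / " + err[2]);
      }
    }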

[41/50] [abbrv] lucene-solr git commit: test bug: close the right reader (the one we passed to newSearcher)

Posted by ho...@apache.org.
test bug: close the right reader (the one we passed to newSearcher)


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/1a233d83
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/1a233d83
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/1a233d83

Branch: refs/heads/jira/SOLR-445
Commit: 1a233d83dedecc2cf3d807079e413dae6758fa74
Parents: 343d9c6
Author: Mike McCandless <mi...@apache.org>
Authored: Fri Mar 11 04:53:43 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Fri Mar 11 04:53:43 2016 -0500

----------------------------------------------------------------------
 .../src/test/org/apache/lucene/queries/CommonTermsQueryTest.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1a233d83/lucene/queries/src/test/org/apache/lucene/queries/CommonTermsQueryTest.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/CommonTermsQueryTest.java b/lucene/queries/src/test/org/apache/lucene/queries/CommonTermsQueryTest.java
index e991b0c..fa74497 100644
--- a/lucene/queries/src/test/org/apache/lucene/queries/CommonTermsQueryTest.java
+++ b/lucene/queries/src/test/org/apache/lucene/queries/CommonTermsQueryTest.java
@@ -489,7 +489,7 @@ public class CommonTermsQueryTest extends LuceneTestCase {
       QueryUtils.check(random(), cq, newSearcher(reader2));
       reader2.close();
     } finally {
-      IOUtils.close(wrapper, w, dir, analyzer);
+      IOUtils.close(reader, w, dir, analyzer);
     }
     
   }


[26/50] [abbrv] lucene-solr git commit: LUCENE-7087: Let MemoryIndex#fromDocument(...) accept 'Iterable<? extends IndexableField>' as document instead of 'Document'

Posted by ho...@apache.org.
LUCENE-7087: Let MemoryIndex#fromDocument(...) accept 'Iterable<? extends IndexableField>' as document instead of 'Document'


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/0b8b16f9
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/0b8b16f9
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/0b8b16f9

Branch: refs/heads/jira/SOLR-445
Commit: 0b8b16f9f281f10d730019f6e291b31f42b936c7
Parents: 66cd070
Author: Martijn van Groningen <ma...@gmail.com>
Authored: Thu Mar 10 09:12:38 2016 +0100
Committer: Martijn van Groningen <mv...@apache.org>
Committed: Thu Mar 10 14:05:54 2016 +0100

----------------------------------------------------------------------
 lucene/CHANGES.txt                                             | 5 +++++
 .../src/java/org/apache/lucene/index/memory/MemoryIndex.java   | 6 +++---
 2 files changed, 8 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0b8b16f9/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 7e803cf..a07e69d 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -14,6 +14,11 @@ Optimizations
 * LUCENE-7071: Reduce bytes copying in OfflineSorter, giving ~10%
   speedup on merging 2D LatLonPoint values (Mike McCandless)
 
+Other
+
+* LUCENE-7087: Let MemoryIndex#fromDocument(...) accept 'Iterable<? extends IndexableField>'
+  as document instead of 'Document'. (Martijn van Groningen)
+
 ======================= Lucene 6.0.0 =======================
 
 System Requirements

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0b8b16f9/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
----------------------------------------------------------------------
diff --git a/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java b/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
index 849cd63..9e01182 100644
--- a/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
+++ b/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
@@ -265,7 +265,7 @@ public class MemoryIndex {
    * @param analyzer the analyzer to use
    * @return a MemoryIndex
    */
-  public static MemoryIndex fromDocument(Document document, Analyzer analyzer) {
+  public static MemoryIndex fromDocument(Iterable<? extends IndexableField> document, Analyzer analyzer) {
     return fromDocument(document, analyzer, false, false, 0);
   }
 
@@ -277,7 +277,7 @@ public class MemoryIndex {
    * @param storePayloads <code>true</code> if payloads should be stored
    * @return a MemoryIndex
    */
-  public static MemoryIndex fromDocument(Document document, Analyzer analyzer, boolean storeOffsets, boolean storePayloads) {
+  public static MemoryIndex fromDocument(Iterable<? extends IndexableField> document, Analyzer analyzer, boolean storeOffsets, boolean storePayloads) {
     return fromDocument(document, analyzer, storeOffsets, storePayloads, 0);
   }
 
@@ -290,7 +290,7 @@ public class MemoryIndex {
    * @param maxReusedBytes the number of bytes that should remain in the internal memory pools after {@link #reset()} is called
    * @return a MemoryIndex
    */
-  public static MemoryIndex fromDocument(Document document, Analyzer analyzer, boolean storeOffsets, boolean storePayloads, long maxReusedBytes) {
+  public static MemoryIndex fromDocument(Iterable<? extends IndexableField> document, Analyzer analyzer, boolean storeOffsets, boolean storePayloads, long maxReusedBytes) {
     MemoryIndex mi = new MemoryIndex(storeOffsets, storePayloads, maxReusedBytes);
     for (IndexableField field : document) {
       mi.addField(field, analyzer);

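With the parameter widened to Iterable<? extends IndexableField>, callers can index a plain list of fields without wrapping it in a Document first (Document itself implements Iterable<IndexableField>, so existing callers compile unchanged). A minimal usage sketch, assuming the lucene-memory and lucene-analyzers-common artifacts; the field name and value are made up:

    import java.util.Arrays;
    import java.util.List;

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.TextField;
    import org.apache.lucene.index.IndexableField;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.index.memory.MemoryIndex;
    import org.apache.lucene.search.TermQuery;

    public class MemoryIndexDemo {
      public static void main(String[] args) {
        // Any Iterable of fields works now; no Document wrapper needed.
        List<IndexableField> fields = Arrays.asList(
            new TextField("body", "the quick brown fox", Field.Store.NO));
        MemoryIndex mi = MemoryIndex.fromDocument(fields, new StandardAnalyzer());
        float score = mi.search(new TermQuery(new Term("body", "fox")));
        System.out.println(score > 0.0f); // true: the in-memory doc matches
      }
    }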

[08/50] [abbrv] lucene-solr git commit: add 2B point test

Posted by ho...@apache.org.
add 2B point test


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/004e83bb
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/004e83bb
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/004e83bb

Branch: refs/heads/jira/SOLR-445
Commit: 004e83bb6cd75dabf3e6c6928bcb6fe382b3f234
Parents: 55c595a
Author: Mike McCandless <mi...@apache.org>
Authored: Wed Mar 9 09:28:50 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Wed Mar 9 09:30:22 2016 -0500

----------------------------------------------------------------------
 .../org/apache/lucene/index/Test2BPoints.java   | 130 +++++++++++++++++++
 1 file changed, 130 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/004e83bb/lucene/core/src/test/org/apache/lucene/index/Test2BPoints.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/Test2BPoints.java b/lucene/core/src/test/org/apache/lucene/index/Test2BPoints.java
new file mode 100644
index 0000000..bfe387e
--- /dev/null
+++ b/lucene/core/src/test/org/apache/lucene/index/Test2BPoints.java
@@ -0,0 +1,130 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.index;
+
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.codecs.Codec;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.util.LuceneTestCase.Monster;
+import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.TestUtil;
+import org.apache.lucene.util.TimeUnits;
+
+import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
+
+// e.g. run like this: ant test -Dtestcase=Test2BPoints -Dtests.nightly=true -Dtests.verbose=true -Dtests.monster=true
+// 
+//   or: python -u /l/util/src/python/repeatLuceneTest.py -once -nolog -tmpDir /b/tmp -logDir /l/logs Test2BPoints.test1D -verbose
+
+@SuppressCodecs({ "SimpleText", "Memory", "Direct", "Compressing" })
+@TimeoutSuite(millis = 16 * TimeUnits.HOUR)
+@Monster("takes at least 4 hours and consumes many GB of temp disk space")
+public class Test2BPoints extends LuceneTestCase {
+  public void test1D() throws Exception {
+    Directory dir = FSDirectory.open(createTempDir("2BPoints1D"));
+    System.out.println("DIR: " + ((FSDirectory) dir).getDirectory());
+
+    IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()))
+        .setCodec(Codec.forName("Lucene60"))
+        .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+        .setRAMBufferSizeMB(64.0)
+        .setMergeScheduler(new ConcurrentMergeScheduler())
+        .setMergePolicy(newLogMergePolicy(false, 10))
+        .setOpenMode(IndexWriterConfig.OpenMode.CREATE);
+    
+    IndexWriter w = new IndexWriter(dir, iwc);
+
+    MergePolicy mp = w.getConfig().getMergePolicy();
+    if (mp instanceof LogByteSizeMergePolicy) {
+     // 1 petabyte:
+     ((LogByteSizeMergePolicy) mp).setMaxMergeMB(1024*1024*1024);
+    }
+
+    final int numDocs = (Integer.MAX_VALUE / 26) + 1;
+    long counter = 0;
+    for (int i = 0; i < numDocs; i++) {
+      Document doc = new Document();
+      for (int j=0;j<26;j++) {
+        doc.add(new LongPoint("long", counter));
+        counter++;
+      }
+      w.addDocument(doc);
+      if (VERBOSE && i % 100000 == 0) {
+        System.out.println(i + " of " + numDocs + "...");
+      }
+    }
+    w.forceMerge(1);
+    DirectoryReader r = DirectoryReader.open(w);
+    IndexSearcher s = new IndexSearcher(r);
+    assertEquals(1250, s.count(LongPoint.newRangeQuery("long", 33640828, 33673327)));
+    assertTrue(r.leaves().get(0).reader().getPointValues().size("long") > Integer.MAX_VALUE);
+    r.close();
+    w.close();
+    System.out.println("TEST: now CheckIndex");
+    TestUtil.checkIndex(dir);
+    dir.close();
+  }
+
+  public void test2D() throws Exception {
+    Directory dir = FSDirectory.open(createTempDir("2BPoints2D"));
+
+    IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()))
+        .setCodec(Codec.forName("Lucene60"))
+        .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+        .setRAMBufferSizeMB(64.0)
+        .setMergeScheduler(new ConcurrentMergeScheduler())
+        .setMergePolicy(newLogMergePolicy(false, 10))
+        .setOpenMode(IndexWriterConfig.OpenMode.CREATE);
+    
+    IndexWriter w = new IndexWriter(dir, iwc);
+
+    MergePolicy mp = w.getConfig().getMergePolicy();
+    if (mp instanceof LogByteSizeMergePolicy) {
+     // 1 petabyte:
+     ((LogByteSizeMergePolicy) mp).setMaxMergeMB(1024*1024*1024);
+    }
+
+    final int numDocs = (Integer.MAX_VALUE / 26) + 1;
+    long counter = 0;
+    for (int i = 0; i < numDocs; i++) {
+      Document doc = new Document();
+      for (int j=0;j<26;j++) {
+        doc.add(new LongPoint("long", counter, 2*counter+1));
+        counter++;
+      }
+      w.addDocument(doc);
+      if (VERBOSE && i % 100000 == 0) {
+        System.out.println(i + " of " + numDocs + "...");
+      }
+    }
+    w.forceMerge(1);
+    DirectoryReader r = DirectoryReader.open(w);
+    IndexSearcher s = new IndexSearcher(r);
+    assertEquals(1250, s.count(LongPoint.newRangeQuery("long", new long[] {33640828, 33673327}, new long[] {Long.MIN_VALUE, Long.MAX_VALUE})));
+    assertTrue(r.leaves().get(0).reader().getPointValues().size("long") > Integer.MAX_VALUE);
+    r.close();
+    w.close();
+    System.out.println("TEST: now CheckIndex");
+    TestUtil.checkIndex(dir);
+    dir.close();
+  }
+}

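The doc/point arithmetic is what pushes both tests past 2^31 points: (Integer.MAX_VALUE / 26) + 1 documents times 26 points per document is just over Integer.MAX_VALUE, which is exactly what the size("long") assertion checks. Spelled out with overflow-safe long math:

    public class PointCountCheck {
      public static void main(String[] args) {
        final int numDocs = (Integer.MAX_VALUE / 26) + 1;    // 82,595,525 docs
        long totalPoints = 26L * numDocs;                    // long math, no int overflow
        System.out.println(totalPoints);                     // 2147483650
        System.out.println(totalPoints > Integer.MAX_VALUE); // true, by 3 points
      }
    }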

[03/50] [abbrv] lucene-solr git commit: fix random float test to do the +/- 1 ulp in float space

Posted by ho...@apache.org.
fix random float test to do the +/- 1 ulp in float space


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/a6c8ccbc
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/a6c8ccbc
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/a6c8ccbc

Branch: refs/heads/jira/SOLR-445
Commit: a6c8ccbc99a9fc83cc5ddfcfd65f9c3c4d4e920c
Parents: d776f7b
Author: Mike McCandless <mi...@apache.org>
Authored: Tue Mar 8 17:21:12 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Tue Mar 8 17:22:31 2016 -0500

----------------------------------------------------------------------
 .../lucene/facet/range/TestRangeFacetCounts.java    | 16 +++++++++++++++-
 1 file changed, 15 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a6c8ccbc/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java
----------------------------------------------------------------------
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java b/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java
index 9fde6e3..9f8b109 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java
@@ -532,6 +532,8 @@ public class TestRangeFacetCounts extends FacetTestCase {
       int[] expectedCounts = new int[numRange];
       float minAcceptedValue = Float.POSITIVE_INFINITY;
       float maxAcceptedValue = Float.NEGATIVE_INFINITY;
+      boolean[] rangeMinIncl = new boolean[numRange];
+      boolean[] rangeMaxIncl = new boolean[numRange];
       if (VERBOSE) {
         System.out.println("TEST: " + numRange + " ranges");
       }
@@ -582,6 +584,8 @@ public class TestRangeFacetCounts extends FacetTestCase {
           minIncl = random().nextBoolean();
           maxIncl = random().nextBoolean();
         }
+        rangeMinIncl[rangeID] = minIncl;
+        rangeMaxIncl[rangeID] = maxIncl;
         ranges[rangeID] = new DoubleRange("r" + rangeID, min, minIncl, max, maxIncl);
 
         if (VERBOSE) {
@@ -642,7 +646,17 @@ public class TestRangeFacetCounts extends FacetTestCase {
         // Test drill-down:
         DrillDownQuery ddq = new DrillDownQuery(config);
         if (random().nextBoolean()) {
-          ddq.add("field", FloatPoint.newRangeQuery("field", (float) range.min, (float) range.max));
+          // We must do the nextUp/down in float space, here, because the nextUp that DoubleRange did in double space, when cast back to float,
+          // in fact does nothing!
+          float minFloat = (float) range.min;
+          if (rangeMinIncl[rangeID] == false) {
+            minFloat = Math.nextUp(minFloat);
+          }
+          float maxFloat = (float) range.max;
+          if (rangeMaxIncl[rangeID] == false) {
+            maxFloat = Math.nextAfter(maxFloat, Float.NEGATIVE_INFINITY);
+          }
+          ddq.add("field", FloatPoint.newRangeQuery("field", minFloat, maxFloat));
         } else {
           ddq.add("field", range.getQuery(fastMatchQuery, vs));
         }

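The comment in the patch is the whole bug: for a float-representable bound, stepping one ulp in double space and casting back to float rounds to the same float, so an exclusive range bound silently becomes inclusive. Stepping in float space actually moves. A two-line demonstration:

    public class UlpDemo {
      public static void main(String[] args) {
        float min = 1.0f;
        // One double ulp up, cast back to float: rounds back to the same value.
        System.out.println((float) Math.nextUp((double) min) == min); // true
        // One float ulp up: genuinely the next representable float.
        System.out.println(Math.nextUp(min) == min);                  // false
      }
    }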

[42/50] [abbrv] lucene-solr git commit: remove TODO that was in fact done

Posted by ho...@apache.org.
remove TODO that was in fact done


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/af349938
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/af349938
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/af349938

Branch: refs/heads/jira/SOLR-445
Commit: af3499384305e3651c40203495262d27e1eec9bb
Parents: 1a233d8
Author: Mike McCandless <mi...@apache.org>
Authored: Fri Mar 11 04:54:01 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Fri Mar 11 04:54:01 2016 -0500

----------------------------------------------------------------------
 lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java | 3 ---
 1 file changed, 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/af349938/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java
index 5388bb3..aa7e5dc 100644
--- a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java
@@ -1059,9 +1059,6 @@ public class BKDWriter implements Closeable {
       assert count > 0: "nodeID=" + nodeID + " leafNodeOffset=" + leafNodeOffset;
       writeLeafBlockDocs(out, heapSource.docIDs, Math.toIntExact(source.start), count);
 
-      // TODO: we should delta compress / only write suffix bytes, like terms dict (the values will all be "close together" since we are at
-      // a leaf cell):
-
       // First pass: find the per-dim common prefix for all values in this block:
       Arrays.fill(commonPrefixLengths, bytesPerDim);
       for (int i=0;i<count;i++) {


[27/50] [abbrv] lucene-solr git commit: LUCENE-7090, LUCENE-7075: deprecate single-valued LegacyNumerics fieldcaching, provide Points-based replacement.

Posted by ho...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java
index 717d364..f46bdde 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java
@@ -23,11 +23,16 @@ import java.util.Map;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoublePoint;
 import org.apache.lucene.document.LegacyDoubleField;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FloatPoint;
+import org.apache.lucene.document.IntPoint;
 import org.apache.lucene.document.LegacyFloatField;
 import org.apache.lucene.document.LegacyIntField;
 import org.apache.lucene.document.LegacyLongField;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.document.StoredField;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
@@ -449,6 +454,140 @@ public class TestFieldCacheSort extends LuceneTestCase {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
+    doc.add(new IntPoint("value", 300000));
+    doc.add(new StoredField("value", 300000));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new IntPoint("value", -1));
+    doc.add(new StoredField("value", -1));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new IntPoint("value", 4));
+    doc.add(new StoredField("value", 4));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.INTEGER_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.INT));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // numeric order
+    assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("300000", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type int with a missing value */
+  public void testIntMissing() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new IntPoint("value", -1));
+    doc.add(new StoredField("value", -1));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new IntPoint("value", 4));
+    doc.add(new StoredField("value", 4));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.INTEGER_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.INT));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // null is treated as 0
+    assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertNull(searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("4", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type int, specifying the missing value should be treated as Integer.MAX_VALUE */
+  public void testIntMissingLast() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new IntPoint("value", -1));
+    doc.add(new StoredField("value", -1));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new IntPoint("value", 4));
+    doc.add(new StoredField("value", 4));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.INTEGER_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    SortField sortField = new SortField("value", SortField.Type.INT);
+    sortField.setMissingValue(Integer.MAX_VALUE);
+    Sort sort = new Sort(sortField);
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // null is treated as Integer.MAX_VALUE
+    assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertNull(searcher.doc(td.scoreDocs[2].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type int in reverse */
+  public void testIntReverse() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    doc.add(new IntPoint("value", 300000));
+    doc.add(new StoredField("value", 300000));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new IntPoint("value", -1));
+    doc.add(new StoredField("value", -1));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new IntPoint("value", 4));
+    doc.add(new StoredField("value", 4));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.INTEGER_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.INT, true));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // reverse numeric order
+    assertEquals("300000", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("-1", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+
+  /** Tests sorting on type legacy int */
+  public void testLegacyInt() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
     doc.add(new LegacyIntField("value", 300000, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
@@ -458,7 +597,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyIntField("value", 4, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.INTEGER));
+                     Collections.singletonMap("value", Type.LEGACY_INTEGER));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -475,8 +614,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type int with a missing value */
-  public void testIntMissing() throws IOException {
+  /** Tests sorting on type legacy int with a missing value */
+  public void testLegacyIntMissing() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -488,7 +627,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyIntField("value", 4, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.INTEGER));
+                     Collections.singletonMap("value", Type.LEGACY_INTEGER));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -505,8 +644,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type int, specifying the missing value should be treated as Integer.MAX_VALUE */
-  public void testIntMissingLast() throws IOException {
+  /** Tests sorting on type legacy int, specifying the missing value should be treated as Integer.MAX_VALUE */
+  public void testLegacyIntMissingLast() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -518,7 +657,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyIntField("value", 4, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.INTEGER));
+                     Collections.singletonMap("value", Type.LEGACY_INTEGER));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -537,8 +676,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type int in reverse */
-  public void testIntReverse() throws IOException {
+  /** Tests sorting on type legacy int in reverse */
+  public void testLegacyIntReverse() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -551,7 +690,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyIntField("value", 4, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.INTEGER));
+                     Collections.singletonMap("value", Type.LEGACY_INTEGER));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -573,6 +712,140 @@ public class TestFieldCacheSort extends LuceneTestCase {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
+    doc.add(new LongPoint("value", 3000000000L));
+    doc.add(new StoredField("value", 3000000000L));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new LongPoint("value", -1));
+    doc.add(new StoredField("value", -1));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new LongPoint("value", 4));
+    doc.add(new StoredField("value", 4));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.LONG_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.LONG));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // numeric order
+    assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("3000000000", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type long with a missing value */
+  public void testLongMissing() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new LongPoint("value", -1));
+    doc.add(new StoredField("value", -1));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new LongPoint("value", 4));
+    doc.add(new StoredField("value", 4));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.LONG_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.LONG));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // null is treated as 0
+    assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertNull(searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("4", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type long, specifying the missing value should be treated as Long.MAX_VALUE */
+  public void testLongMissingLast() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new LongPoint("value", -1));
+    doc.add(new StoredField("value", -1));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new LongPoint("value", 4));
+    doc.add(new StoredField("value", 4));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.LONG_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    SortField sortField = new SortField("value", SortField.Type.LONG);
+    sortField.setMissingValue(Long.MAX_VALUE);
+    Sort sort = new Sort(sortField);
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // null is treated as Long.MAX_VALUE
+    assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertNull(searcher.doc(td.scoreDocs[2].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type long in reverse */
+  public void testLongReverse() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    doc.add(new LongPoint("value", 3000000000L));
+    doc.add(new StoredField("value", 3000000000L));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new LongPoint("value", -1));
+    doc.add(new StoredField("value", -1));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new LongPoint("value", 4));
+    doc.add(new StoredField("value", 4));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.LONG_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.LONG, true));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // reverse numeric order
+    assertEquals("3000000000", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("-1", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type legacy long */
+  public void testLegacyLong() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
     doc.add(new LegacyLongField("value", 3000000000L, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
@@ -582,7 +855,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyLongField("value", 4, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.LONG));
+                     Collections.singletonMap("value", Type.LEGACY_LONG));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -599,8 +872,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type long with a missing value */
-  public void testLongMissing() throws IOException {
+  /** Tests sorting on type legacy long with a missing value */
+  public void testLegacyLongMissing() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -612,7 +885,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyLongField("value", 4, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.LONG));
+                     Collections.singletonMap("value", Type.LEGACY_LONG));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -629,8 +902,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type long, specifying the missing value should be treated as Long.MAX_VALUE */
-  public void testLongMissingLast() throws IOException {
+  /** Tests sorting on type legacy long, specifying the missing value should be treated as Long.MAX_VALUE */
+  public void testLegacyLongMissingLast() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -642,7 +915,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyLongField("value", 4, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.LONG));
+                     Collections.singletonMap("value", Type.LEGACY_LONG));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -661,8 +934,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type long in reverse */
-  public void testLongReverse() throws IOException {
+  /** Tests sorting on type legacy long in reverse */
+  public void testLegacyLongReverse() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -675,7 +948,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyLongField("value", 4, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.LONG));
+                     Collections.singletonMap("value", Type.LEGACY_LONG));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -697,6 +970,140 @@ public class TestFieldCacheSort extends LuceneTestCase {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
+    doc.add(new FloatPoint("value", 30.1f));
+    doc.add(new StoredField("value", 30.1f));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new FloatPoint("value", -1.3f));
+    doc.add(new StoredField("value", -1.3f));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new FloatPoint("value", 4.2f));
+    doc.add(new StoredField("value", 4.2f));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.FLOAT_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // numeric order
+    assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4.2", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("30.1", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type float with a missing value */
+  public void testFloatMissing() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new FloatPoint("value", -1.3f));
+    doc.add(new StoredField("value", -1.3f));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new FloatPoint("value", 4.2f));
+    doc.add(new StoredField("value", 4.2f));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.FLOAT_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // null is treated as 0
+    assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertNull(searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("4.2", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type float, specifying the missing value should be treated as Float.MAX_VALUE */
+  public void testFloatMissingLast() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new FloatPoint("value", -1.3f));
+    doc.add(new StoredField("value", -1.3f));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new FloatPoint("value", 4.2f));
+    doc.add(new StoredField("value", 4.2f));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.FLOAT_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    SortField sortField = new SortField("value", SortField.Type.FLOAT);
+    sortField.setMissingValue(Float.MAX_VALUE);
+    Sort sort = new Sort(sortField);
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // null is treated as Float.MAX_VALUE
+    assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4.2", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertNull(searcher.doc(td.scoreDocs[2].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type float in reverse */
+  public void testFloatReverse() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    doc.add(new FloatPoint("value", 30.1f));
+    doc.add(new StoredField("value", 30.1f));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new FloatPoint("value", -1.3f));
+    doc.add(new StoredField("value", -1.3f));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new FloatPoint("value", 4.2f));
+    doc.add(new StoredField("value", 4.2f));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.FLOAT_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT, true));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(3, td.totalHits);
+    // reverse numeric order
+    assertEquals("30.1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4.2", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("-1.3", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type legacy float */
+  public void testLegacyFloat() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
     doc.add(new LegacyFloatField("value", 30.1f, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
@@ -706,7 +1113,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyFloatField("value", 4.2f, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.FLOAT));
+                     Collections.singletonMap("value", Type.LEGACY_FLOAT));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -723,8 +1130,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type float with a missing value */
-  public void testFloatMissing() throws IOException {
+  /** Tests sorting on type legacy float with a missing value */
+  public void testLegacyFloatMissing() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -736,7 +1143,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyFloatField("value", 4.2f, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.FLOAT));
+                     Collections.singletonMap("value", Type.LEGACY_FLOAT));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -753,8 +1160,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type float, specifying the missing value should be treated as Float.MAX_VALUE */
-  public void testFloatMissingLast() throws IOException {
+  /** Tests sorting on type legacy float, specifying the missing value should be treated as Float.MAX_VALUE */
+  public void testLegacyFloatMissingLast() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -766,7 +1173,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyFloatField("value", 4.2f, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.FLOAT));
+                     Collections.singletonMap("value", Type.LEGACY_FLOAT));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -785,8 +1192,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type float in reverse */
-  public void testFloatReverse() throws IOException {
+  /** Tests sorting on type legacy float in reverse */
+  public void testLegacyFloatReverse() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -799,7 +1206,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyFloatField("value", 4.2f, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.FLOAT));
+                     Collections.singletonMap("value", Type.LEGACY_FLOAT));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -821,6 +1228,195 @@ public class TestFieldCacheSort extends LuceneTestCase {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
+    doc.add(new DoublePoint("value", 30.1));
+    doc.add(new StoredField("value", 30.1));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", -1.3));
+    doc.add(new StoredField("value", -1.3));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", 4.2333333333333));
+    doc.add(new StoredField("value", 4.2333333333333));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", 4.2333333333332));
+    doc.add(new StoredField("value", 4.2333333333332));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.DOUBLE_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(4, td.totalHits);
+    // numeric order
+    assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4.2333333333332", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("4.2333333333333", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    assertEquals("30.1", searcher.doc(td.scoreDocs[3].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type double with +/- zero */
+  public void testDoubleSignedZero() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    doc.add(new DoublePoint("value", +0d));
+    doc.add(new StoredField("value", +0d));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", -0d));
+    doc.add(new StoredField("value", -0d));
+    writer.addDocument(doc);
+    doc = new Document();
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.DOUBLE_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(2, td.totalHits);
+    // numeric order
+    double v0 = searcher.doc(td.scoreDocs[0].doc).getField("value").numericValue().doubleValue();
+    double v1 = searcher.doc(td.scoreDocs[1].doc).getField("value").numericValue().doubleValue();
+    assertEquals(0, v0, 0d);
+    assertEquals(0, v1, 0d);
+    // check sign bits
+    assertEquals(1, Double.doubleToLongBits(v0) >>> 63);
+    assertEquals(0, Double.doubleToLongBits(v1) >>> 63);
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type double with a missing value */
+  public void testDoubleMissing() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", -1.3));
+    doc.add(new StoredField("value", -1.3));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", 4.2333333333333));
+    doc.add(new StoredField("value", 4.2333333333333));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", 4.2333333333332));
+    doc.add(new StoredField("value", 4.2333333333332));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.DOUBLE_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(4, td.totalHits);
+    // null is treated as 0
+    assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertNull(searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("4.2333333333332", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    assertEquals("4.2333333333333", searcher.doc(td.scoreDocs[3].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type double, specifying the missing value should be treated as Double.MAX_VALUE */
+  public void testDoubleMissingLast() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", -1.3));
+    doc.add(new StoredField("value", -1.3));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", 4.2333333333333));
+    doc.add(new StoredField("value", 4.2333333333333));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", 4.2333333333332));
+    doc.add(new StoredField("value", 4.2333333333332));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.DOUBLE_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    SortField sortField = new SortField("value", SortField.Type.DOUBLE);
+    sortField.setMissingValue(Double.MAX_VALUE);
+    Sort sort = new Sort(sortField);
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(4, td.totalHits);
+    // null is treated as Double.MAX_VALUE
+    assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4.2333333333332", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("4.2333333333333", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    assertNull(searcher.doc(td.scoreDocs[3].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+  
+  /** Tests sorting on type double in reverse */
+  public void testDoubleReverse() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    doc.add(new DoublePoint("value", 30.1));
+    doc.add(new StoredField("value", 30.1));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", -1.3));
+    doc.add(new StoredField("value", -1.3));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", 4.2333333333333));
+    doc.add(new StoredField("value", 4.2333333333333));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new DoublePoint("value", 4.2333333333332));
+    doc.add(new StoredField("value", 4.2333333333332));
+    writer.addDocument(doc);
+    IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
+                     Collections.singletonMap("value", Type.DOUBLE_POINT));
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir, false);
+    Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE, true));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(4, td.totalHits);
+    // reverse numeric order
+    assertEquals("30.1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+    assertEquals("4.2333333333333", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    assertEquals("4.2333333333332", searcher.doc(td.scoreDocs[2].doc).get("value"));
+    assertEquals("-1.3", searcher.doc(td.scoreDocs[3].doc).get("value"));
+    TestUtil.checkReader(ir);
+    ir.close();
+    dir.close();
+  }
+
+  /** Tests sorting on type legacy double */
+  public void testLegacyDouble() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
     doc.add(new LegacyDoubleField("value", 30.1, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
@@ -833,7 +1429,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyDoubleField("value", 4.2333333333332, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.DOUBLE));
+                     Collections.singletonMap("value", Type.LEGACY_DOUBLE));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -851,8 +1447,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type double with +/- zero */
-  public void testDoubleSignedZero() throws IOException {
+  /** Tests sorting on type legacy double with +/- zero */
+  public void testLegacyDoubleSignedZero() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -863,7 +1459,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     writer.addDocument(doc);
     doc = new Document();
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.DOUBLE));
+                     Collections.singletonMap("value", Type.LEGACY_DOUBLE));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -884,8 +1480,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type double with a missing value */
-  public void testDoubleMissing() throws IOException {
+  /** Tests sorting on type legacy double with a missing value */
+  public void testLegacyDoubleMissing() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -900,7 +1496,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyDoubleField("value", 4.2333333333332, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.DOUBLE));
+                     Collections.singletonMap("value", Type.LEGACY_DOUBLE));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -918,8 +1514,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type double, specifying the missing value should be treated as Double.MAX_VALUE */
-  public void testDoubleMissingLast() throws IOException {
+  /** Tests sorting on type legacy double, specifying the missing value should be treated as Double.MAX_VALUE */
+  public void testLegacyDoubleMissingLast() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -934,7 +1530,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyDoubleField("value", 4.2333333333332, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.DOUBLE));
+                     Collections.singletonMap("value", Type.LEGACY_DOUBLE));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -954,8 +1550,8 @@ public class TestFieldCacheSort extends LuceneTestCase {
     dir.close();
   }
   
-  /** Tests sorting on type double in reverse */
-  public void testDoubleReverse() throws IOException {
+  /** Tests sorting on type legacy double in reverse */
+  public void testLegacyDoubleReverse() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -971,7 +1567,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     doc.add(new LegacyDoubleField("value", 4.2333333333332, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = UninvertingReader.wrap(writer.getReader(), 
-                     Collections.singletonMap("value", Type.DOUBLE));
+                     Collections.singletonMap("value", Type.LEGACY_DOUBLE));
     writer.close();
     
     IndexSearcher searcher = newSearcher(ir);
@@ -1062,7 +1658,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
     }
 
     IndexReader r = UninvertingReader.wrap(DirectoryReader.open(w),
-                    Collections.singletonMap("id", Type.INTEGER));
+                    Collections.singletonMap("id", Type.LEGACY_INTEGER));
     w.close();
     Query q = new TermQuery(new Term("body", "text"));
     IndexSearcher s = newSearcher(r);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSortRandom.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSortRandom.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSortRandom.java
index 0b6292d..f3bd455 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSortRandom.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSortRandom.java
@@ -30,7 +30,8 @@ import java.util.Set;
 
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.document.LegacyIntField;
+import org.apache.lucene.document.IntPoint;
+import org.apache.lucene.document.StoredField;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.IndexReader;
@@ -118,7 +119,8 @@ public class TestFieldCacheSortRandom extends LuceneTestCase {
         docValues.add(null);
       }
 
-      doc.add(new LegacyIntField("id", numDocs, Field.Store.YES));
+      doc.add(new IntPoint("id", numDocs));
+      doc.add(new StoredField("id", numDocs));
       writer.addDocument(doc);
       numDocs++;
 
@@ -130,7 +132,7 @@ public class TestFieldCacheSortRandom extends LuceneTestCase {
 
     Map<String,UninvertingReader.Type> mapping = new HashMap<>();
     mapping.put("stringdv", Type.SORTED);
-    mapping.put("id", Type.INTEGER);
+    mapping.put("id", Type.INTEGER_POINT);
     final IndexReader r = UninvertingReader.wrap(writer.getReader(), mapping);
     writer.close();
     if (VERBOSE) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java
index 1b14522..23b7d0c 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java
@@ -458,8 +458,8 @@ public class TestFieldCacheVsDocValues extends LuceneTestCase {
     DirectoryReader ir = DirectoryReader.open(dir);
     for (LeafReaderContext context : ir.leaves()) {
       LeafReader r = context.reader();
-      Bits expected = FieldCache.DEFAULT.getDocsWithField(r, "indexed");
-      Bits actual = FieldCache.DEFAULT.getDocsWithField(r, "dv");
+      Bits expected = FieldCache.DEFAULT.getDocsWithField(r, "indexed", null);
+      Bits actual = FieldCache.DEFAULT.getDocsWithField(r, "dv", null);
       assertEquals(expected, actual);
     }
     ir.close();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java
index 9b05ee1..e716419 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java
@@ -42,6 +42,7 @@ import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
 
+// TODO: what happened to this test... it's not actually uninverting?
 public class TestFieldCacheWithThreads extends LuceneTestCase {
 
   public void test() throws Exception {
@@ -83,7 +84,7 @@ public class TestFieldCacheWithThreads extends LuceneTestCase {
           public void run() {
             try {
               //NumericDocValues ndv = ar.getNumericDocValues("number");
-              NumericDocValues ndv = FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.NUMERIC_UTILS_LONG_PARSER, false);
+              NumericDocValues ndv = FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.LONG_POINT_PARSER, false);
               //BinaryDocValues bdv = ar.getBinaryDocValues("bytes");
               BinaryDocValues bdv = FieldCache.DEFAULT.getTerms(ar, "bytes", false);
               SortedDocValues sdv = FieldCache.DEFAULT.getTermsIndex(ar, "sorted");
@@ -93,16 +94,16 @@ public class TestFieldCacheWithThreads extends LuceneTestCase {
                 int docID = threadRandom.nextInt(numDocs);
                 switch(threadRandom.nextInt(4)) {
                 case 0:
-                  assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.NUMERIC_UTILS_INT_PARSER, false).get(docID));
+                  assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.INT_POINT_PARSER, false).get(docID));
                   break;
                 case 1:
-                  assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.NUMERIC_UTILS_LONG_PARSER, false).get(docID));
+                  assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.LONG_POINT_PARSER, false).get(docID));
                   break;
                 case 2:
-                  assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.NUMERIC_UTILS_FLOAT_PARSER, false).get(docID));
+                  assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.FLOAT_POINT_PARSER, false).get(docID));
                   break;
                 case 3:
-                  assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, false).get(docID));
+                  assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.DOUBLE_POINT_PARSER, false).get(docID));
                   break;
                 }
                 BytesRef term = bdv.get(docID);
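
The hunk above also shows the parser renames in one place: the trie-era FieldCache.NUMERIC_UTILS_*_PARSER constants become FieldCache.LEGACY_*_PARSER, while points-indexed fields use the new FieldCache.*_POINT_PARSER constants; getDocsWithField likewise gains a third parser argument, passed as null at several updated call sites. In sketch form, assuming a LeafReader ar over a "number" field as in the test:

    // Legacy trie-encoded numeric field:
    NumericDocValues legacy = FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.LEGACY_LONG_PARSER, false);
    // Points-indexed field (LongPoint etc.):
    NumericDocValues points = FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.LONG_POINT_PARSER, false);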

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/misc/src/test/org/apache/lucene/uninverting/TestLegacyFieldCache.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestLegacyFieldCache.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestLegacyFieldCache.java
new file mode 100644
index 0000000..c4ef1c4
--- /dev/null
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestLegacyFieldCache.java
@@ -0,0 +1,498 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.uninverting;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+
+import java.util.concurrent.CyclicBarrier;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.document.BinaryDocValuesField;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.LegacyDoubleField;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.LegacyFloatField;
+import org.apache.lucene.document.LegacyIntField;
+import org.apache.lucene.document.LegacyLongField;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.SortedDocValuesField;
+import org.apache.lucene.document.SortedSetDocValuesField;
+import org.apache.lucene.document.StoredField;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.index.SlowCompositeReaderWrapper;
+import org.apache.lucene.index.Terms;
+import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util.LegacyNumericUtils;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.TestUtil;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+/** random assortment of tests against legacy numerics */
+public class TestLegacyFieldCache extends LuceneTestCase {
+  private static LeafReader reader;
+  private static int NUM_DOCS;
+  private static Directory directory;
+
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    NUM_DOCS = atLeast(500);
+    directory = newDirectory();
+    RandomIndexWriter writer= new RandomIndexWriter(random(), directory, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
+    long theLong = Long.MAX_VALUE;
+    double theDouble = Double.MAX_VALUE;
+    int theInt = Integer.MAX_VALUE;
+    float theFloat = Float.MAX_VALUE;
+    if (VERBOSE) {
+      System.out.println("TEST: setUp");
+    }
+    for (int i = 0; i < NUM_DOCS; i++){
+      Document doc = new Document();
+      doc.add(new LegacyLongField("theLong", theLong--, Field.Store.NO));
+      doc.add(new LegacyDoubleField("theDouble", theDouble--, Field.Store.NO));
+      doc.add(new LegacyIntField("theInt", theInt--, Field.Store.NO));
+      doc.add(new LegacyFloatField("theFloat", theFloat--, Field.Store.NO));
+      if (i%2 == 0) {
+        doc.add(new LegacyIntField("sparse", i, Field.Store.NO));
+      }
+
+      if (i%2 == 0) {
+        doc.add(new LegacyIntField("numInt", i, Field.Store.NO));
+      }
+      writer.addDocument(doc);
+    }
+    IndexReader r = writer.getReader();
+    reader = SlowCompositeReaderWrapper.wrap(r);
+    TestUtil.checkReader(reader);
+    writer.close();
+  }
+
+  @AfterClass
+  public static void afterClass() throws Exception {
+    reader.close();
+    reader = null;
+    directory.close();
+    directory = null;
+  }
+  
+  public void testInfoStream() throws Exception {
+    try {
+      FieldCache cache = FieldCache.DEFAULT;
+      ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
+      cache.setInfoStream(new PrintStream(bos, false, IOUtils.UTF_8));
+      cache.getNumerics(reader, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, false);
+      cache.getNumerics(reader, "theDouble", new FieldCache.Parser() {
+        @Override
+        public TermsEnum termsEnum(Terms terms) throws IOException {
+          return LegacyNumericUtils.filterPrefixCodedLongs(terms.iterator());
+        }
+        @Override
+        public long parseValue(BytesRef term) {
+          int val = (int) LegacyNumericUtils.prefixCodedToLong(term);
+          if (val<0) val ^= 0x7fffffff;
+          return val;
+        }
+      }, false);
+      assertTrue(bos.toString(IOUtils.UTF_8).indexOf("WARNING") != -1);
+    } finally {
+      FieldCache.DEFAULT.setInfoStream(null);
+      FieldCache.DEFAULT.purgeAllCaches();
+    }
+  }
+
+  public void test() throws IOException {
+    FieldCache cache = FieldCache.DEFAULT;
+    NumericDocValues doubles = cache.getNumerics(reader, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, random().nextBoolean());
+    assertSame("Second request to cache return same array", doubles, cache.getNumerics(reader, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, random().nextBoolean()));
+    for (int i = 0; i < NUM_DOCS; i++) {
+      assertEquals(Double.doubleToLongBits(Double.MAX_VALUE - i), doubles.get(i));
+    }
+    
+    NumericDocValues longs = cache.getNumerics(reader, "theLong", FieldCache.LEGACY_LONG_PARSER, random().nextBoolean());
+    assertSame("Second request to cache return same array", longs, cache.getNumerics(reader, "theLong", FieldCache.LEGACY_LONG_PARSER, random().nextBoolean()));
+    for (int i = 0; i < NUM_DOCS; i++) {
+      assertEquals(Long.MAX_VALUE - i, longs.get(i));
+    }
+
+    NumericDocValues ints = cache.getNumerics(reader, "theInt", FieldCache.LEGACY_INT_PARSER, random().nextBoolean());
+    assertSame("Second request to cache return same array", ints, cache.getNumerics(reader, "theInt", FieldCache.LEGACY_INT_PARSER, random().nextBoolean()));
+    for (int i = 0; i < NUM_DOCS; i++) {
+      assertEquals(Integer.MAX_VALUE - i, ints.get(i));
+    }
+    
+    NumericDocValues floats = cache.getNumerics(reader, "theFloat", FieldCache.LEGACY_FLOAT_PARSER, random().nextBoolean());
+    assertSame("Second request to cache return same array", floats, cache.getNumerics(reader, "theFloat", FieldCache.LEGACY_FLOAT_PARSER, random().nextBoolean()));
+    for (int i = 0; i < NUM_DOCS; i++) {
+      assertEquals(Float.floatToIntBits(Float.MAX_VALUE - i), floats.get(i));
+    }
+
+    Bits docsWithField = cache.getDocsWithField(reader, "theLong", null);
+    assertSame("Second request to cache return same array", docsWithField, cache.getDocsWithField(reader, "theLong", null));
+    assertTrue("docsWithField(theLong) must be class Bits.MatchAllBits", docsWithField instanceof Bits.MatchAllBits);
+    assertTrue("docsWithField(theLong) Size: " + docsWithField.length() + " is not: " + NUM_DOCS, docsWithField.length() == NUM_DOCS);
+    for (int i = 0; i < docsWithField.length(); i++) {
+      assertTrue(docsWithField.get(i));
+    }
+    
+    docsWithField = cache.getDocsWithField(reader, "sparse", null);
+    assertSame("Second request to cache return same array", docsWithField, cache.getDocsWithField(reader, "sparse", null));
+    assertFalse("docsWithField(sparse) must not be class Bits.MatchAllBits", docsWithField instanceof Bits.MatchAllBits);
+    assertTrue("docsWithField(sparse) Size: " + docsWithField.length() + " is not: " + NUM_DOCS, docsWithField.length() == NUM_DOCS);
+    for (int i = 0; i < docsWithField.length(); i++) {
+      assertEquals(i%2 == 0, docsWithField.get(i));
+    }
+
+    FieldCache.DEFAULT.purgeByCacheKey(reader.getCoreCacheKey());
+  }
+
+  public void testEmptyIndex() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriter writer= new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setMaxBufferedDocs(500));
+    writer.close();
+    IndexReader r = DirectoryReader.open(dir);
+    LeafReader reader = SlowCompositeReaderWrapper.wrap(r);
+    TestUtil.checkReader(reader);
+    FieldCache.DEFAULT.getTerms(reader, "foobar", true);
+    FieldCache.DEFAULT.getTermsIndex(reader, "foobar");
+    FieldCache.DEFAULT.purgeByCacheKey(reader.getCoreCacheKey());
+    r.close();
+    dir.close();
+  }
+
+  public void testDocsWithField() throws Exception {
+    FieldCache cache = FieldCache.DEFAULT;
+    cache.purgeAllCaches();
+    assertEquals(0, cache.getCacheEntries().length);
+    cache.getNumerics(reader, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, true);
+
+    // The double[] takes one slot, and docsWithField should also
+    // have been populated:
+    assertEquals(2, cache.getCacheEntries().length);
+    Bits bits = cache.getDocsWithField(reader, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER);
+
+    // No new entries should appear:
+    assertEquals(2, cache.getCacheEntries().length);
+    assertTrue(bits instanceof Bits.MatchAllBits);
+
+    NumericDocValues ints = cache.getNumerics(reader, "sparse", FieldCache.LEGACY_INT_PARSER, true);
+    assertEquals(4, cache.getCacheEntries().length);
+    Bits docsWithField = cache.getDocsWithField(reader, "sparse", FieldCache.LEGACY_INT_PARSER);
+    assertEquals(4, cache.getCacheEntries().length);
+    for (int i = 0; i < docsWithField.length(); i++) {
+      if (i%2 == 0) {
+        assertTrue(docsWithField.get(i));
+        assertEquals(i, ints.get(i));
+      } else {
+        assertFalse(docsWithField.get(i));
+      }
+    }
+
+    NumericDocValues numInts = cache.getNumerics(reader, "numInt", FieldCache.LEGACY_INT_PARSER, random().nextBoolean());
+    docsWithField = cache.getDocsWithField(reader, "numInt", FieldCache.LEGACY_INT_PARSER);
+    for (int i = 0; i < docsWithField.length(); i++) {
+      if (i%2 == 0) {
+        assertTrue(docsWithField.get(i));
+        assertEquals(i, numInts.get(i));
+      } else {
+        assertFalse(docsWithField.get(i));
+      }
+    }
+  }
+  
+  public void testGetDocsWithFieldThreadSafety() throws Exception {
+    final FieldCache cache = FieldCache.DEFAULT;
+    cache.purgeAllCaches();
+
+    int NUM_THREADS = 3;
+    Thread[] threads = new Thread[NUM_THREADS];
+    final AtomicBoolean failed = new AtomicBoolean();
+    final AtomicInteger iters = new AtomicInteger();
+    final int NUM_ITER = 200 * RANDOM_MULTIPLIER;
+    final CyclicBarrier restart = new CyclicBarrier(NUM_THREADS,
+                                                    new Runnable() {
+                                                      @Override
+                                                      public void run() {
+                                                        cache.purgeAllCaches();
+                                                        iters.incrementAndGet();
+                                                      }
+                                                    });
+    for(int threadIDX=0;threadIDX<NUM_THREADS;threadIDX++) {
+      threads[threadIDX] = new Thread() {
+          @Override
+          public void run() {
+
+            try {
+              while(!failed.get()) {
+                final int op = random().nextInt(3);
+                if (op == 0) {
+                  // Purge all caches & resume, once all
+                  // threads get here:
+                  restart.await();
+                  if (iters.get() >= NUM_ITER) {
+                    break;
+                  }
+                } else if (op == 1) {
+                  Bits docsWithField = cache.getDocsWithField(reader, "sparse", null);
+                  for (int i = 0; i < docsWithField.length(); i++) {
+                    assertEquals(i%2 == 0, docsWithField.get(i));
+                  }
+                } else {
+                  NumericDocValues ints = cache.getNumerics(reader, "sparse", FieldCache.LEGACY_INT_PARSER, true);
+                  Bits docsWithField = cache.getDocsWithField(reader, "sparse", null);
+                  for (int i = 0; i < docsWithField.length(); i++) {
+                    if (i%2 == 0) {
+                      assertTrue(docsWithField.get(i));
+                      assertEquals(i, ints.get(i));
+                    } else {
+                      assertFalse(docsWithField.get(i));
+                    }
+                  }
+                }
+              }
+            } catch (Throwable t) {
+              failed.set(true);
+              restart.reset();
+              throw new RuntimeException(t);
+            }
+          }
+        };
+      threads[threadIDX].start();
+    }
+
+    for(int threadIDX=0;threadIDX<NUM_THREADS;threadIDX++) {
+      threads[threadIDX].join();
+    }
+    assertFalse(failed.get());
+  }
+  
+  public void testDocValuesIntegration() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriterConfig iwc = newIndexWriterConfig(null);
+    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
+    Document doc = new Document();
+    doc.add(new BinaryDocValuesField("binary", new BytesRef("binary value")));
+    doc.add(new SortedDocValuesField("sorted", new BytesRef("sorted value")));
+    doc.add(new NumericDocValuesField("numeric", 42));
+    doc.add(new SortedSetDocValuesField("sortedset", new BytesRef("sortedset value1")));
+    doc.add(new SortedSetDocValuesField("sortedset", new BytesRef("sortedset value2")));
+    iw.addDocument(doc);
+    DirectoryReader ir = iw.getReader();
+    iw.close();
+    LeafReader ar = getOnlySegmentReader(ir);
+    
+    // Binary type: can be retrieved via getTerms()
+    expectThrows(IllegalStateException.class, () -> {
+      FieldCache.DEFAULT.getNumerics(ar, "binary", FieldCache.LEGACY_INT_PARSER, false);
+    });
+    
+    // Sorted type: can be retrieved via getTerms(), getTermsIndex(), getDocTermOrds()
+    expectThrows(IllegalStateException.class, () -> {
+      FieldCache.DEFAULT.getNumerics(ar, "sorted", FieldCache.LEGACY_INT_PARSER, false);
+    });
+    
+    // Numeric type: can be retrieved via getInts() and so on
+    NumericDocValues numeric = FieldCache.DEFAULT.getNumerics(ar, "numeric", FieldCache.LEGACY_INT_PARSER, false);
+    assertEquals(42, numeric.get(0));
+       
+    // SortedSet type: can be retrieved via getDocTermOrds() 
+    expectThrows(IllegalStateException.class, () -> {
+      FieldCache.DEFAULT.getNumerics(ar, "sortedset", FieldCache.LEGACY_INT_PARSER, false);
+    });
+    
+    ir.close();
+    dir.close();
+  }
+  
+  public void testNonexistantFields() throws Exception {
+    Directory dir = newDirectory();
+    RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    iw.addDocument(doc);
+    DirectoryReader ir = iw.getReader();
+    iw.close();
+    
+    LeafReader ar = getOnlySegmentReader(ir);
+    
+    final FieldCache cache = FieldCache.DEFAULT;
+    cache.purgeAllCaches();
+    assertEquals(0, cache.getCacheEntries().length);
+    
+    NumericDocValues ints = cache.getNumerics(ar, "bogusints", FieldCache.LEGACY_INT_PARSER, true);
+    assertEquals(0, ints.get(0));
+    
+    NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.LEGACY_LONG_PARSER, true);
+    assertEquals(0, longs.get(0));
+    
+    NumericDocValues floats = cache.getNumerics(ar, "bogusfloats", FieldCache.LEGACY_FLOAT_PARSER, true);
+    assertEquals(0, floats.get(0));
+    
+    NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.LEGACY_DOUBLE_PARSER, true);
+    assertEquals(0, doubles.get(0));
+    
+    // check that we cached nothing
+    assertEquals(0, cache.getCacheEntries().length);
+    ir.close();
+    dir.close();
+  }
+  
+  public void testNonIndexedFields() throws Exception {
+    Directory dir = newDirectory();
+    RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
+    Document doc = new Document();
+    doc.add(new StoredField("bogusbytes", "bogus"));
+    doc.add(new StoredField("bogusshorts", "bogus"));
+    doc.add(new StoredField("bogusints", "bogus"));
+    doc.add(new StoredField("boguslongs", "bogus"));
+    doc.add(new StoredField("bogusfloats", "bogus"));
+    doc.add(new StoredField("bogusdoubles", "bogus"));
+    doc.add(new StoredField("bogusbits", "bogus"));
+    iw.addDocument(doc);
+    DirectoryReader ir = iw.getReader();
+    iw.close();
+    
+    LeafReader ar = getOnlySegmentReader(ir);
+    
+    final FieldCache cache = FieldCache.DEFAULT;
+    cache.purgeAllCaches();
+    assertEquals(0, cache.getCacheEntries().length);
+    
+    NumericDocValues ints = cache.getNumerics(ar, "bogusints", FieldCache.LEGACY_INT_PARSER, true);
+    assertEquals(0, ints.get(0));
+    
+    NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.LEGACY_LONG_PARSER, true);
+    assertEquals(0, longs.get(0));
+    
+    NumericDocValues floats = cache.getNumerics(ar, "bogusfloats", FieldCache.LEGACY_FLOAT_PARSER, true);
+    assertEquals(0, floats.get(0));
+    
+    NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.LEGACY_DOUBLE_PARSER, true);
+    assertEquals(0, doubles.get(0));
+    
+    // check that we cached nothing
+    assertEquals(0, cache.getCacheEntries().length);
+    ir.close();
+    dir.close();
+  }
+
+  // Make sure that the use of GrowableWriter doesn't prevent using the full long range
+  public void testLongFieldCache() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriterConfig cfg = newIndexWriterConfig(new MockAnalyzer(random()));
+    cfg.setMergePolicy(newLogMergePolicy());
+    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, cfg);
+    Document doc = new Document();
+    LegacyLongField field = new LegacyLongField("f", 0L, Store.YES);
+    doc.add(field);
+    final long[] values = new long[TestUtil.nextInt(random(), 1, 10)];
+    for (int i = 0; i < values.length; ++i) {
+      final long v;
+      switch (random().nextInt(10)) {
+        case 0:
+          v = Long.MIN_VALUE;
+          break;
+        case 1:
+          v = 0;
+          break;
+        case 2:
+          v = Long.MAX_VALUE;
+          break;
+        default:
+          v = TestUtil.nextLong(random(), -10, 10);
+          break;
+      }
+      values[i] = v;
+      if (v == 0 && random().nextBoolean()) {
+        // missing
+        iw.addDocument(new Document());
+      } else {
+        field.setLongValue(v);
+        iw.addDocument(doc);
+      }
+    }
+    iw.forceMerge(1);
+    final DirectoryReader reader = iw.getReader();
+    final NumericDocValues longs = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.LEGACY_LONG_PARSER, false);
+    for (int i = 0; i < values.length; ++i) {
+      assertEquals(values[i], longs.get(i));
+    }
+    reader.close();
+    iw.close();
+    dir.close();
+  }
+
+  // Make sure that the use of GrowableWriter doesn't prevent using the full int range
+  public void testIntFieldCache() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriterConfig cfg = newIndexWriterConfig(new MockAnalyzer(random()));
+    cfg.setMergePolicy(newLogMergePolicy());
+    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, cfg);
+    Document doc = new Document();
+    LegacyIntField field = new LegacyIntField("f", 0, Store.YES);
+    doc.add(field);
+    final int[] values = new int[TestUtil.nextInt(random(), 1, 10)];
+    for (int i = 0; i < values.length; ++i) {
+      final int v;
+      switch (random().nextInt(10)) {
+        case 0:
+          v = Integer.MIN_VALUE;
+          break;
+        case 1:
+          v = 0;
+          break;
+        case 2:
+          v = Integer.MAX_VALUE;
+          break;
+        default:
+          v = TestUtil.nextInt(random(), -10, 10);
+          break;
+      }
+      values[i] = v;
+      if (v == 0 && random().nextBoolean()) {
+        // missing
+        iw.addDocument(new Document());
+      } else {
+        field.setIntValue(v);
+        iw.addDocument(doc);
+      }
+    }
+    iw.forceMerge(1);
+    final DirectoryReader reader = iw.getReader();
+    final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.LEGACY_INT_PARSER, false);
+    for (int i = 0; i < values.length; ++i) {
+      assertEquals(values[i], ints.get(i));
+    }
+    reader.close();
+    iw.close();
+    dir.close();
+  }
+
+}
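
For reference, the access pattern the tests above exercise condenses to the following sketch; this restates the API used in the patch (reader setup omitted, field name mirrors the tests) rather than introducing anything new:

    FieldCache cache = FieldCache.DEFAULT;
    // Uninvert an indexed legacy-numeric field into NumericDocValues;
    // a second request for the same field/parser is served from the cache.
    NumericDocValues longs = cache.getNumerics(reader, "theLong", FieldCache.LEGACY_LONG_PARSER, false);
    long value = longs.get(0);                     // value for docID 0
    // Which documents actually carry a value for the field:
    Bits docsWithField = cache.getDocsWithField(reader, "theLong", null);
    boolean hasValue = docsWithField.get(0);
    // Entries are keyed by the reader's core cache key; purge when done:
    cache.purgeByCacheKey(reader.getCoreCacheKey());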

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java
index bc85db4..a0cddf8 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java
@@ -96,9 +96,9 @@ public class TestNumericTerms32 extends LuceneTestCase {
     }
   
     Map<String,Type> map = new HashMap<>();
-    map.put("field2", Type.INTEGER);
-    map.put("field4", Type.INTEGER);
-    map.put("field8", Type.INTEGER);
+    map.put("field2", Type.LEGACY_INTEGER);
+    map.put("field4", Type.LEGACY_INTEGER);
+    map.put("field8", Type.LEGACY_INTEGER);
     reader = UninvertingReader.wrap(writer.getReader(), map);
     searcher=newSearcher(reader);
     writer.close();

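The LEGACY_* renames in this and the neighbouring test files reflect the split of the uninverting Type constants into LEGACY_* variants (for legacy Trie numerics) alongside the *_POINT variants; the wrapping pattern itself is unchanged. A sketch, with field names taken from the tests:

    Map<String,Type> map = new HashMap<>();
    map.put("field2", Type.LEGACY_INTEGER);  // legacy (Trie) numeric field
    map.put("dint", Type.INTEGER_POINT);     // points-based numeric field
    // Expose the mapped fields as doc values via the wrapped reader:
    DirectoryReader reader = UninvertingReader.wrap(DirectoryReader.open(dir), map);
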
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java
index d9fcc92..0724d86 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java
@@ -100,10 +100,10 @@ public class TestNumericTerms64 extends LuceneTestCase {
       writer.addDocument(doc);
     }
     Map<String,Type> map = new HashMap<>();
-    map.put("field2", Type.LONG);
-    map.put("field4", Type.LONG);
-    map.put("field6", Type.LONG);
-    map.put("field8", Type.LONG);
+    map.put("field2", Type.LEGACY_LONG);
+    map.put("field4", Type.LEGACY_LONG);
+    map.put("field6", Type.LEGACY_LONG);
+    map.put("field8", Type.LEGACY_LONG);
     reader = UninvertingReader.wrap(writer.getReader(), map);
     searcher=newSearcher(reader);
     writer.close();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/misc/src/test/org/apache/lucene/uninverting/TestUninvertingReader.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestUninvertingReader.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestUninvertingReader.java
index 99df329..0a1cf3d 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestUninvertingReader.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestUninvertingReader.java
@@ -363,8 +363,9 @@ public class TestUninvertingReader extends LuceneTestCase {
     iw.close();
 
     Map<String, Type> uninvertingMap = new HashMap<>();
-    uninvertingMap.put("int", Type.INTEGER);
-    uninvertingMap.put("dv", Type.INTEGER);
+    uninvertingMap.put("int", Type.LEGACY_INTEGER);
+    uninvertingMap.put("dv", Type.LEGACY_INTEGER);
+    uninvertingMap.put("dint", Type.INTEGER_POINT);
 
     DirectoryReader ir = UninvertingReader.wrap(DirectoryReader.open(dir), 
                          uninvertingMap);
@@ -376,6 +377,7 @@ public class TestUninvertingReader extends LuceneTestCase {
     assertEquals(0, intFInfo.getPointNumBytes());
 
     FieldInfo dintFInfo = leafReader.getFieldInfos().fieldInfo("dint");
+    assertEquals(DocValuesType.NUMERIC, dintFInfo.getDocValuesType());
     assertEquals(1, dintFInfo.getPointDimensionCount());
     assertEquals(4, dintFInfo.getPointNumBytes());
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java
index 529e98b..8ccb9af 100644
--- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java
+++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java
@@ -73,8 +73,8 @@ public abstract class SpatialTestCase extends LuceneTestCase {
     super.setUp();
     // TODO: change this module to index docvalues instead of uninverting
     uninvertMap.clear();
-    uninvertMap.put("pointvector__x", Type.DOUBLE);
-    uninvertMap.put("pointvector__y", Type.DOUBLE);
+    uninvertMap.put("pointvector__x", Type.LEGACY_DOUBLE);
+    uninvertMap.put("pointvector__y", Type.LEGACY_DOUBLE);
 
     directory = newDirectory();
     final Random random = random();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/solr/core/src/java/org/apache/solr/schema/EnumField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/EnumField.java b/solr/core/src/java/org/apache/solr/schema/EnumField.java
index e1fb420..cbf1d4e 100644
--- a/solr/core/src/java/org/apache/solr/schema/EnumField.java
+++ b/solr/core/src/java/org/apache/solr/schema/EnumField.java
@@ -194,7 +194,7 @@ public class EnumField extends PrimitiveFieldType {
     if (sf.multiValued()) {
       return Type.SORTED_SET_INTEGER;
     } else {
-      return Type.INTEGER;
+      return Type.LEGACY_INTEGER;
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/solr/core/src/java/org/apache/solr/schema/TrieField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/TrieField.java b/solr/core/src/java/org/apache/solr/schema/TrieField.java
index 572bf88..c4899a1 100644
--- a/solr/core/src/java/org/apache/solr/schema/TrieField.java
+++ b/solr/core/src/java/org/apache/solr/schema/TrieField.java
@@ -203,14 +203,14 @@ public class TrieField extends PrimitiveFieldType {
     } else {
       switch (type) {
         case INTEGER:
-          return Type.INTEGER;
+          return Type.LEGACY_INTEGER;
         case LONG:
         case DATE:
-          return Type.LONG;
+          return Type.LEGACY_LONG;
         case FLOAT:
-          return Type.FLOAT;
+          return Type.LEGACY_FLOAT;
         case DOUBLE:
-          return Type.DOUBLE;
+          return Type.LEGACY_DOUBLE;
         default:
           throw new AssertionError();
       }


[06/50] [abbrv] lucene-solr git commit: LUCENE-7080: Sort files to corrupt to prevent HashSet iteration order issues across JVMs

Posted by ho...@apache.org.
LUCENE-7080: Sort files to corrupt to prevent HashSet iteration order issues across JVMs


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/588aeeaa
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/588aeeaa
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/588aeeaa

Branch: refs/heads/jira/SOLR-445
Commit: 588aeeaab731f34af9063ec0dedb714f8740e0b2
Parents: 12f7ad6
Author: Simon Willnauer <si...@apache.org>
Authored: Wed Mar 9 10:56:13 2016 +0100
Committer: Simon Willnauer <si...@apache.org>
Committed: Wed Mar 9 10:56:13 2016 +0100

----------------------------------------------------------------------
 .../java/org/apache/lucene/store/MockDirectoryWrapper.java    | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/588aeeaa/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java b/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java
index 962062e..7fe7c3b 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java
@@ -45,6 +45,7 @@ import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.NoDeletionPolicy;
 import org.apache.lucene.index.SegmentInfos;
+import org.apache.lucene.util.CollectionUtil;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
@@ -296,7 +297,11 @@ public class MockDirectoryWrapper extends BaseDirectoryWrapper {
   public synchronized void corruptFiles(Collection<String> files) throws IOException {
     // Must make a copy because we change the incoming unsyncedFiles
     // when we create temp files, delete, etc., below:
-    for(String name : new ArrayList<>(files)) {
+    final List<String> filesToCorrupt = new ArrayList<>(files);
+    // Sort the files first, otherwise we have reproducibility issues
+    // across JVMs if the incoming collection is a HashSet etc.
+    CollectionUtil.timSort(filesToCorrupt);
+    for(String name : filesToCorrupt) {
       int damage = randomState.nextInt(6);
       String action = null;
 
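The fix is small but worth spelling out: HashSet iteration order is unspecified and can differ across JVM versions, so the number of random values consumed before reaching a given file depended on that order, making failures non-reproducible. Sorting pins the sequence before any randomness is drawn. A sketch of the idea (file names are illustrative):

    Set<String> files = new HashSet<>(Arrays.asList("_0.cfs", "segments_1", "_0.si"));
    // Iterating 'files' directly may visit elements in a different order on
    // another JVM, desynchronizing decisions made from a shared random seed.
    List<String> filesToCorrupt = new ArrayList<>(files);
    CollectionUtil.timSort(filesToCorrupt);  // deterministic order first
    for (String name : filesToCorrupt) {
      // ... corrupt 'name' using the test's Random, as in the patch above ...
    }
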


[11/50] [abbrv] lucene-solr git commit: LUCENE-7084: fail precommit on comparingIdentical. also replaces one assert in the SingletonSortedSetDocValues constructor with equivalent test (TestSortedSetDocValues.testNoMoreOrdsConstant).

Posted by ho...@apache.org.
LUCENE-7084: fail precommit on comparingIdentical. also replaces one assert in the SingletonSortedSetDocValues constructor with equivalent test (TestSortedSetDocValues.testNoMoreOrdsConstant).


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/f1ad7696
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/f1ad7696
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/f1ad7696

Branch: refs/heads/jira/SOLR-445
Commit: f1ad769666d5ddc090acd3c366ffa5c6af265cd1
Parents: 116ece2
Author: Christine Poerschke <cp...@apache.org>
Authored: Wed Mar 9 16:00:15 2016 +0000
Committer: Christine Poerschke <cp...@apache.org>
Committed: Wed Mar 9 16:00:15 2016 +0000

----------------------------------------------------------------------
 .../index/SingletonSortedSetDocValues.java      |  1 -
 .../lucene/index/TestSortedSetDocValues.java    | 27 ++++++++++++++++++++
 lucene/tools/javadoc/ecj.javadocs.prefs         |  2 +-
 3 files changed, 28 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f1ad7696/lucene/core/src/java/org/apache/lucene/index/SingletonSortedSetDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SingletonSortedSetDocValues.java b/lucene/core/src/java/org/apache/lucene/index/SingletonSortedSetDocValues.java
index c90fcc5..5077cd8 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SingletonSortedSetDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SingletonSortedSetDocValues.java
@@ -33,7 +33,6 @@ final class SingletonSortedSetDocValues extends RandomAccessOrds {
   /** Creates a multi-valued view over the provided SortedDocValues */
   public SingletonSortedSetDocValues(SortedDocValues in) {
     this.in = in;
-    assert NO_MORE_ORDS == -1; // this allows our nextOrd() to work for missing values without a check
   }
 
   /** Return the wrapped {@link SortedDocValues} */

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f1ad7696/lucene/core/src/test/org/apache/lucene/index/TestSortedSetDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSortedSetDocValues.java b/lucene/core/src/test/org/apache/lucene/index/TestSortedSetDocValues.java
new file mode 100644
index 0000000..8cffeaf
--- /dev/null
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSortedSetDocValues.java
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.index;
+
+import org.apache.lucene.util.LuceneTestCase;
+
+public class TestSortedSetDocValues extends LuceneTestCase {
+
+  public void testNoMoreOrdsConstant() {
+    assertEquals(SortedSetDocValues.NO_MORE_ORDS, -1);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f1ad7696/lucene/tools/javadoc/ecj.javadocs.prefs
----------------------------------------------------------------------
diff --git a/lucene/tools/javadoc/ecj.javadocs.prefs b/lucene/tools/javadoc/ecj.javadocs.prefs
index d01148c..bd98c99 100644
--- a/lucene/tools/javadoc/ecj.javadocs.prefs
+++ b/lucene/tools/javadoc/ecj.javadocs.prefs
@@ -11,7 +11,7 @@ org.eclipse.jdt.core.compiler.doc.comment.support=enabled
 org.eclipse.jdt.core.compiler.problem.annotationSuperInterface=ignore
 org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
 org.eclipse.jdt.core.compiler.problem.autoboxing=ignore
-org.eclipse.jdt.core.compiler.problem.comparingIdentical=ignore
+org.eclipse.jdt.core.compiler.problem.comparingIdentical=error
 org.eclipse.jdt.core.compiler.problem.deadCode=ignore
 org.eclipse.jdt.core.compiler.problem.deprecation=ignore
 org.eclipse.jdt.core.compiler.problem.deprecationInDeprecatedCode=disabled

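Tying the two halves of this commit together: with comparingIdentical promoted to error, ecj rejects comparisons whose two sides are provably identical, which presumably covers the removed constructor assert once NO_MORE_ORDS (itself the constant -1) is folded. The invariant survives as the new unit test:

    // Previously asserted in the SingletonSortedSetDocValues constructor and
    // now flagged by ecj, since both sides fold to the same constant:
    //   assert NO_MORE_ORDS == -1;
    // The equivalent guarantee, kept as a regular test instead:
    assertEquals(SortedSetDocValues.NO_MORE_ORDS, -1);
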

[07/50] [abbrv] lucene-solr git commit: SOLR-8765: Enforce required parameters in SolrJ Collections API

Posted by ho...@apache.org.
SOLR-8765: Enforce required parameters in SolrJ Collections API


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/55c595a9
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/55c595a9
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/55c595a9

Branch: refs/heads/jira/SOLR-445
Commit: 55c595a9dcea7d3426e7dcc2690324624287b204
Parents: 588aeea
Author: Alan Woodward <ro...@apache.org>
Authored: Mon Mar 7 19:53:09 2016 +0000
Committer: Alan Woodward <ro...@apache.org>
Committed: Wed Mar 9 12:51:05 2016 +0000

----------------------------------------------------------------------
 solr/CHANGES.txt                                |   4 +
 .../org/apache/solr/core/CoreContainer.java     |   9 +-
 .../solr/handler/admin/CollectionsHandler.java  | 101 ++--
 .../org/apache/solr/cloud/DeleteStatusTest.java | 172 +++---
 .../solrj/request/CollectionAdminRequest.java   | 546 +++++++++++++++++--
 .../client/solrj/request/CoreAdminRequest.java  |  23 +-
 .../solrj/util/SolrIdentifierValidator.java     |  26 +-
 7 files changed, 674 insertions(+), 207 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/55c595a9/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index dc7c45f..2d4ddff 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -27,6 +27,10 @@ Detailed Change List
   .processAndWait() to wait for a call to finish without holding HTTP
   collections open.  (Alan Woodward)
 
+* SOLR-8765: Enforce required parameters at query construction time in the SolrJ
+  Collections API, add static factory methods, and deprecate old setter methods.
+  (Alan Woodward, Jason Gerlowski)
+
 New Features
 ----------------------
 
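What the CHANGES entry means for callers, as a before/after sketch (collection and config names are illustrative; both forms appear verbatim in the test changes below):

    // Deprecated setter chain: required parameters could be omitted silently
    CollectionAdminRequest.Create create = new CollectionAdminRequest.Create();
    create.setCollectionName("mycollection")
        .setConfigName("conf1")
        .setNumShards(1)
        .setReplicationFactor(1)
        .process(client);

    // New factory method: required parameters are enforced at construction time
    CollectionAdminRequest.createCollection("mycollection", "conf1", 1, 1)
        .process(client);
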

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/55c595a9/solr/core/src/java/org/apache/solr/core/CoreContainer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index 9ff45ea..1d614e3 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -804,9 +804,7 @@ public class CoreContainer {
     SolrCore core = null;
     try {
       MDCLoggingContext.setCore(core);
-      if (!SolrIdentifierValidator.validateCoreName(dcore.getName())) {
-        throw new SolrException(ErrorCode.BAD_REQUEST, SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.CORE, dcore.getName()));
-      }
+      SolrIdentifierValidator.validateCoreName(dcore.getName());
       if (zkSys.getZkController() != null) {
         zkSys.getZkController().preRegister(dcore);
       }
@@ -1009,10 +1007,7 @@ public class CoreContainer {
   }
 
   public void rename(String name, String toName) {
-    if (!SolrIdentifierValidator.validateCoreName(toName)) {
-      throw new SolrException(ErrorCode.BAD_REQUEST, SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.CORE,
-          toName));
-    }
+    SolrIdentifierValidator.validateCoreName(toName);
     try (SolrCore core = getCore(name)) {
       if (core != null) {
         registerCore(toName, core, true);

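The call sites above all shrink the same way: the SolrIdentifierValidator methods now throw on invalid input and return the validated name, rather than returning a boolean the caller must check and turn into a SolrException itself. The pattern, lifted from the hunks in this patch:

    // Before: boolean predicate, caller assembles the exception
    if (!SolrIdentifierValidator.validateCollectionName(collectionName)) {
      throw new SolrException(ErrorCode.BAD_REQUEST,
          SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.COLLECTION, collectionName));
    }
    // After: validate-or-throw, returning the name for inline use
    final String collectionName = SolrIdentifierValidator.validateCollectionName((String) props.get(NAME));
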
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/55c595a9/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
index 593dac8..06968c3 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
@@ -16,45 +16,8 @@
  */
 package org.apache.solr.handler.admin;
 
-import static org.apache.solr.client.solrj.response.RequestStatusState.*;
-import static org.apache.solr.cloud.Overseer.QUEUE_OPERATION;
-import static org.apache.solr.cloud.OverseerCollectionMessageHandler.COLL_CONF;
-import static org.apache.solr.cloud.OverseerCollectionMessageHandler.COLL_PROP_PREFIX;
-import static org.apache.solr.cloud.OverseerCollectionMessageHandler.CREATE_NODE_SET;
-import static org.apache.solr.cloud.OverseerCollectionMessageHandler.CREATE_NODE_SET_SHUFFLE;
-import static org.apache.solr.cloud.OverseerCollectionMessageHandler.NUM_SLICES;
-import static org.apache.solr.cloud.OverseerCollectionMessageHandler.ONLY_ACTIVE_NODES;
-import static org.apache.solr.cloud.OverseerCollectionMessageHandler.ONLY_IF_DOWN;
-import static org.apache.solr.cloud.OverseerCollectionMessageHandler.REQUESTID;
-import static org.apache.solr.cloud.OverseerCollectionMessageHandler.SHARDS_PROP;
-import static org.apache.solr.cloud.OverseerCollectionMessageHandler.SHARD_UNIQUE;
-import static org.apache.solr.common.cloud.DocCollection.DOC_ROUTER;
-import static org.apache.solr.common.cloud.DocCollection.RULE;
-import static org.apache.solr.common.cloud.DocCollection.SNITCH;
-import static org.apache.solr.common.cloud.DocCollection.STATE_FORMAT;
-import static org.apache.solr.common.cloud.ZkStateReader.AUTO_ADD_REPLICAS;
-import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.MAX_SHARDS_PER_NODE;
-import static org.apache.solr.common.cloud.ZkStateReader.PROPERTY_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.PROPERTY_VALUE_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR;
-import static org.apache.solr.common.cloud.ZkStateReader.REPLICA_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.*;
-import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
-import static org.apache.solr.common.params.CommonParams.NAME;
-import static org.apache.solr.common.params.CommonParams.VALUE_LONG;
-import static org.apache.solr.common.params.CoreAdminParams.DATA_DIR;
-import static org.apache.solr.common.params.CoreAdminParams.DELETE_DATA_DIR;
-import static org.apache.solr.common.params.CoreAdminParams.DELETE_INDEX;
-import static org.apache.solr.common.params.CoreAdminParams.DELETE_INSTANCE_DIR;
-import static org.apache.solr.common.params.CoreAdminParams.INSTANCE_DIR;
-import static org.apache.solr.common.params.ShardParams._ROUTE_;
-import static org.apache.solr.common.util.StrUtils.formatString;
-
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
-
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -66,6 +29,8 @@ import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.solr.client.solrj.SolrResponse;
@@ -117,8 +82,45 @@ import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
+import static org.apache.solr.client.solrj.response.RequestStatusState.COMPLETED;
+import static org.apache.solr.client.solrj.response.RequestStatusState.FAILED;
+import static org.apache.solr.client.solrj.response.RequestStatusState.NOT_FOUND;
+import static org.apache.solr.client.solrj.response.RequestStatusState.RUNNING;
+import static org.apache.solr.client.solrj.response.RequestStatusState.SUBMITTED;
+import static org.apache.solr.cloud.Overseer.QUEUE_OPERATION;
+import static org.apache.solr.cloud.OverseerCollectionMessageHandler.COLL_CONF;
+import static org.apache.solr.cloud.OverseerCollectionMessageHandler.COLL_PROP_PREFIX;
+import static org.apache.solr.cloud.OverseerCollectionMessageHandler.CREATE_NODE_SET;
+import static org.apache.solr.cloud.OverseerCollectionMessageHandler.CREATE_NODE_SET_SHUFFLE;
+import static org.apache.solr.cloud.OverseerCollectionMessageHandler.NUM_SLICES;
+import static org.apache.solr.cloud.OverseerCollectionMessageHandler.ONLY_ACTIVE_NODES;
+import static org.apache.solr.cloud.OverseerCollectionMessageHandler.ONLY_IF_DOWN;
+import static org.apache.solr.cloud.OverseerCollectionMessageHandler.REQUESTID;
+import static org.apache.solr.cloud.OverseerCollectionMessageHandler.SHARDS_PROP;
+import static org.apache.solr.cloud.OverseerCollectionMessageHandler.SHARD_UNIQUE;
+import static org.apache.solr.common.cloud.DocCollection.DOC_ROUTER;
+import static org.apache.solr.common.cloud.DocCollection.RULE;
+import static org.apache.solr.common.cloud.DocCollection.SNITCH;
+import static org.apache.solr.common.cloud.DocCollection.STATE_FORMAT;
+import static org.apache.solr.common.cloud.ZkStateReader.AUTO_ADD_REPLICAS;
+import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.MAX_SHARDS_PER_NODE;
+import static org.apache.solr.common.cloud.ZkStateReader.PROPERTY_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.PROPERTY_VALUE_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR;
+import static org.apache.solr.common.cloud.ZkStateReader.REPLICA_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.*;
+import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
+import static org.apache.solr.common.params.CommonParams.NAME;
+import static org.apache.solr.common.params.CommonParams.VALUE_LONG;
+import static org.apache.solr.common.params.CoreAdminParams.DATA_DIR;
+import static org.apache.solr.common.params.CoreAdminParams.DELETE_DATA_DIR;
+import static org.apache.solr.common.params.CoreAdminParams.DELETE_INDEX;
+import static org.apache.solr.common.params.CoreAdminParams.DELETE_INSTANCE_DIR;
+import static org.apache.solr.common.params.CoreAdminParams.INSTANCE_DIR;
+import static org.apache.solr.common.params.ShardParams._ROUTE_;
+import static org.apache.solr.common.util.StrUtils.formatString;
 
 public class CollectionsHandler extends RequestHandlerBase {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@@ -348,11 +350,7 @@ public class CollectionsHandler extends RequestHandlerBase {
         addMapObject(props, RULE);
         addMapObject(props, SNITCH);
         verifyRuleParams(h.coreContainer, props);
-        final String collectionName = (String) props.get(NAME);
-        if (!SolrIdentifierValidator.validateCollectionName(collectionName)) {
-          throw new SolrException(ErrorCode.BAD_REQUEST,
-              SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.COLLECTION, collectionName));
-        }
+        final String collectionName = SolrIdentifierValidator.validateCollectionName((String)props.get(NAME));
         final String shardsParam = (String) props.get(SHARDS_PROP);
         if (StringUtils.isNotEmpty(shardsParam)) {
           verifyShardsParam(shardsParam);
@@ -433,10 +431,7 @@ public class CollectionsHandler extends RequestHandlerBase {
       @Override
       Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler)
           throws Exception {
-        final String aliasName = req.getParams().get(NAME);
-        if (!SolrIdentifierValidator.validateCollectionName(aliasName)) {
-          throw new SolrException(ErrorCode.BAD_REQUEST, SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.ALIAS, aliasName));
-        }
+        final String aliasName = SolrIdentifierValidator.validateAliasName(req.getParams().get(NAME));
         return req.getParams().required().getAll(null, NAME, "collections");
       }
     },
@@ -505,11 +500,7 @@ public class CollectionsHandler extends RequestHandlerBase {
             COLLECTION_PROP,
             SHARD_ID_PROP);
         ClusterState clusterState = handler.coreContainer.getZkController().getClusterState();
-        final String newShardName = req.getParams().get(SHARD_ID_PROP);
-        if (!SolrIdentifierValidator.validateShardName(newShardName)) {
-          throw new SolrException(ErrorCode.BAD_REQUEST, SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.SHARD,
-              newShardName));
-        }
+        final String newShardName = SolrIdentifierValidator.validateShardName(req.getParams().get(SHARD_ID_PROP));
         if (!ImplicitDocRouter.NAME.equals(((Map) clusterState.getCollection(req.getParams().get(COLLECTION_PROP)).get(DOC_ROUTER)).get(NAME)))
           throw new SolrException(ErrorCode.BAD_REQUEST, "shards can be added only to 'implicit' collections");
         req.getParams().getAll(map,
@@ -997,9 +988,7 @@ public class CollectionsHandler extends RequestHandlerBase {
   
   private static void verifyShardsParam(String shardsParam) {
     for (String shard : shardsParam.split(",")) {
-      if (!SolrIdentifierValidator.validateShardName(shard))
-        throw new SolrException(ErrorCode.BAD_REQUEST, SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.SHARD,
-            shard));
+      SolrIdentifierValidator.validateShardName(shard);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/55c595a9/solr/core/src/test/org/apache/solr/cloud/DeleteStatusTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/DeleteStatusTest.java b/solr/core/src/test/org/apache/solr/cloud/DeleteStatusTest.java
index 16ca35a..3b8e014 100644
--- a/solr/core/src/test/org/apache/solr/cloud/DeleteStatusTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/DeleteStatusTest.java
@@ -17,101 +17,129 @@
 package org.apache.solr.cloud;
 
 import java.io.IOException;
+import java.util.concurrent.TimeUnit;
 
+import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.client.solrj.response.CollectionAdminResponse;
 import org.apache.solr.client.solrj.response.RequestStatusState;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
-public class DeleteStatusTest extends AbstractFullDistribZkTestBase {
+public class DeleteStatusTest extends SolrCloudTestCase {
+
+  public static final int MAX_WAIT_TIMEOUT = 30;
+
+  @BeforeClass
+  public static void createCluster() throws Exception {
+    configureCluster(2)
+        .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf"))
+        .configure();
+  }
+
+  // Basically equivalent to RequestStatus.waitFor(), but doesn't delete the id from the queue
+  private static RequestStatusState waitForRequestState(String id, SolrClient client, int timeout)
+      throws IOException, SolrServerException, InterruptedException {
+    RequestStatusState state = RequestStatusState.SUBMITTED;
+    long endTime = System.nanoTime() + TimeUnit.SECONDS.toNanos(timeout);
+    while (System.nanoTime() < endTime) {
+      state = CollectionAdminRequest.requestStatus(id).process(client).getRequestStatus();
+      if (state == RequestStatusState.COMPLETED)
+        break;
+      assumeTrue("Error creating collection - skipping test", state != RequestStatusState.FAILED);
+      TimeUnit.SECONDS.sleep(1);
+    }
+    assumeTrue("Timed out creating collection - skipping test", state == RequestStatusState.COMPLETED);
+    return state;
+  }
 
   @Test
-  public void testDeleteStatus() throws IOException, SolrServerException {
-    CollectionAdminRequest.Create create = new CollectionAdminRequest.Create();
-    create.setCollectionName("requeststatus")
-        .setConfigName("conf1")
-        .setReplicationFactor(1)
-        .setNumShards(1)
-        .setAsyncId("collectioncreate")
-        .process(cloudClient);
-
-    RequestStatusState state = getRequestStateAfterCompletion("collectioncreate", 30, cloudClient);
-    assertSame(RequestStatusState.COMPLETED, state);
+  public void testAsyncIdsMayBeDeleted() throws Exception {
+
+    final CloudSolrClient client = cluster.getSolrClient();
+
+    final String collection = "deletestatus";
+    final String asyncId = CollectionAdminRequest.createCollection(collection, "conf1", 1, 1).processAsync(client);
+
+    waitForRequestState(asyncId, client, MAX_WAIT_TIMEOUT);
 
-    // Let's delete the stored response now
-    CollectionAdminRequest.DeleteStatus deleteStatus = new CollectionAdminRequest.DeleteStatus();
-    CollectionAdminResponse rsp = deleteStatus
-        .setRequestId("collectioncreate")
-        .process(cloudClient);
-    assertEquals("successfully removed stored response for [collectioncreate]", rsp.getResponse().get("status"));
-
-    // Make sure that the response was deleted from zk
-    state = getRequestState("collectioncreate", cloudClient);
-    assertSame(RequestStatusState.NOT_FOUND, state);
-
-    // Try deleting the same requestid again
-    deleteStatus = new CollectionAdminRequest.DeleteStatus();
-    rsp = deleteStatus
-        .setRequestId("collectioncreate")
-        .process(cloudClient);
-    assertEquals("[collectioncreate] not found in stored responses", rsp.getResponse().get("status"));
-
-    // Let's try deleting a non-existent status
-    deleteStatus = new CollectionAdminRequest.DeleteStatus();
-    rsp = deleteStatus
-        .setRequestId("foo")
-        .process(cloudClient);
+    assertEquals(RequestStatusState.COMPLETED,
+        CollectionAdminRequest.requestStatus(asyncId).process(client).getRequestStatus());
+
+    CollectionAdminResponse rsp = CollectionAdminRequest.deleteAsyncId(asyncId).process(client);
+    assertEquals("successfully removed stored response for [" + asyncId + "]", rsp.getResponse().get("status"));
+
+    assertEquals(RequestStatusState.NOT_FOUND,
+        CollectionAdminRequest.requestStatus(asyncId).process(client).getRequestStatus());
+
+  }
+
+  @Test
+  public void testDeletingNonExistentRequests() throws Exception {
+
+    final CloudSolrClient client = cluster.getSolrClient();
+
+    CollectionAdminResponse rsp = CollectionAdminRequest.deleteAsyncId("foo").process(client);
     assertEquals("[foo] not found in stored responses", rsp.getResponse().get("status"));
+
+  }
+
+  @Test
+  public void testProcessAndWaitDeletesAsyncIds() throws IOException, SolrServerException, InterruptedException {
+
+    final CloudSolrClient client = cluster.getSolrClient();
+
+    RequestStatusState state = CollectionAdminRequest.createCollection("requeststatus", "conf1", 1, 1)
+                                  .processAndWait("request1", client, MAX_WAIT_TIMEOUT);
+    assertSame(RequestStatusState.COMPLETED, state);
+
+    // using processAndWait deletes the requestid
+    state = CollectionAdminRequest.requestStatus("request1").process(client).getRequestStatus();
+    assertSame("Request id was not deleted by processAndWait call", RequestStatusState.NOT_FOUND, state);
+
   }
 
   @Test
   public void testDeleteStatusFlush() throws Exception {
-    CollectionAdminRequest.Create create = new CollectionAdminRequest.Create();
-    create.setConfigName("conf1")
-        .setCollectionName("foo")
-        .setAsyncId("foo")
-        .setNumShards(1)
-        .setReplicationFactor(1)
-        .process(cloudClient);
-
-    create = new CollectionAdminRequest.Create();
-    create.setConfigName("conf1")
-        .setCollectionName("bar")
-        .setAsyncId("bar")
-        .setNumShards(1)
-        .setReplicationFactor(1)
-        .process(cloudClient);
-
-    RequestStatusState state = getRequestStateAfterCompletion("foo", 30, cloudClient);
-    assertEquals(RequestStatusState.COMPLETED, state);
-
-    state = getRequestStateAfterCompletion("bar", 30, cloudClient);
-    assertEquals(RequestStatusState.COMPLETED, state);
-
-    CollectionAdminRequest.DeleteStatus deleteStatus = new CollectionAdminRequest.DeleteStatus();
-    deleteStatus.setFlush(true)
-        .process(cloudClient);
-
-    assertEquals(RequestStatusState.NOT_FOUND, getRequestState("foo", cloudClient));
-    assertEquals(RequestStatusState.NOT_FOUND, getRequestState("bar", cloudClient));
-
-    deleteStatus = new CollectionAdminRequest.DeleteStatus();
+
+    final CloudSolrClient client = cluster.getSolrClient();
+
+    String id1 = CollectionAdminRequest.createCollection("flush1", "conf1", 1, 1).processAsync(client);
+    String id2 = CollectionAdminRequest.createCollection("flush2", "conf1", 1, 1).processAsync(client);
+
+    assertEquals(RequestStatusState.COMPLETED, waitForRequestState(id1, client, MAX_WAIT_TIMEOUT));
+    assertEquals(RequestStatusState.COMPLETED, waitForRequestState(id2, client, MAX_WAIT_TIMEOUT));
+
+    CollectionAdminRequest.deleteAllAsyncIds().process(client);
+
+    assertEquals(RequestStatusState.NOT_FOUND,
+        CollectionAdminRequest.requestStatus(id1).process(client).getRequestStatus());
+    assertEquals(RequestStatusState.NOT_FOUND,
+        CollectionAdminRequest.requestStatus(id2).process(client).getRequestStatus());
+
+  }
+
+  @Test
+  @SuppressWarnings("deprecation")
+  public void testDeprecatedConstructorValidation() throws Exception {
+
+    final CloudSolrClient client = cluster.getSolrClient();
+
     try {
-      deleteStatus.process(cloudClient);
+      new CollectionAdminRequest.DeleteStatus().process(client);
       fail("delete status should have failed");
-    } catch (HttpSolrClient.RemoteSolrException e) {
+    } catch (IllegalArgumentException e) {
       assertTrue(e.getMessage().contains("Either requestid or flush parameter must be specified."));
     }
 
-    deleteStatus = new CollectionAdminRequest.DeleteStatus();
     try {
-      deleteStatus.setFlush(true)
+      new CollectionAdminRequest.DeleteStatus().setFlush(true)
           .setRequestId("foo")
-          .process(cloudClient);
+          .process(client);
       fail("delete status should have failed");
-    } catch (HttpSolrClient.RemoteSolrException e) {
+    } catch (IllegalArgumentException e) {
       assertTrue(e.getMessage().contains("Both requestid and flush parameters can not be specified together."));
     }
   }

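The rewritten test also walks through the async request lifecycle end to end; a condensed sketch (ids, names, and the timeout value mirror the test above):

    // Submit asynchronously, then poll the stored status by id:
    String asyncId = CollectionAdminRequest.createCollection("c1", "conf1", 1, 1)
        .processAsync(client);
    RequestStatusState state = CollectionAdminRequest.requestStatus(asyncId)
        .process(client).getRequestStatus();

    // Or submit and block until completion; note this deletes the stored id:
    state = CollectionAdminRequest.createCollection("c2", "conf1", 1, 1)
        .processAndWait("request1", client, 30);

    // Stored responses can be removed individually or flushed wholesale:
    CollectionAdminRequest.deleteAsyncId(asyncId).process(client);
    CollectionAdminRequest.deleteAllAsyncIds().process(client);
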
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/55c595a9/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
index c9c8c39..4f28408 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
@@ -18,8 +18,6 @@ package org.apache.solr.client.solrj.request;
 
 import java.io.IOException;
 import java.util.Collection;
-import java.util.Iterator;
-import java.util.Map;
 import java.util.Properties;
 import java.util.UUID;
 import java.util.concurrent.TimeUnit;
@@ -80,16 +78,15 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
   }
 
   protected void addProperties(ModifiableSolrParams params, Properties props) {
-    Iterator<Map.Entry<Object, Object>> iter = props.entrySet().iterator();
-    while(iter.hasNext()) {
-      Map.Entry<Object, Object> prop = iter.next();
-      String key = (String) prop.getKey();
-      String value = (String) prop.getValue();
-      params.set(PROPERTY_PREFIX + key, value);
+    for (String propertyName : props.stringPropertyNames()) {
+      params.set(PROPERTY_PREFIX + propertyName, props.getProperty(propertyName));
     }
   }
 
-  protected abstract static class AsyncCollectionAdminRequest extends CollectionAdminRequest<CollectionAdminResponse> {
+  /**
+   * Base class for asynchronous collection admin requests
+   */
+  public abstract static class AsyncCollectionAdminRequest extends CollectionAdminRequest<CollectionAdminResponse> {
 
     public AsyncCollectionAdminRequest(CollectionAction action) {
       super(action);
@@ -164,7 +161,7 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     public RequestStatusState processAndWait(String asyncId, SolrClient client, long timeoutSeconds)
         throws IOException, SolrServerException, InterruptedException {
       processAsync(asyncId, client);
-      return new RequestStatus().setRequestId(asyncId).waitFor(client, timeoutSeconds);
+      return requestStatus(asyncId).waitFor(client, timeoutSeconds);
     }
 
     @Override
@@ -181,10 +178,12 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
 
     protected String collection;
 
-    public AsyncCollectionSpecificAdminRequest(CollectionAction action) {
+    public AsyncCollectionSpecificAdminRequest(CollectionAction action, String collection) {
       super(action);
+      this.collection = collection;
     }
 
+    @Deprecated
     public abstract AsyncCollectionSpecificAdminRequest setCollectionName(String collection);
 
     @Override
@@ -202,12 +201,14 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     protected String collection;
     protected String shard;
 
-    public AsyncShardSpecificAdminRequest(CollectionAction action) {
+    public AsyncShardSpecificAdminRequest(CollectionAction action, String collection, String shard) {
       super(action);
     }
 
+    @Deprecated
     public abstract AsyncShardSpecificAdminRequest setCollectionName(String collection);
 
+    @Deprecated
     public abstract AsyncShardSpecificAdminRequest setShardName(String shard);
 
     @Override
@@ -228,12 +229,14 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     protected String collection;
     protected String shard;
 
-    public ShardSpecificAdminRequest(CollectionAction action) {
+    public ShardSpecificAdminRequest(CollectionAction action, String collection, String shard) {
       super(action);
     }
 
+    @Deprecated
     public abstract ShardSpecificAdminRequest setCollectionName(String collection);
 
+    @Deprecated
     public abstract ShardSpecificAdminRequest setShardName(String shard);
 
     @Override
@@ -264,7 +267,7 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     protected String node;
     protected String role;
 
-    public CollectionAdminRoleRequest(CollectionAction action) {
+    public CollectionAdminRoleRequest(CollectionAction action, String node, String role) {
       super(action);
     }
 
@@ -274,12 +277,14 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
       return this;
     }
 
+    @Deprecated
     public abstract CollectionAdminRoleRequest setNode(String node);
 
     public String getNode() {
       return this.node;
     }
 
+    @Deprecated
     public abstract CollectionAdminRoleRequest setRole(String role);
 
     public String getRole() {
@@ -298,6 +303,17 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
 
   /** Specific Collection API call implementations **/
 
+  /**
+   * Returns a SolrRequest for creating a collection
+   * @param collection the collection name
+   * @param config     the collection config
+   * @param numShards  the number of shards in the collection
+   * @param numReplicas the replication factor of the collection
+   */
+  public static Create createCollection(String collection, String config, int numShards, int numReplicas) {
+    return new Create(collection, config, numShards, numReplicas);
+  }
+
   // CREATE request
   public static class Create extends AsyncCollectionSpecificAdminRequest {
 
@@ -315,17 +331,31 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     protected Integer stateFormat;
     private String[] rule , snitch;
 
+    /**
+     * @deprecated Use {@link #createCollection(String, String, int, int)}
+     */
+    @Deprecated
     public Create() {
-      super(CollectionAction.CREATE);
+      super(CollectionAction.CREATE, null);
     }
 
+    private Create(String collection, String config, int numShards, int numReplicas) {
+      super(CollectionAction.CREATE, SolrIdentifierValidator.validateCollectionName(collection));
+      this.configName = config;
+      this.numShards = numShards;
+      this.replicationFactor = numReplicas;
+    }
+
+    @Deprecated
     public Create setConfigName(String config) { this.configName = config; return this; }
     public Create setCreateNodeSet(String nodeSet) { this.createNodeSet = nodeSet; return this; }
     public Create setRouterName(String routerName) { this.routerName = routerName; return this; }
     public Create setRouterField(String routerField) { this.routerField = routerField; return this; }
+    @Deprecated
     public Create setNumShards(Integer numShards) {this.numShards = numShards; return this; }
     public Create setMaxShardsPerNode(Integer numShards) { this.maxShardsPerNode = numShards; return this; }
     public Create setAutoAddReplicas(boolean autoAddReplicas) { this.autoAddReplicas = autoAddReplicas; return this; }
+    @Deprecated
     public Create setReplicationFactor(Integer repl) { this.replicationFactor = repl; return this; }
     public Create setStateFormat(Integer stateFormat) { this.stateFormat = stateFormat; return this; }
     public Create setRule(String... s){ this.rule = s; return this; }
@@ -350,10 +380,7 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
      */
     public Create setShards(String shards) {
       for (String shard : shards.split(",")) {
-        if (!SolrIdentifierValidator.validateShardName(shard)) {
-          throw new IllegalArgumentException(SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.SHARD,
-              shard));
-        }
+        SolrIdentifierValidator.validateShardName(shard);
       }
       this.shards = shards;
       return this;
@@ -366,16 +393,14 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
      * 
      * @throws IllegalArgumentException if the collection name contains invalid characters.
      */
+    @Deprecated
     public Create setCollectionName(String collectionName) throws SolrException {
-      if (!SolrIdentifierValidator.validateCollectionName(collectionName)) {
-        throw new IllegalArgumentException(SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.COLLECTION,
-            collectionName));
-      }
-      this.collection = collectionName;
+      this.collection = SolrIdentifierValidator.validateCollectionName(collectionName);
       return this;
     }
 
     @Override
+    @Deprecated
     public Create setAsyncId(String id) {
       this.asyncId = id;
       return this;
@@ -426,46 +451,87 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
 
   }
 
+  /**
+   * Returns a SolrRequest to reload a collection
+   */
+  public static Reload reloadCollection(String collection) {
+    return new Reload(collection);
+  }
+
   // RELOAD request
   public static class Reload extends AsyncCollectionSpecificAdminRequest {
 
+    /**
+     * @deprecated use {@link #reloadCollection(String)}
+     */
+    @Deprecated
     public Reload() {
-      super(CollectionAction.RELOAD);
+      super(CollectionAction.RELOAD, null);
+    }
+
+    private Reload(String collection) {
+      super(CollectionAction.RELOAD, collection);
     }
 
     @Override
+    @Deprecated
     public Reload setCollectionName(String collection) {
       this.collection = collection;
       return this;
     }
 
     @Override
+    @Deprecated
     public Reload setAsyncId(String id) {
       this.asyncId = id;
       return this;
     }
   }
 
+  /**
+   * Returns a SolrRequest to delete a collection
+   */
+  public static Delete deleteCollection(String collection) {
+    return new Delete(collection);
+  }
+
   // DELETE request
   public static class Delete extends AsyncCollectionSpecificAdminRequest {
 
+    /**
+     * @deprecated Use {@link #deleteCollection(String)}
+     */
+    @Deprecated
     public Delete() {
-      super(CollectionAction.DELETE);
+      super(CollectionAction.DELETE, null);
+    }
+
+    private Delete(String collection) {
+      super(CollectionAction.DELETE, collection);
     }
 
     @Override
+    @Deprecated
     public Delete setCollectionName(String collection) {
       this.collection = collection;
       return this;
     }
 
     @Override
+    @Deprecated
     public Delete setAsyncId(String id) {
       this.asyncId = id;
       return this;
     }
   }
 
+  /**
+   * Returns a SolrRequest to create a new shard in a collection
+   */
+  public static CreateShard createShard(String collection, String shard) {
+    return new CreateShard(collection, shard);
+  }
+
   // CREATESHARD request
   public static class CreateShard extends AsyncShardSpecificAdminRequest {
 
@@ -490,11 +556,20 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
       return this;
     }
 
+    /**
+     * @deprecated use {@link #createShard(String, String)}
+     */
+    @Deprecated
     public CreateShard() {
-      super(CollectionAction.CREATESHARD);
+      super(CollectionAction.CREATESHARD, null, null);
+    }
+
+    private CreateShard(String collection, String shard) {
+      super(CollectionAction.CREATESHARD, collection, SolrIdentifierValidator.validateShardName(shard));
     }
 
     @Override
+    @Deprecated
     public CreateShard setCollectionName(String collection) {
       this.collection = collection;
       return this;
@@ -508,16 +583,14 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
      * @throws IllegalArgumentException if the shard name contains invalid characters.
      */
     @Override
+    @Deprecated
     public CreateShard setShardName(String shardName) {
-      if (!SolrIdentifierValidator.validateShardName(shardName)) {
-        throw new IllegalArgumentException(SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.SHARD,
-            shardName));
-      }
-      this.shard = shardName;
+      this.shard = SolrIdentifierValidator.validateShardName(shardName);
       return this;
     }
 
     @Override
+    @Deprecated
     public CreateShard setAsyncId(String id) {
       this.asyncId = id;
       return this;
@@ -538,6 +611,13 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
 
   }
 
+  /**
+   * Returns a SolrRequest to split a shard in a collection
+   */
+  public static SplitShard splitShard(String collection, String shard) {
+    return new SplitShard(collection, shard);
+  }
+
   // SPLITSHARD request
   public static class SplitShard extends AsyncShardSpecificAdminRequest {
     protected String ranges;
@@ -545,8 +625,16 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
 
     private Properties properties;
 
+    private SplitShard(String collection, String shard) {
+      super(CollectionAction.SPLITSHARD, collection, shard);
+    }
+
+    /**
+     * @deprecated Use {@link #splitShard(String, String)}
+     */
+    @Deprecated
     public SplitShard() {
-      super(CollectionAction.SPLITSHARD);
+      super(CollectionAction.SPLITSHARD, null, null);
     }
 
     public SplitShard setRanges(String ranges) { this.ranges = ranges; return this; }
@@ -571,18 +659,21 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     }
 
     @Override
+    @Deprecated
     public SplitShard setCollectionName(String collection) {
       this.collection = collection;
       return this;
     }
 
     @Override
+    @Deprecated
     public SplitShard setShardName(String shard) {
       this.shard = shard;
       return this;
     }
 
     @Override
+    @Deprecated
     public SplitShard setAsyncId(String id) {
       this.asyncId = id;
       return this;
@@ -604,14 +695,29 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
 
   }
 
+  /**
+   * Returns a SolrRequest to delete a shard from a collection
+   */
+  public static DeleteShard deleteShard(String collection, String shard) {
+    return new DeleteShard(collection, shard);
+  }
+
   // DELETESHARD request
   public static class DeleteShard extends AsyncShardSpecificAdminRequest {
 
     private Boolean deleteInstanceDir;
     private Boolean deleteDataDir;
 
+    /**
+     * @deprecated Use {@link #deleteShard(String, String)}
+     */
+    @Deprecated
     public DeleteShard() {
-      super(CollectionAction.DELETESHARD);
+      super(CollectionAction.DELETESHARD, null, null);
+    }
+
+    private DeleteShard(String collection, String shard) {
+      super(CollectionAction.DELETESHARD, collection, shard);
     }
 
     public Boolean getDeleteInstanceDir() {
@@ -633,18 +739,21 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     }
 
     @Override
+    @Deprecated
     public DeleteShard setCollectionName(String collection) {
       this.collection = collection;
       return this;
     }
 
     @Override
+    @Deprecated
     public DeleteShard setShardName(String shard) {
       this.shard = shard;
       return this;
     }
 
     @Override
+    @Deprecated
     public DeleteShard setAsyncId(String id) {
       this.asyncId = id;
       return this;
@@ -663,21 +772,41 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     }
   }
 
+  /**
+   * Returns a SolrRequest to force a leader election for a shard in a collection
+   *
+   * WARNING: This may cause data loss if the new leader does not contain updates
+   * acknowledged by the old leader.  Use only if leadership elections are entirely
+   * broken.
+   */
+  public static ForceLeader forceLeaderElection(String collection, String shard) {
+    return new ForceLeader(collection, shard);
+  }
+
   // FORCELEADER request
   public static class ForceLeader extends ShardSpecificAdminRequest {
 
+    /**
+     * @deprecated Use {@link #forceLeaderElection(String, String)}
+     */
+    @Deprecated
     public ForceLeader() {
-      super(CollectionAction.FORCELEADER);
+      super(CollectionAction.FORCELEADER, null, null);
     }
 
+    private ForceLeader(String collection, String shard) {
+      super(CollectionAction.FORCELEADER, collection, shard);
+    }
 
     @Override
+    @Deprecated
     public ForceLeader setCollectionName(String collection) {
       this.collection = collection;
       return this;
     }
 
     @Override
+    @Deprecated
     public ForceLeader setShardName(String shard) {
       this.shard = shard;
       return this;
@@ -685,6 +814,9 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
 
   }
 
+  /**
+   * A response object for {@link RequestStatus} requests
+   */
   public static class RequestStatusResponse extends CollectionAdminResponse {
 
     public RequestStatusState getRequestStatus() {
@@ -694,15 +826,34 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
 
   }
 
+  /**
+   * Returns a SolrRequest for checking the status of an asynchronous request
+   *
+   * @see CollectionAdminRequest.AsyncCollectionAdminRequest
+   */
+  public static RequestStatus requestStatus(String requestId) {
+    return new RequestStatus(requestId);
+  }
+
   // REQUESTSTATUS request
   public static class RequestStatus extends CollectionAdminRequest<RequestStatusResponse> {
 
     protected String requestId = null;
 
+    private RequestStatus(String requestId) {
+      super(CollectionAction.REQUESTSTATUS);
+      this.requestId = requestId;
+    }
+
+    /**
+     * @deprecated Use {@link #requestStatus(String)}
+     */
+    @Deprecated
     public RequestStatus() {
       super(CollectionAction.REQUESTSTATUS);
     }
 
+    @Deprecated
     public RequestStatus setRequestId(String requestId) {
       this.requestId = requestId;
       return this;
@@ -726,6 +877,12 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
       return new RequestStatusResponse();
     }
 
+    /**
+     * Wait until the asynchronous request is either completed or failed, up to a timeout
+     * @param client a SolrClient
+     * @param timeoutSeconds the maximum time to wait in seconds
+     * @return the last seen state of the request
+     */
     public RequestStatusState waitFor(SolrClient client, long timeoutSeconds)
         throws IOException, SolrServerException, InterruptedException {
       long finishTime = System.nanoTime() + TimeUnit.SECONDS.toNanos(timeoutSeconds);
@@ -733,7 +890,7 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
       while (System.nanoTime() < finishTime) {
         state = this.process(client).getRequestStatus();
         if (state == RequestStatusState.COMPLETED || state == RequestStatusState.FAILED) {
-          new DeleteStatus().setRequestId(requestId).process(client);
+          deleteAsyncId(requestId).process(client);
           return state;
         }
         TimeUnit.SECONDS.sleep(1);
@@ -742,21 +899,43 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     }
   }
 
+  /**
+   * Returns a SolrRequest to delete an asynchronous request status
+   */
+  public static DeleteStatus deleteAsyncId(String requestId) {
+    return new DeleteStatus(requestId);
+  }
+
+  public static DeleteStatus deleteAllAsyncIds() {
+    return new DeleteStatus().setFlush(true);
+  }
+
   // DELETESTATUS request
   public static class DeleteStatus extends CollectionAdminRequest<CollectionAdminResponse> {
 
     protected String requestId = null;
     protected Boolean flush = null;
 
+    private DeleteStatus(String requestId) {
+      super(CollectionAction.DELETESTATUS);
+      this.requestId = requestId;
+    }
+
+    /**
+     * @deprecated Use {@link #deleteAsyncId(String)} or {@link #deleteAllAsyncIds()}
+     */
+    @Deprecated
     public DeleteStatus() {
       super(CollectionAction.DELETESTATUS);
     }
 
+    @Deprecated
     public DeleteStatus setRequestId(String requestId) {
       this.requestId = requestId;
       return this;
     }
 
+    @Deprecated
     public DeleteStatus setFlush(Boolean flush) {
       this.flush = flush;
       return this;
@@ -773,9 +952,12 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     @Override
     public SolrParams getParams() {
       ModifiableSolrParams params = (ModifiableSolrParams) super.getParams();
+      if (requestId == null && flush == null)
+        throw new IllegalArgumentException("Either requestid or flush parameter must be specified.");
+      if (requestId != null && flush != null)
+        throw new IllegalArgumentException("Both requestid and flush parameters cannot be specified together.");
       if (requestId != null)
         params.set(CoreAdminParams.REQUESTID, requestId);
-
       if (flush != null)
         params.set(CollectionAdminParams.FLUSH, flush);
       return params;
@@ -788,12 +970,31 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
 
   }
 
+  /**
+   * Returns a SolrRequest to create a new alias
+   * @param aliasName           the alias name
+   * @param aliasedCollections  the collections to alias
+   */
+  public static CreateAlias createAlias(String aliasName, String aliasedCollections) {
+    return new CreateAlias(aliasName, aliasedCollections);
+  }
+
   // CREATEALIAS request
   public static class CreateAlias extends AsyncCollectionAdminRequest {
 
     protected String aliasName;
     protected String aliasedCollections;
 
+    private CreateAlias(String aliasName, String aliasedCollections) {
+      super(CollectionAction.CREATEALIAS);
+      this.aliasName = SolrIdentifierValidator.validateAliasName(aliasName);
+      this.aliasedCollections = aliasedCollections;
+    }
+
+    /**
+     * @deprecated Use {@link #createAlias(String, String)}
+     */
+    @Deprecated
     public CreateAlias() {
       super(CollectionAction.CREATEALIAS);
     }
@@ -805,12 +1006,9 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
      * 
      * @throws IllegalArgumentException if the alias name contains invalid characters.
      */
+    @Deprecated
     public CreateAlias setAliasName(String aliasName) {
-      if (!SolrIdentifierValidator.validateCollectionName(aliasName)) {
-        throw new IllegalArgumentException(SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.ALIAS,
-            aliasName));
-      }
-      this.aliasName = aliasName;
+      this.aliasName = SolrIdentifierValidator.validateAliasName(aliasName);
       return this;
     }
 
@@ -818,6 +1016,7 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
       return aliasName;
     }
 
+    @Deprecated
     public CreateAlias setAliasedCollections(String alias) {
       this.aliasedCollections = alias;
       return this;
@@ -828,6 +1027,7 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     }
 
     @Override
+    @Deprecated
     public CreateAlias setAsyncId(String id) {
       this.asyncId = id;
       return this;
@@ -843,21 +1043,39 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
 
   }
 
+  /**
+   * Returns a SolrRequest to delete an alias
+   */
+  public static DeleteAlias deleteAlias(String aliasName) {
+    return new DeleteAlias(aliasName);
+  }
+
   // DELETEALIAS request
   public static class DeleteAlias extends AsyncCollectionAdminRequest {
 
     protected String aliasName;
 
+    private DeleteAlias(String aliasName) {
+      super(CollectionAction.DELETEALIAS);
+      this.aliasName = aliasName;
+    }
+
+    /**
+     * @deprecated Use {@link #deleteAlias(String)}
+     */
+    @Deprecated
     public DeleteAlias() {
       super(CollectionAction.DELETEALIAS);
     }
 
+    @Deprecated
     public DeleteAlias setAliasName(String aliasName) {
       this.aliasName = aliasName;
       return this;
     }
 
     @Override
+    @Deprecated
     public DeleteAlias setAsyncId(String id) {
       this.asyncId = id;
       return this;
@@ -873,6 +1091,20 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
 
   }
 
+  /**
+   * Returns a SolrRequest to add a replica to a shard in a collection
+   */
+  public static AddReplica addReplicaToShard(String collection, String shard) {
+    return new AddReplica(collection, shard, null);
+  }
+
+  /**
+   * Returns a SolrRequest to add a replica to a collection using a route key
+   */
+  public static AddReplica addReplicaByRouteKey(String collection, String routeKey) {
+    return new AddReplica(collection, null, routeKey);
+  }
+
   // ADDREPLICA request
   public static class AddReplica extends AsyncCollectionAdminRequest {
 
@@ -884,10 +1116,21 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     protected String dataDir;
     protected Properties properties;
 
+    /**
+     * @deprecated Use {@link #addReplicaByRouteKey(String, String)} or {@link #addReplicaToShard(String, String)}
+     */
+    @Deprecated
     public AddReplica() {
       super(CollectionAction.ADDREPLICA);
     }
 
+    private AddReplica(String collection, String shard, String routeKey) {
+      super(CollectionAction.ADDREPLICA);
+      this.collection = collection;
+      this.shard = shard;
+      this.routeKey = routeKey;
+    }
+
     public Properties getProperties() {
       return properties;
     }
@@ -910,6 +1153,7 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
       return routeKey;
     }
 
+    @Deprecated
     public AddReplica setRouteKey(String routeKey) {
       this.routeKey = routeKey;
       return this;
@@ -933,17 +1177,20 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
       return this;
     }
 
+    @Deprecated
     public AddReplica setCollectionName(String collection) {
       this.collection = collection;
       return this;
     }
 
+    @Deprecated
     public AddReplica setShardName(String shard) {
       this.shard = shard;
       return this;
     }
 
     @Override
+    @Deprecated
     public AddReplica setAsyncId(String id) {
       this.asyncId = id;
       return this;
@@ -957,7 +1204,7 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
       params.add(CoreAdminParams.COLLECTION, collection);
       if (shard == null || shard.isEmpty()) {
         if (routeKey == null) {
-          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Either shard or routeKey must be provided");
+          throw new IllegalArgumentException("Either shard or routeKey must be provided");
         }
         params.add(ShardParams._ROUTE_, routeKey);
       }
@@ -979,7 +1226,13 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
       return params;
     }
 
+  }
 
+  /**
+   * Returns a SolrRequest to delete a replica from a shard in a collection
+   */
+  public static DeleteReplica deleteReplica(String collection, String shard, String replica) {
+    return new DeleteReplica(collection, shard, replica);
   }
 
   // DELETEREPLICA request
@@ -991,10 +1244,20 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     private Boolean deleteInstanceDir;
     private Boolean deleteIndexDir;
 
+    /**
+     * @deprecated Use {@link #deleteReplica(String, String, String)}
+     */
+    @Deprecated
     public DeleteReplica() {
-      super(CollectionAction.DELETEREPLICA);
+      super(CollectionAction.DELETEREPLICA, null, null);
+    }
+
+    private DeleteReplica(String collection, String shard, String replica) {
+      super(CollectionAction.DELETEREPLICA, collection, shard);
+      this.replica = replica;
     }
 
+    @Deprecated
     public DeleteReplica setReplica(String replica) {
       this.replica = replica;
       return this;
@@ -1014,18 +1277,21 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     }
 
     @Override
+    @Deprecated
     public DeleteReplica setCollectionName(String collection) {
       this.collection = collection;
       return this;
     }
 
     @Override
+    @Deprecated
     public DeleteReplica setShardName(String shard) {
       this.shard = shard;
       return this;
     }
 
     @Override
+    @Deprecated
     public DeleteReplica setAsyncId(String id) {
       this.asyncId = id;
       return this;
@@ -1070,16 +1336,34 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     }
   }
 
+  /**
+   * Returns a SolrRequest to set a cluster property
+   */
+  public static ClusterProp setClusterProperty(String propertyName, String propertyValue) {
+    return new ClusterProp(propertyName, propertyValue);
+  }
+
   // CLUSTERPROP request
   public static class ClusterProp extends CollectionAdminRequest<CollectionAdminResponse> {
 
     private String propertyName;
     private String propertyValue;
 
+    /**
+     * @deprecated Use {@link #setClusterProperty(String, String)}
+     */
+    @Deprecated
     public ClusterProp() {
       super(CollectionAction.CLUSTERPROP);
     }
 
+    private ClusterProp(String propertyName, String propertyValue) {
+      super(CollectionAction.CLUSTERPROP);
+      this.propertyName = propertyName;
+      this.propertyValue = propertyValue;
+    }
+
+    @Deprecated
     public ClusterProp setPropertyName(String propertyName) {
       this.propertyName = propertyName;
       return this;
@@ -1089,6 +1373,7 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
       return this.propertyName;
     }
 
+    @Deprecated
     public ClusterProp setPropertyValue(String propertyValue) {
       this.propertyValue = propertyValue;
       return this;
@@ -1115,6 +1400,13 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
 
   }
 
+  /**
+   * Returns a SolrRequest to migrate data matching a split key to another collection
+   */
+  public static Migrate migrateData(String collection, String targetCollection, String splitKey) {
+    return new Migrate(collection, targetCollection, splitKey);
+  }
+
   // MIGRATE request
   public static class Migrate extends AsyncCollectionAdminRequest {
 
@@ -1124,10 +1416,22 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     private Integer forwardTimeout;
     private Properties properties;
 
+    /**
+     * @deprecated Use {@link #migrateData(String, String, String)}
+     */
+    @Deprecated
     public Migrate() {
       super(CollectionAction.MIGRATE);
     }
 
+    private Migrate(String collection, String targetCollection, String splitKey) {
+      super(CollectionAction.MIGRATE);
+      this.collection = collection;
+      this.targetCollection = targetCollection;
+      this.splitKey = splitKey;
+    }
+
+    @Deprecated
     public Migrate setCollectionName(String collection) {
       this.collection = collection;
       return this;
@@ -1137,6 +1441,7 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
       return collection;
     }
 
+    @Deprecated
     public Migrate setTargetCollection(String targetCollection) {
       this.targetCollection = targetCollection;
       return this;
@@ -1146,6 +1451,7 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
       return this.targetCollection;
     }
 
+    @Deprecated
     public Migrate setSplitKey(String splitKey) {
       this.splitKey = splitKey;
       return this;
@@ -1174,6 +1480,7 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     }
 
     @Override
+    @Deprecated
     public Migrate setAsyncId(String id) {
       this.asyncId = id;
       return this;
@@ -1198,46 +1505,87 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
 
   }
 
+  /**
+   * Returns a SolrRequest to add a role to a node
+   */
+  public static AddRole addRole(String node, String role) {
+    return new AddRole(node, role);
+  }
+
   // ADDROLE request
   public static class AddRole extends CollectionAdminRoleRequest {
 
+    /**
+     * @deprecated Use {@link #addRole(String, String)}
+     */
+    @Deprecated
     public AddRole() {
-      super(CollectionAction.ADDROLE);
+      super(CollectionAction.ADDROLE, null, null);
+    }
+
+    private AddRole(String node, String role) {
+      super(CollectionAction.ADDROLE, node, role);
     }
 
     @Override
+    @Deprecated
     public AddRole setNode(String node) {
       this.node = node;
       return this;
     }
 
     @Override
+    @Deprecated
     public AddRole setRole(String role) {
       this.role = role;
       return this;
     }
   }
 
+  /**
+   * Returns a SolrRequest to remove a role from a node
+   */
+  public static RemoveRole removeRole(String node, String role) {
+    return new RemoveRole(node, role);
+  }
+
   // REMOVEROLE request
   public static class RemoveRole extends CollectionAdminRoleRequest {
 
+    /**
+     * @deprecated Use {@link #removeRole(String, String)}
+     */
+    @Deprecated
     public RemoveRole() {
-      super(CollectionAction.REMOVEROLE);
+      super(CollectionAction.REMOVEROLE, null, null);
+    }
+
+    private RemoveRole(String node, String role) {
+      super(CollectionAction.REMOVEROLE, node, role);
     }
 
     @Override
+    @Deprecated
     public RemoveRole setNode(String node) {
       this.node = node;
       return this;
     }
 
     @Override
+    @Deprecated
     public RemoveRole setRole(String role) {
       this.role = role;
       return this;
     }
   }
 
+  /**
+   * Returns a SolrRequest to get the Overseer status
+   */
+  public static OverseerStatus getOverseerStatus() {
+    return new OverseerStatus();
+  }
+
   // OVERSEERSTATUS request
   public static class OverseerStatus extends AsyncCollectionAdminRequest {
 
@@ -1246,12 +1594,20 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     }
 
     @Override
+    @Deprecated
     public OverseerStatus setAsyncId(String id) {
       this.asyncId = id;
       return this;
     }
   }
 
+  /**
+   * Returns a SolrRequest to get the cluster status
+   */
+  public static ClusterStatus getClusterStatus() {
+    return new ClusterStatus();
+  }
+
   // CLUSTERSTATUS request
   public static class ClusterStatus extends CollectionAdminRequest<CollectionAdminResponse> {
 
@@ -1310,7 +1666,13 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
       return new CollectionAdminResponse();
     }
 
+  }
 
+  /**
+   * Returns a SolrRequest to get a list of collections in the cluster
+   */
+  public static List listCollections() {
+    return new List();
   }
 
   // LIST request
@@ -1325,6 +1687,14 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     }
   }
 
+  /**
+   * Returns a SolrRequest to add a property to a specific replica
+   */
+  public static AddReplicaProp addReplicaProperty(String collection, String shard, String replica,
+                                                  String propertyName, String propertyValue) {
+    return new AddReplicaProp(collection, shard, replica, propertyName, propertyValue);
+  }
+
   // ADDREPLICAPROP request
   public static class AddReplicaProp extends AsyncShardSpecificAdminRequest {
 
@@ -1333,14 +1703,26 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     private String propertyValue;
     private Boolean shardUnique;
 
+    /**
+     * @deprecated Use {@link #addReplicaProperty(String, String, String, String, String)}
+     */
+    @Deprecated
     public AddReplicaProp() {
-      super(CollectionAction.ADDREPLICAPROP);
+      super(CollectionAction.ADDREPLICAPROP, null, null);
+    }
+
+    private AddReplicaProp(String collection, String shard, String replica, String propertyName, String propertyValue) {
+      super(CollectionAction.ADDREPLICAPROP, collection, shard);
+      this.replica = replica;
+      this.propertyName = propertyName;
+      this.propertyValue = propertyValue;
     }
 
     public String getReplica() {
       return replica;
     }
 
+    @Deprecated
     public AddReplicaProp setReplica(String replica) {
       this.replica = replica;
       return this;
@@ -1350,6 +1732,7 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
       return propertyName;
     }
 
+    @Deprecated
     public AddReplicaProp setPropertyName(String propertyName) {
       this.propertyName = propertyName;
       return this;
@@ -1359,6 +1742,7 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
       return propertyValue;
     }
 
+    @Deprecated
     public AddReplicaProp setPropertyValue(String propertyValue) {
       this.propertyValue = propertyValue;
       return this;
@@ -1374,18 +1758,21 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     }
 
     @Override
+    @Deprecated
     public AddReplicaProp setCollectionName(String collection) {
       this.collection = collection;
       return this;
     }
 
     @Override
+    @Deprecated
     public AddReplicaProp setShardName(String shard) {
       this.shard = shard;
       return this;
     }
 
     @Override
+    @Deprecated
     public AddReplicaProp setAsyncId(String id) {
       this.asyncId = id;
       return this;
@@ -1407,20 +1794,39 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
 
   }
 
+  /**
+   * Returns a SolrRequest to delete a property from a specific replica
+   */
+  public static DeleteReplicaProp deleteReplicaProperty(String collection, String shard,
+                                                        String replica, String propertyName) {
+    return new DeleteReplicaProp(collection, shard, replica, propertyName);
+  }
+
   // DELETEREPLICAPROP request
   public static class DeleteReplicaProp extends AsyncShardSpecificAdminRequest {
 
     private String replica;
     private String propertyName;
 
+    /**
+     * @deprecated Use {@link #deleteReplicaProperty(String, String, String, String)}
+     */
+    @Deprecated
     public DeleteReplicaProp() {
-      super(CollectionAction.DELETEREPLICAPROP);
+      super(CollectionAction.DELETEREPLICAPROP, null, null);
+    }
+
+    private DeleteReplicaProp(String collection, String shard, String replica, String propertyName) {
+      super(CollectionAction.DELETEREPLICAPROP, collection, shard);
+      this.replica = replica;
+      this.propertyName = propertyName;
     }
 
     public String getReplica() {
       return replica;
     }
 
+    @Deprecated
     public DeleteReplicaProp setReplica(String replica) {
       this.replica = replica;
       return this;
@@ -1430,24 +1836,28 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
       return propertyName;
     }
 
+    @Deprecated
     public DeleteReplicaProp setPropertyName(String propertyName) {
       this.propertyName = propertyName;
       return this;
     }
 
     @Override
+    @Deprecated
     public DeleteReplicaProp setCollectionName(String collection) {
       this.collection = collection;
       return this;
     }
 
     @Override
+    @Deprecated
     public DeleteReplicaProp setShardName(String shard) {
       this.shard = shard;
       return this;
     }
 
     @Override
+    @Deprecated
     public DeleteReplicaProp setAsyncId(String id) {
       this.asyncId = id;
       return this;
@@ -1464,21 +1874,41 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
 
   }
 
+  /**
+   * Returns a SolrRequest to migrate a collection state format
+   *
+   * This is an expert-level request, and should not generally be necessary.
+   */
+  public static MigrateClusterState migrateCollectionFormat(String collection) {
+    return new MigrateClusterState(collection);
+  }
+
   // MIGRATECLUSTERSTATE request
   public static class MigrateClusterState extends AsyncCollectionAdminRequest {
 
     protected String collection;
 
+    private MigrateClusterState(String collection) {
+      super(CollectionAction.MIGRATESTATEFORMAT);
+      this.collection = collection;
+    }
+
+    /**
+     * @deprecated Use {@link #migrateCollectionFormat(String)}
+     */
+    @Deprecated
     public MigrateClusterState() {
       super(CollectionAction.MIGRATESTATEFORMAT);
     }
 
+    @Deprecated
     public MigrateClusterState setCollectionName(String collection) {
       this.collection = collection;
       return this;
     }
 
     @Override
+    @Deprecated
     public MigrateClusterState setAsyncId(String id) {
       this.asyncId = id;
       return this;
@@ -1494,6 +1924,13 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     }
   }
 
+  /**
+   * Returns a SolrRequest to balance a replica property across the shards of a collection
+   */
+  public static BalanceShardUnique balanceReplicaProperty(String collection, String propertyName) {
+    return new BalanceShardUnique(collection, propertyName);
+  }
+
   // BALANCESHARDUNIQUE request
   public static class BalanceShardUnique extends AsyncCollectionAdminRequest {
 
@@ -1502,6 +1939,16 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     protected Boolean onlyActiveNodes;
     protected Boolean shardUnique;
 
+    private BalanceShardUnique(String collection, String propertyName) {
+      super(CollectionAction.BALANCESHARDUNIQUE);
+      this.collection = collection;
+      this.propertyName = propertyName;
+    }
+
+    /**
+     * @deprecated Use {@link #balanceReplicaProperty(String, String)}
+     */
+    @Deprecated
     public BalanceShardUnique() {
       super(CollectionAction.BALANCESHARDUNIQUE);
     }
@@ -1510,6 +1957,7 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
       return propertyName;
     }
 
+    @Deprecated
     public BalanceShardUnique setPropertyName(String propertyName) {
       this.propertyName = propertyName;
       return this;
@@ -1533,6 +1981,7 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
       return this;
     }
 
+    @Deprecated
     public BalanceShardUnique setCollection(String collection) {
       this.collection = collection;
       return this;
@@ -1543,6 +1992,7 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     }
 
     @Override
+    @Deprecated
     public BalanceShardUnique setAsyncId(String id) {
       this.asyncId = id;
       return this;

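For reference, a minimal usage sketch of the factory-method style introduced
above (this is not part of the commit; the zkHost address, config name,
collection names, and async id are placeholders):

import java.io.IOException;

import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.response.RequestStatusState;

public class FactoryStyleExample {
  public static void main(String[] args)
      throws SolrServerException, IOException, InterruptedException {
    try (CloudSolrClient client = new CloudSolrClient("localhost:9983")) {
      // Old style (now deprecated): new Create().setCollectionName("c1")...
      // New style: required arguments are taken, and validated, up front.
      CollectionAdminRequest.createCollection("c1", "myConf", 2, 1)
          .setMaxShardsPerNode(2)
          .process(client);

      // Async variant: start the request with an async id (via the setter,
      // deprecated above but used here for brevity), then poll until done.
      CollectionAdminRequest.createCollection("c2", "myConf", 2, 1)
          .setAsyncId("create-c2")
          .process(client);
      RequestStatusState state =
          CollectionAdminRequest.requestStatus("create-c2").waitFor(client, 60);
      System.out.println("create-c2 finished as: " + state);

      CollectionAdminRequest.reloadCollection("c1").process(client);
      CollectionAdminRequest.deleteCollection("c1").process(client);
    }
  }
}
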
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/55c595a9/solr/solrj/src/java/org/apache/solr/client/solrj/request/CoreAdminRequest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CoreAdminRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CoreAdminRequest.java
index ab563ed..7d9e356 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CoreAdminRequest.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CoreAdminRequest.java
@@ -16,6 +16,11 @@
  */
 package org.apache.solr.client.solrj.request;
 
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -29,11 +34,6 @@ import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.ContentStream;
 
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
-
 /**
  * This class is experimental and subject to change.
  *
@@ -110,11 +110,7 @@ public class CoreAdminRequest extends SolrRequest<CoreAdminResponse> {
      */
     @Override
     public void setCoreName(String coreName) {
-      if (!SolrIdentifierValidator.validateCoreName(coreName)) {
-        throw new IllegalArgumentException(SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.CORE,
-            coreName));
-      }
-      this.core = coreName;
+      this.core = SolrIdentifierValidator.validateCoreName(coreName);
     }
     
     @Override
@@ -559,14 +555,9 @@ public class CoreAdminRequest extends SolrRequest<CoreAdminResponse> {
    */
   public static CoreAdminResponse renameCore(String coreName, String newName, SolrClient client )
       throws SolrServerException, IOException {
-    if (!SolrIdentifierValidator.validateCoreName(newName)) {
-      throw new IllegalArgumentException(SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.CORE,
-          newName));
-    }
-    
     CoreAdminRequest req = new CoreAdminRequest();
     req.setCoreName(coreName);
-    req.setOtherCoreName(newName);
+    req.setOtherCoreName(SolrIdentifierValidator.validateCoreName(newName));
     req.setAction( CoreAdminAction.RENAME );
     return req.process( client );
   }

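A short sketch of the effect of this change (hypothetical core names and base
URL): an invalid target name now fails fast with an IllegalArgumentException
thrown by the validator, instead of requiring a boolean check at the call site.

import java.io.IOException;

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.CoreAdminRequest;

public class RenameCoreSketch {
  public static void main(String[] args) throws SolrServerException, IOException {
    try (SolrClient client = new HttpSolrClient("http://localhost:8983/solr")) {
      try {
        CoreAdminRequest.renameCore("oldCore", "bad name!", client);
      } catch (IllegalArgumentException e) {
        // "bad name!" fails SolrIdentifierValidator's identifier pattern, so
        // the rename is rejected client-side before any request is sent.
        System.err.println(e.getMessage());
      }
    }
  }
}
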
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/55c595a9/solr/solrj/src/java/org/apache/solr/client/solrj/util/SolrIdentifierValidator.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/util/SolrIdentifierValidator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/util/SolrIdentifierValidator.java
index 449c621..57f9909 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/util/SolrIdentifierValidator.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/util/SolrIdentifierValidator.java
@@ -32,18 +32,28 @@ public class SolrIdentifierValidator {
     SHARD, COLLECTION, CORE, ALIAS
   }
 
-  public static boolean validateShardName(String shardName) {
-    return validateIdentifier(shardName);
+  public static String validateName(IdentifierType type, String name) {
+    if (!validateIdentifier(name))
+      throw new IllegalArgumentException(getIdentifierMessage(type, name));
+    return name;
   }
-  
-  public static boolean validateCollectionName(String collectionName) {
-    return validateIdentifier(collectionName);
+
+  public static String validateShardName(String shardName) {
+    return validateName(IdentifierType.SHARD, shardName);
   }
   
-  public static boolean validateCoreName(String name) {
-    return validateIdentifier(name);
+  public static String validateCollectionName(String collectionName) {
+    return validateName(IdentifierType.COLLECTION, collectionName);
   }
-  
+
+  public static String validateAliasName(String alias) {
+    return validateName(IdentifierType.ALIAS, alias);
+  }
+
+  public static String validateCoreName(String coreName) {
+    return validateName(IdentifierType.CORE, coreName);
+  }
+
   private static boolean validateIdentifier(String identifier) {
     if (identifier == null || ! identifierPattern.matcher(identifier).matches()) {
       return false;


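The net effect of the validator change, as a sketch (sample names are
placeholders): the validate methods now return the identifier on success, so
call sites can inline them, and throw IllegalArgumentException on failure.

import org.apache.solr.client.solrj.util.SolrIdentifierValidator;

public class ValidatorSketch {
  public static void main(String[] args) {
    // A valid identifier is returned unchanged, enabling the inlined call
    // sites seen above, e.g. this.shard = validateShardName(shardName).
    String shard = SolrIdentifierValidator.validateShardName("shard1");
    System.out.println(shard);  // prints "shard1"

    try {
      SolrIdentifierValidator.validateCollectionName("no spaces allowed");
    } catch (IllegalArgumentException e) {
      // Invalid names throw, carrying the message from getIdentifierMessage(...).
      System.err.println(e.getMessage());
    }
  }
}
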
[30/50] [abbrv] lucene-solr git commit: SOLR-8135: SolrCloudExampleTest.testLoadDocsIntoGettingStartedCollection reproducible failure

Posted by ho...@apache.org.
SOLR-8135: SolrCloudExampleTest.testLoadDocsIntoGettingStartedCollection reproducible failure


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/8cc978b5
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/8cc978b5
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/8cc978b5

Branch: refs/heads/jira/SOLR-445
Commit: 8cc978b53b1299a27de492d7114cd2d4e353b6cb
Parents: d35d569
Author: Noble Paul <no...@apache.org>
Authored: Fri Mar 11 00:49:49 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Fri Mar 11 00:49:49 2016 +0530

----------------------------------------------------------------------
 .../core/src/java/org/apache/solr/core/CoreContainer.java |  6 ++++--
 solr/core/src/java/org/apache/solr/core/SolrCore.java     | 10 +++++++---
 .../src/java/org/apache/solr/update/SolrCoreState.java    |  8 +++++++-
 .../test/org/apache/solr/cloud/SolrCloudExampleTest.java  |  1 -
 4 files changed, 18 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cc978b5/solr/core/src/java/org/apache/solr/core/CoreContainer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index 1d614e3..c140fb4 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -62,6 +62,7 @@ import org.apache.solr.security.AuthorizationPlugin;
 import org.apache.solr.security.HttpClientInterceptorPlugin;
 import org.apache.solr.security.PKIAuthenticationPlugin;
 import org.apache.solr.security.SecurityPluginHolder;
+import org.apache.solr.update.SolrCoreState;
 import org.apache.solr.update.UpdateShardHandler;
 import org.apache.solr.util.DefaultSolrThreadFactory;
 import org.apache.zookeeper.KeeperException;
@@ -916,8 +917,9 @@ public class CoreContainer {
       log.info("Reloading SolrCore '{}' using configuration from {}", cd.getName(), coreConfig.getName());
       SolrCore newCore = core.reload(coreConfig);
       registerCore(name, newCore, false);
-    }
-    catch (Exception e) {
+    } catch (SolrCoreState.CoreIsClosedException e) {
+      throw e;
+    } catch (Exception e) {
       coreInitFailures.put(cd.getName(), new CoreLoadFailure(cd, e));
       throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to reload core [" + cd.getName() + "]", e);
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cc978b5/solr/core/src/java/org/apache/solr/core/SolrCore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index cde878a..7a65a72 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -534,8 +534,8 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
 
     // Create the index if it doesn't exist.
     if(!indexExists) {
-      log.warn(logid+"Solr index directory '" + new File(indexDir) + "' doesn't exist."
-              + " Creating new index...");
+      log.warn(logid + "Solr index directory '" + new File(indexDir) + "' doesn't exist."
+          + " Creating new index...");
 
       SolrIndexWriter writer = SolrIndexWriter.create(this, "SolrCore.initIndex", indexDir, getDirectoryFactory(), true,
                                                       getLatestSchema(), solrConfig.indexConfig, solrDelPolicy, codec);
@@ -2501,7 +2501,11 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
           checkStale(zkClient, solrConfigPath, overlayVersion) ||
           checkStale(zkClient, managedSchmaResourcePath, managedSchemaVersion)) {
         log.info("core reload {}", coreName);
-        cc.reload(coreName);
+        try {
+          cc.reload(coreName);
+        } catch (SolrCoreState.CoreIsClosedException e) {
+          /* no problem, this core is already closed */
+        }
         return;
       }
       //some files in conf directory may have  other than managedschema, overlay, params

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cc978b5/solr/core/src/java/org/apache/solr/update/SolrCoreState.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/SolrCoreState.java b/solr/core/src/java/org/apache/solr/update/SolrCoreState.java
index 42727b4..fc0bca8 100644
--- a/solr/core/src/java/org/apache/solr/update/SolrCoreState.java
+++ b/solr/core/src/java/org/apache/solr/update/SolrCoreState.java
@@ -51,7 +51,7 @@ public abstract class SolrCoreState {
   public void increfSolrCoreState() {
     synchronized (this) {
       if (solrCoreStateRefCnt == 0) {
-        throw new IllegalStateException("IndexWriter has been closed");
+        throw new CoreIsClosedException("IndexWriter has been closed");
       }
       solrCoreStateRefCnt++;
     }
@@ -157,4 +157,10 @@ public abstract class SolrCoreState {
   public abstract boolean getLastReplicateIndexSuccess();
 
   public abstract void setLastReplicateIndexSuccess(boolean success);
+
+  public static class CoreIsClosedException extends IllegalStateException {
+    public CoreIsClosedException(String s) {
+      super(s);
+    }
+  }
 }

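A sketch of the handling pattern this commit introduces (the helper below is
hypothetical; inside SolrCore the equivalent code runs with the CoreContainer
and core name already in scope):

import org.apache.solr.core.CoreContainer;
import org.apache.solr.update.SolrCoreState;

public class ReloadSketch {
  // Because CoreIsClosedException extends IllegalStateException, it can be
  // caught specifically: a reload racing with a core close is ignored rather
  // than being recorded as a CoreLoadFailure.
  static void reloadIfStillOpen(CoreContainer cc, String coreName) {
    try {
      cc.reload(coreName);
    } catch (SolrCoreState.CoreIsClosedException e) {
      // No problem: the core was closed while the reload was pending.
    }
  }
}
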
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cc978b5/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java b/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java
index e889d90..f084c20 100644
--- a/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java
@@ -43,7 +43,6 @@ import org.slf4j.LoggerFactory;
  * this test is useful for catching regressions in indexing the example docs in collections that
  * use data-driven schema and managed schema features provided by configsets/data_driven_schema_configs.
  */
-@LuceneTestCase.BadApple(bugUrl = "https://issues.apache.org/jira/browse/SOLR-8135")
 public class SolrCloudExampleTest extends AbstractFullDistribZkTestBase {
 
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());


[25/50] [abbrv] lucene-solr git commit: Fix javadocs for ClusterState.getCollectionOrNull

Posted by ho...@apache.org.
Fix javadocs for ClusterState.getCollectionOrNull


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/66cd070d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/66cd070d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/66cd070d

Branch: refs/heads/jira/SOLR-445
Commit: 66cd070d7a3510456c5dce1890c6de4ba3263520
Parents: 162636b
Author: Shalin Shekhar Mangar <sh...@apache.org>
Authored: Thu Mar 10 18:16:43 2016 +0530
Committer: Shalin Shekhar Mangar <sh...@apache.org>
Committed: Thu Mar 10 18:16:43 2016 +0530

----------------------------------------------------------------------
 .../src/java/org/apache/solr/common/cloud/ClusterState.java      | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/66cd070d/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java
index ff0e6a3..2495c41 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java
@@ -180,9 +180,9 @@ public class ClusterState implements JSONWriter.Writable {
    * if such a collection exists. Returns null otherwise.
    *
    * Implementation note: This method resolves the collection reference by calling
-   * {@link CollectionRef#get()} which can make a call to ZooKeeper. This is necessary
+   * {@link CollectionRef#get()} which may make a call to ZooKeeper. This is necessary
    * because the semantics of how collection list is loaded have changed in SOLR-6629.
-   * Please javadocs in {@link ZkStateReader#refreshCollectionList(Watcher)}
+   * Please see javadocs in {@link ZkStateReader#refreshCollectionList(Watcher)}
    */
   public DocCollection getCollectionOrNull(String collectionName) {
     CollectionRef ref = collectionStates.get(collectionName);


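A hypothetical caller illustrating the documented contract (the helper and its
names are placeholders): the lookup may go to ZooKeeper for lazily-loaded
state, and returns null for a missing collection, so callers must handle null.

import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.DocCollection;

public class CollectionLookupSketch {
  static int shardCount(ClusterState clusterState, String name) {
    // May make a ZooKeeper call via CollectionRef.get() for lazy collections.
    DocCollection coll = clusterState.getCollectionOrNull(name);
    return coll == null ? 0 : coll.getSlices().size();
  }
}
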
[50/50] [abbrv] lucene-solr git commit: SOLR-445: Merge remote-tracking branch 'origin' into jira/SOLR-445

Posted by ho...@apache.org.
SOLR-445: Merge remote-tracking branch 'origin' into jira/SOLR-445


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d3e0bdd4
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d3e0bdd4
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d3e0bdd4

Branch: refs/heads/jira/SOLR-445
Commit: d3e0bdd4a72389609f7260faff3321470fc0ccd5
Parents: 0bd817d 007d41c
Author: Chris Hostetter <ho...@apache.org>
Authored: Fri Mar 11 15:18:41 2016 -0700
Committer: Chris Hostetter <ho...@apache.org>
Committed: Fri Mar 11 15:18:41 2016 -0700

----------------------------------------------------------------------
 dev-tools/scripts/smokeTestRelease.py           |   3 +-
 lucene/CHANGES.txt                              |   5 +
 .../lucene50/TestLucene50DocValuesFormat.java   |   2 +-
 .../index/TestBackwardsCompatibility.java       |   2 +-
 .../classification/ClassificationTestBase.java  |   7 +-
 .../DocumentClassificationTestBase.java         |   4 +-
 .../classification/utils/DataSplitterTest.java  |   5 +-
 .../org/apache/lucene/codecs/PointsWriter.java  |   6 +
 .../lucene54/Lucene54DocValuesConsumer.java     |  21 +-
 .../org/apache/lucene/document/DoublePoint.java |  16 +
 .../org/apache/lucene/document/FloatPoint.java  |  16 +
 .../org/apache/lucene/document/IntPoint.java    |  16 +
 .../org/apache/lucene/document/LongPoint.java   |  16 +
 .../org/apache/lucene/index/CheckIndex.java     |   7 +
 .../apache/lucene/index/CompositeReader.java    |   2 -
 .../org/apache/lucene/index/IndexReader.java    |   2 -
 .../org/apache/lucene/index/IndexWriter.java    |   1 +
 .../org/apache/lucene/index/MultiDocValues.java |   6 +-
 .../index/SingletonSortedSetDocValues.java      |   1 -
 .../index/SlowCompositeReaderWrapper.java       | 271 --------
 .../lucene/search/LegacyNumericRangeQuery.java  |  12 +-
 .../apache/lucene/search/PointInSetQuery.java   |  10 +-
 .../apache/lucene/search/PointRangeQuery.java   |  38 +-
 .../org/apache/lucene/util/bkd/BKDWriter.java   |   7 +-
 .../lucene/util/bkd/OfflinePointReader.java     |   3 +-
 .../lucene/util/bkd/OfflinePointWriter.java     |   3 -
 .../lucene54/TestLucene54DocValuesFormat.java   |   6 +-
 .../org/apache/lucene/index/Test2BPoints.java   | 164 +++++
 .../index/TestAllFilesCheckIndexHeader.java     |   8 +-
 .../index/TestAllFilesDetectTruncation.java     |   4 +-
 .../index/TestBinaryDocValuesUpdates.java       |  28 +-
 .../apache/lucene/index/TestCustomNorms.java    |   4 +-
 .../index/TestDemoParallelLeafReader.java       |  21 +-
 .../lucene/index/TestDirectoryReader.java       |   2 +-
 .../org/apache/lucene/index/TestDocValues.java  |  14 +-
 .../lucene/index/TestDocValuesIndexing.java     |  25 +-
 .../lucene/index/TestDocsAndPositions.java      |   4 +-
 .../apache/lucene/index/TestDocumentWriter.java |   2 +-
 .../apache/lucene/index/TestDuelingCodecs.java  |   4 +-
 .../index/TestExitableDirectoryReader.java      |  17 +-
 .../lucene/index/TestFilterLeafReader.java      |   8 +-
 .../test/org/apache/lucene/index/TestFlex.java  |   2 +-
 .../index/TestFlushByRamOrCountsPolicy.java     |   2 +-
 .../lucene/index/TestForceMergeForever.java     |   2 +-
 .../lucene/index/TestIndexReaderClose.java      |  52 +-
 .../apache/lucene/index/TestIndexWriter.java    |   6 +-
 .../lucene/index/TestLazyProxSkipping.java      |   2 +-
 .../apache/lucene/index/TestMultiDocValues.java |  16 +-
 .../lucene/index/TestMultiLevelSkipList.java    |   2 +-
 .../test/org/apache/lucene/index/TestNorms.java |   6 +-
 .../index/TestNumericDocValuesUpdates.java      |  30 +-
 .../org/apache/lucene/index/TestOmitNorms.java  |   8 +-
 .../apache/lucene/index/TestOmitPositions.java  |   2 +-
 .../org/apache/lucene/index/TestOmitTf.java     |   6 +-
 .../org/apache/lucene/index/TestOrdinalMap.java |   5 +-
 .../index/TestParallelCompositeReader.java      |  34 +-
 .../lucene/index/TestParallelLeafReader.java    |  28 +-
 .../index/TestParallelReaderEmptyIndex.java     |  22 +-
 .../lucene/index/TestParallelTermEnum.java      |   4 +-
 .../org/apache/lucene/index/TestPayloads.java   |  11 +-
 .../apache/lucene/index/TestPointValues.java    |  99 ++-
 .../lucene/index/TestPostingsOffsets.java       |   5 +-
 .../apache/lucene/index/TestReaderClosed.java   |   6 +-
 .../index/TestReaderWrapperDVTypeCheck.java     |   5 +-
 .../apache/lucene/index/TestRollingUpdates.java |   2 +-
 .../lucene/index/TestSegmentTermEnum.java       |   2 +-
 .../lucene/index/TestSortedSetDocValues.java    |  27 +
 .../apache/lucene/index/TestStressAdvance.java  |   2 +-
 .../lucene/index/TestSwappedIndexFiles.java     |   3 +-
 .../org/apache/lucene/index/TestTermsEnum.java  |   8 +-
 .../lucene/search/TestDisjunctionMaxQuery.java  |   6 +-
 .../lucene/search/TestMinShouldMatch2.java      |   2 +-
 .../lucene/search/TestMultiPhraseEnum.java      |   8 +-
 .../apache/lucene/search/TestPhraseQuery.java   |   4 +-
 .../apache/lucene/search/TestPointQueries.java  |  88 +++
 .../lucene/search/TestPositionIncrement.java    |  10 +-
 .../lucene/search/TestSimilarityProvider.java   |  11 +-
 .../apache/lucene/search/TestTermScorer.java    |   7 +-
 .../TestUsageTrackingFilterCachingPolicy.java   |  17 +-
 .../search/spans/TestFieldMaskingSpanQuery.java |  11 +-
 .../search/spans/TestNearSpansOrdered.java      |  29 +-
 .../lucene/search/spans/TestSpanCollection.java |   9 +-
 .../search/spans/TestSpanContainQuery.java      |   7 +-
 .../apache/lucene/search/spans/TestSpans.java   |  13 +-
 .../lucene/store/TestNRTCachingDirectory.java   |   2 +-
 .../org/apache/lucene/util/fst/TestFSTs.java    |   2 +-
 .../DefaultSortedSetDocValuesReaderState.java   |  55 +-
 .../facet/range/TestRangeFacetCounts.java       | 189 -----
 .../sortedset/TestSortedSetDocValuesFacets.java |  36 -
 .../lucene/search/grouping/TestGrouping.java    |  12 +-
 .../lucene/search/join/TestBlockJoin.java       |  25 +-
 .../apache/lucene/search/join/TestJoinUtil.java |   8 +-
 .../apache/lucene/index/memory/MemoryIndex.java |   6 +-
 .../memory/TestMemoryIndexAgainstRAMDir.java    |  12 +-
 .../index/SlowCompositeReaderWrapper.java       | 275 ++++++++
 .../apache/lucene/uninverting/FieldCache.java   | 113 ++-
 .../lucene/uninverting/FieldCacheImpl.java      | 164 ++++-
 .../lucene/uninverting/UninvertingReader.java   |  97 ++-
 .../index/TestSlowCompositeReaderWrapper.java   |  91 +++
 .../lucene/uninverting/TestDocTermOrds.java     |  14 +-
 .../lucene/uninverting/TestFieldCache.java      | 147 ++--
 .../uninverting/TestFieldCacheReopen.java       |  11 +-
 .../TestFieldCacheSanityChecker.java            |  10 +-
 .../lucene/uninverting/TestFieldCacheSort.java  | 684 +++++++++++++++++--
 .../uninverting/TestFieldCacheSortRandom.java   |   8 +-
 .../uninverting/TestFieldCacheVsDocValues.java  |   6 +-
 .../uninverting/TestFieldCacheWithThreads.java  |  13 +-
 .../uninverting/TestLegacyFieldCache.java       | 498 ++++++++++++++
 .../lucene/uninverting/TestNumericTerms32.java  |   6 +-
 .../lucene/uninverting/TestNumericTerms64.java  |   8 +-
 .../uninverting/TestUninvertingReader.java      |   6 +-
 .../lucene/queries/CommonTermsQueryTest.java    |   6 +-
 .../apache/lucene/queries/TermsQueryTest.java   |   4 +-
 .../function/TestSortedSetFieldSource.java      |   2 +-
 .../lucene/queries/payloads/PayloadHelper.java  |   3 +-
 .../queries/payloads/TestPayloadSpans.java      |  39 +-
 .../queries/payloads/TestPayloadTermQuery.java  |  11 +-
 .../flexible/core/nodes/package-info.java       |   4 +-
 .../flexible/standard/StandardQueryParser.java  |  29 +-
 .../LegacyNumericRangeQueryNodeBuilder.java     |  93 +++
 .../builders/NumericRangeQueryNodeBuilder.java  |  91 ---
 .../builders/PointRangeQueryNodeBuilder.java    | 137 ++++
 .../builders/StandardQueryTreeBuilder.java      |  12 +-
 .../standard/config/LegacyNumericConfig.java    | 166 +++++
 .../LegacyNumericFieldConfigListener.java       |  75 ++
 .../flexible/standard/config/NumericConfig.java | 164 -----
 .../config/NumericFieldConfigListener.java      |  73 --
 .../flexible/standard/config/PointsConfig.java  | 124 ++++
 .../standard/config/PointsConfigListener.java   |  65 ++
 .../config/StandardQueryConfigHandler.java      |  39 +-
 .../standard/nodes/LegacyNumericQueryNode.java  | 153 +++++
 .../nodes/LegacyNumericRangeQueryNode.java      | 153 +++++
 .../standard/nodes/NumericQueryNode.java        | 151 ----
 .../standard/nodes/NumericRangeQueryNode.java   | 151 ----
 .../flexible/standard/nodes/PointQueryNode.java | 151 ++++
 .../standard/nodes/PointRangeQueryNode.java     | 124 ++++
 .../LegacyNumericQueryNodeProcessor.java        | 154 +++++
 .../LegacyNumericRangeQueryNodeProcessor.java   | 170 +++++
 .../processors/NumericQueryNodeProcessor.java   | 152 -----
 .../NumericRangeQueryNodeProcessor.java         | 168 -----
 .../processors/PointQueryNodeProcessor.java     | 136 ++++
 .../PointRangeQueryNodeProcessor.java           | 148 ++++
 .../StandardQueryNodeProcessorPipeline.java     |   6 +-
 .../lucene/queryparser/xml/CoreParser.java      |   1 +
 .../LegacyNumericRangeQueryBuilder.java         |   2 +
 .../xml/builders/PointRangeQueryBuilder.java    |  95 +++
 .../standard/TestLegacyNumericQueryParser.java  | 535 +++++++++++++++
 .../standard/TestNumericQueryParser.java        | 535 ---------------
 .../flexible/standard/TestPointQueryParser.java |  82 +++
 .../lucene/queryparser/xml/PointRangeQuery.xml  |  31 +
 .../lucene/queryparser/xml/TestCoreParser.java  |   9 +-
 .../lucene/document/InetAddressPoint.java       |  23 +-
 .../lucene/document/TestBigIntegerPoint.java    |  15 +
 .../lucene/document/TestInetAddressPoint.java   |  21 +
 .../apache/lucene/document/TestLatLonPoint.java |  20 +
 .../apache/lucene/spatial/SpatialTestCase.java  |   4 +-
 .../spatial3d/PointInGeo3DShapeQuery.java       |   4 +-
 .../apache/lucene/spatial3d/TestGeo3DPoint.java |  14 +
 .../analyzing/TestFreeTextSuggester.java        |   2 +-
 .../index/BaseDocValuesFormatTestCase.java      |  64 +-
 .../index/BaseIndexFileFormatTestCase.java      |   6 +-
 .../lucene/index/BasePointsFormatTestCase.java  |   4 +-
 .../index/BasePostingsFormatTestCase.java       |  66 +-
 .../index/BaseStoredFieldsFormatTestCase.java   |   2 +-
 .../index/BaseTermVectorsFormatTestCase.java    |  12 +-
 .../ThreadedIndexingAndSearchingTestCase.java   |   4 +-
 .../org/apache/lucene/search/QueryUtils.java    |   6 +-
 .../lucene/search/ShardSearchingTestBase.java   |   2 +-
 .../lucene/search/spans/MultiSpansWrapper.java  |  51 --
 .../lucene/store/MockDirectoryWrapper.java      |   9 +-
 .../org/apache/lucene/util/LineFileDocs.java    |  58 +-
 .../org/apache/lucene/util/LuceneTestCase.java  |  53 +-
 .../java/org/apache/lucene/util/TestUtil.java   |  14 +-
 .../lucene/analysis/TestMockAnalyzer.java       |   2 +-
 .../TestCompressingStoredFieldsFormat.java      |   5 +-
 .../TestCompressingTermVectorsFormat.java       |   8 +-
 .../lucene/index/TestAssertingLeafReader.java   |   5 +-
 lucene/tools/javadoc/ecj.javadocs.prefs         |   2 +-
 lucene/tools/junit4/cached-timehints.txt        |   2 +-
 solr/CHANGES.txt                                |  19 +
 .../cloud/OverseerCollectionMessageHandler.java |   1 +
 .../org/apache/solr/core/CoreContainer.java     |  15 +-
 .../src/java/org/apache/solr/core/SolrCore.java |  10 +-
 .../solr/handler/admin/ClusterStatus.java       |  16 +-
 .../solr/handler/admin/CollectionsHandler.java  | 103 ++-
 .../solr/handler/admin/LukeRequestHandler.java  |  12 +-
 .../java/org/apache/solr/schema/EnumField.java  |   2 +-
 .../java/org/apache/solr/schema/TrieField.java  |   8 +-
 .../org/apache/solr/update/SolrCoreState.java   |   8 +-
 .../org/apache/solr/update/VersionInfo.java     |   8 +-
 .../conf/schema-non-stored-docvalues.xml        | 131 ++--
 .../solr/cloud/CollectionsAPISolrJTest.java     |   2 +-
 .../org/apache/solr/cloud/DeleteStatusTest.java | 172 +++--
 .../apache/solr/cloud/SolrCloudExampleTest.java |   1 -
 .../apache/solr/cloud/TestCollectionAPI.java    |  19 +
 .../solr/schema/TestUseDocValuesAsStored.java   |  60 +-
 .../solr/client/solrj/io/sql/ResultSetImpl.java |   2 +-
 .../client/solrj/io/stream/DaemonStream.java    |   6 +-
 .../solrj/request/CollectionAdminRequest.java   | 568 +++++++++++++--
 .../client/solrj/request/CoreAdminRequest.java  |  23 +-
 .../solrj/util/SolrIdentifierValidator.java     |  28 +-
 .../apache/solr/common/cloud/ClusterState.java  |   4 +-
 .../common/params/CollectionAdminParams.java    |   2 +
 .../solr/client/solrj/io/sql/JdbcTest.java      |  15 +
 .../solrj/io/stream/StreamExpressionTest.java   |   8 +-
 .../request/TestCollectionAdminRequest.java     |   9 +-
 .../client/solrj/request/TestCoreAdmin.java     |  13 +-
 solr/webapp/web/css/angular/common.css          |  13 +-
 solr/webapp/web/css/styles/common.css           |  16 +-
 solr/webapp/web/index.html                      |   6 +-
 solr/webapp/web/old.html                        |   5 +-
 solr/webapp/web/partials/query.html             |  12 +-
 212 files changed, 6740 insertions(+), 3211 deletions(-)
----------------------------------------------------------------------



[21/50] [abbrv] lucene-solr git commit: LUCENE-7089, LUCENE-7075: add points to flexible queryparser to replace legacy numerics support

Posted by ho...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestNumericQueryParser.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestNumericQueryParser.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestNumericQueryParser.java
deleted file mode 100644
index c29573b..0000000
--- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestNumericQueryParser.java
+++ /dev/null
@@ -1,535 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.queryparser.flexible.standard;
-
-import java.io.IOException;
-import java.text.DateFormat;
-import java.text.NumberFormat;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Collections;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Random;
-import java.util.TimeZone;
-
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.LegacyDoubleField;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.FieldType.LegacyNumericType;
-import org.apache.lucene.document.FieldType;
-import org.apache.lucene.document.LegacyFloatField;
-import org.apache.lucene.document.LegacyIntField;
-import org.apache.lucene.document.LegacyLongField;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
-import org.apache.lucene.queryparser.flexible.core.parser.EscapeQuerySyntax;
-import org.apache.lucene.queryparser.flexible.standard.config.NumberDateFormat;
-import org.apache.lucene.queryparser.flexible.standard.config.NumericConfig;
-import org.apache.lucene.queryparser.flexible.standard.parser.EscapeQuerySyntaxImpl;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.TestUtil;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-public class TestNumericQueryParser extends LuceneTestCase {
-  
-  private static enum NumberType {
-    NEGATIVE, ZERO, POSITIVE;
-  }
-  
-  final private static int[] DATE_STYLES = {DateFormat.FULL, DateFormat.LONG,
-      DateFormat.MEDIUM, DateFormat.SHORT};
-  
-  final private static int PRECISION_STEP = 8;
-  final private static String FIELD_NAME = "field";
-  private static Locale LOCALE;
-  private static TimeZone TIMEZONE;
-  private static Map<String,Number> RANDOM_NUMBER_MAP;
-  private static EscapeQuerySyntax ESCAPER = new EscapeQuerySyntaxImpl();
-  final private static String DATE_FIELD_NAME = "date";
-  private static int DATE_STYLE;
-  private static int TIME_STYLE;
-  
-  private static Analyzer ANALYZER;
-  
-  private static NumberFormat NUMBER_FORMAT;
-  
-  private static StandardQueryParser qp;
-  
-  private static NumberDateFormat DATE_FORMAT;
-  
-  private static Directory directory = null;
-  private static IndexReader reader = null;
-  private static IndexSearcher searcher = null;
-  
-  private static boolean checkDateFormatSanity(DateFormat dateFormat, long date) {
-    try {
-      return date == dateFormat.parse(dateFormat.format(new Date(date)))
-        .getTime();
-    } catch (ParseException e) {
-      return false;
-    }
-  }
-  
-  @BeforeClass
-  public static void beforeClass() throws Exception {
-    ANALYZER = new MockAnalyzer(random());
-    
-    qp = new StandardQueryParser(ANALYZER);
-    
-    final HashMap<String,Number> randomNumberMap = new HashMap<>();
-    
-    SimpleDateFormat dateFormat;
-    long randomDate;
-    boolean dateFormatSanityCheckPass;
-    int count = 0;
-    do {
-      if (count > 100) {
-        fail("This test has problems to find a sane random DateFormat/NumberFormat. Stopped trying after 100 iterations.");
-      }
-      
-      dateFormatSanityCheckPass = true;
-      LOCALE = randomLocale(random());
-      TIMEZONE = randomTimeZone(random());
-      DATE_STYLE = randomDateStyle(random());
-      TIME_STYLE = randomDateStyle(random());
-      
-      // assumes localized date pattern will have at least year, month, day,
-      // hour, minute
-      dateFormat = (SimpleDateFormat) DateFormat.getDateTimeInstance(
-          DATE_STYLE, TIME_STYLE, LOCALE);
-      
-      // not all date patterns includes era, full year, timezone and second,
-      // so we add them here
-      dateFormat.applyPattern(dateFormat.toPattern() + " G s Z yyyy");
-      dateFormat.setTimeZone(TIMEZONE);
-      
-      DATE_FORMAT = new NumberDateFormat(dateFormat);
-      
-      do {
-        randomDate = random().nextLong();
-        
-        // prune date value so it doesn't pass in insane values to some
-        // calendars.
-        randomDate = randomDate % 3400000000000l;
-        
-        // truncate to second
-        randomDate = (randomDate / 1000L) * 1000L;
-        
-        // only positive values
-        randomDate = Math.abs(randomDate);
-      } while (randomDate == 0L);
-      
-      dateFormatSanityCheckPass &= checkDateFormatSanity(dateFormat, randomDate);
-      
-      dateFormatSanityCheckPass &= checkDateFormatSanity(dateFormat, 0);
-      
-      dateFormatSanityCheckPass &= checkDateFormatSanity(dateFormat,
-          -randomDate);
-      
-      count++;
-    } while (!dateFormatSanityCheckPass);
-    
-    NUMBER_FORMAT = NumberFormat.getNumberInstance(LOCALE);
-    NUMBER_FORMAT.setMaximumFractionDigits((random().nextInt() & 20) + 1);
-    NUMBER_FORMAT.setMinimumFractionDigits((random().nextInt() & 20) + 1);
-    NUMBER_FORMAT.setMaximumIntegerDigits((random().nextInt() & 20) + 1);
-    NUMBER_FORMAT.setMinimumIntegerDigits((random().nextInt() & 20) + 1);
-    
-    double randomDouble;
-    long randomLong;
-    int randomInt;
-    float randomFloat;
-    
-    while ((randomLong = normalizeNumber(Math.abs(random().nextLong()))
-        .longValue()) == 0L)
-      ;
-    while ((randomDouble = normalizeNumber(Math.abs(random().nextDouble()))
-        .doubleValue()) == 0.0)
-      ;
-    while ((randomFloat = normalizeNumber(Math.abs(random().nextFloat()))
-        .floatValue()) == 0.0f)
-      ;
-    while ((randomInt = normalizeNumber(Math.abs(random().nextInt())).intValue()) == 0)
-      ;
-    
-    randomNumberMap.put(LegacyNumericType.LONG.name(), randomLong);
-    randomNumberMap.put(FieldType.LegacyNumericType.INT.name(), randomInt);
-    randomNumberMap.put(LegacyNumericType.FLOAT.name(), randomFloat);
-    randomNumberMap.put(LegacyNumericType.DOUBLE.name(), randomDouble);
-    randomNumberMap.put(DATE_FIELD_NAME, randomDate);
-    
-    RANDOM_NUMBER_MAP = Collections.unmodifiableMap(randomNumberMap);
-    
-    directory = newDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(random(), directory,
-        newIndexWriterConfig(new MockAnalyzer(random()))
-            .setMaxBufferedDocs(TestUtil.nextInt(random(), 50, 1000))
-            .setMergePolicy(newLogMergePolicy()));
-    
-    Document doc = new Document();
-    HashMap<String,NumericConfig> numericConfigMap = new HashMap<>();
-    HashMap<String,Field> numericFieldMap = new HashMap<>();
-    qp.setNumericConfigMap(numericConfigMap);
-    
-    for (LegacyNumericType type : LegacyNumericType.values()) {
-      numericConfigMap.put(type.name(), new NumericConfig(PRECISION_STEP,
-          NUMBER_FORMAT, type));
-
-      FieldType ft = new FieldType(LegacyIntField.TYPE_NOT_STORED);
-      ft.setNumericType(type);
-      ft.setStored(true);
-      ft.setNumericPrecisionStep(PRECISION_STEP);
-      ft.freeze();
-      final Field field;
-
-      switch(type) {
-      case INT:
-        field = new LegacyIntField(type.name(), 0, ft);
-        break;
-      case FLOAT:
-        field = new LegacyFloatField(type.name(), 0.0f, ft);
-        break;
-      case LONG:
-        field = new LegacyLongField(type.name(), 0l, ft);
-        break;
-      case DOUBLE:
-        field = new LegacyDoubleField(type.name(), 0.0, ft);
-        break;
-      default:
-        fail();
-        field = null;
-      }
-      numericFieldMap.put(type.name(), field);
-      doc.add(field);
-    }
-    
-    numericConfigMap.put(DATE_FIELD_NAME, new NumericConfig(PRECISION_STEP,
-        DATE_FORMAT, LegacyNumericType.LONG));
-    FieldType ft = new FieldType(LegacyLongField.TYPE_NOT_STORED);
-    ft.setStored(true);
-    ft.setNumericPrecisionStep(PRECISION_STEP);
-    LegacyLongField dateField = new LegacyLongField(DATE_FIELD_NAME, 0l, ft);
-    numericFieldMap.put(DATE_FIELD_NAME, dateField);
-    doc.add(dateField);
-    
-    for (NumberType numberType : NumberType.values()) {
-      setFieldValues(numberType, numericFieldMap);
-      if (VERBOSE) System.out.println("Indexing document: " + doc);
-      writer.addDocument(doc);
-    }
-    
-    reader = writer.getReader();
-    searcher = newSearcher(reader);
-    writer.close();
-    
-  }
-  
-  private static Number getNumberType(NumberType numberType, String fieldName) {
-    
-    if (numberType == null) {
-      return null;
-    }
-    
-    switch (numberType) {
-      
-      case POSITIVE:
-        return RANDOM_NUMBER_MAP.get(fieldName);
-        
-      case NEGATIVE:
-        Number number = RANDOM_NUMBER_MAP.get(fieldName);
-        
-        if (LegacyNumericType.LONG.name().equals(fieldName)
-            || DATE_FIELD_NAME.equals(fieldName)) {
-          number = -number.longValue();
-          
-        } else if (FieldType.LegacyNumericType.DOUBLE.name().equals(fieldName)) {
-          number = -number.doubleValue();
-          
-        } else if (FieldType.LegacyNumericType.FLOAT.name().equals(fieldName)) {
-          number = -number.floatValue();
-          
-        } else if (LegacyNumericType.INT.name().equals(fieldName)) {
-          number = -number.intValue();
-          
-        } else {
-          throw new IllegalArgumentException("field name not found: "
-              + fieldName);
-        }
-        
-        return number;
-        
-      default:
-        return 0;
-        
-    }
-    
-  }
-  
-  private static void setFieldValues(NumberType numberType,
-      HashMap<String,Field> numericFieldMap) {
-    
-    Number number = getNumberType(numberType, LegacyNumericType.DOUBLE
-        .name());
-    numericFieldMap.get(LegacyNumericType.DOUBLE.name()).setDoubleValue(
-        number.doubleValue());
-    
-    number = getNumberType(numberType, FieldType.LegacyNumericType.INT.name());
-    numericFieldMap.get(FieldType.LegacyNumericType.INT.name()).setIntValue(
-        number.intValue());
-    
-    number = getNumberType(numberType, LegacyNumericType.LONG.name());
-    numericFieldMap.get(FieldType.LegacyNumericType.LONG.name()).setLongValue(
-        number.longValue());
-    
-    number = getNumberType(numberType, FieldType.LegacyNumericType.FLOAT.name());
-    numericFieldMap.get(FieldType.LegacyNumericType.FLOAT.name()).setFloatValue(
-        number.floatValue());
-    
-    number = getNumberType(numberType, DATE_FIELD_NAME);
-    numericFieldMap.get(DATE_FIELD_NAME).setLongValue(number.longValue());
-  }
-  
-  private static int randomDateStyle(Random random) {
-    return DATE_STYLES[random.nextInt(DATE_STYLES.length)];
-  }
-  
-  @Test
-  public void testInclusiveNumericRange() throws Exception {
-    assertRangeQuery(NumberType.ZERO, NumberType.ZERO, true, true, 1);
-    assertRangeQuery(NumberType.ZERO, NumberType.POSITIVE, true, true, 2);
-    assertRangeQuery(NumberType.NEGATIVE, NumberType.ZERO, true, true, 2);
-    assertRangeQuery(NumberType.NEGATIVE, NumberType.POSITIVE, true, true, 3);
-    assertRangeQuery(NumberType.NEGATIVE, NumberType.NEGATIVE, true, true, 1);
-  }
-  
-   @Test
-  // test disabled since standard syntax parser does not work with inclusive and
-  // exclusive at the same time
-  public void testInclusiveLowerNumericRange() throws Exception {
-    assertRangeQuery(NumberType.NEGATIVE, NumberType.ZERO, false, true, 1);
-    assertRangeQuery(NumberType.ZERO, NumberType.POSITIVE, false, true, 1);
-    assertRangeQuery(NumberType.NEGATIVE, NumberType.POSITIVE, false, true, 2);
-    assertRangeQuery(NumberType.NEGATIVE, NumberType.NEGATIVE, false, true, 0);
-   }
-  
-  @Test
-  // test disabled since standard syntax parser does not work with inclusive and
-  // exclusive at the same time
-  public void testInclusiveUpperNumericRange() throws Exception {
-    assertRangeQuery(NumberType.NEGATIVE, NumberType.ZERO, true, false, 1);
-    assertRangeQuery(NumberType.ZERO, NumberType.POSITIVE, true, false, 1);
-    assertRangeQuery(NumberType.NEGATIVE, NumberType.POSITIVE, true, false, 2);
-    assertRangeQuery(NumberType.NEGATIVE, NumberType.NEGATIVE, true, false, 0);
-  }
-  
-  @Test
-  public void testExclusiveNumericRange() throws Exception {
-    assertRangeQuery(NumberType.ZERO, NumberType.ZERO, false, false, 0);
-    assertRangeQuery(NumberType.ZERO, NumberType.POSITIVE, false, false, 0);
-    assertRangeQuery(NumberType.NEGATIVE, NumberType.ZERO, false, false, 0);
-    assertRangeQuery(NumberType.NEGATIVE, NumberType.POSITIVE, false, false, 1);
-    assertRangeQuery(NumberType.NEGATIVE, NumberType.NEGATIVE, false, false, 0);
-  }
-  
-  @Test
-  public void testOpenRangeNumericQuery() throws Exception {
-    assertOpenRangeQuery(NumberType.ZERO, "<", 1);
-    assertOpenRangeQuery(NumberType.POSITIVE, "<", 2);
-    assertOpenRangeQuery(NumberType.NEGATIVE, "<", 0);
-    
-    assertOpenRangeQuery(NumberType.ZERO, "<=", 2);
-    assertOpenRangeQuery(NumberType.POSITIVE, "<=", 3);
-    assertOpenRangeQuery(NumberType.NEGATIVE, "<=", 1);
-    
-    assertOpenRangeQuery(NumberType.ZERO, ">", 1);
-    assertOpenRangeQuery(NumberType.POSITIVE, ">", 0);
-    assertOpenRangeQuery(NumberType.NEGATIVE, ">", 2);
-    
-    assertOpenRangeQuery(NumberType.ZERO, ">=", 2);
-    assertOpenRangeQuery(NumberType.POSITIVE, ">=", 1);
-    assertOpenRangeQuery(NumberType.NEGATIVE, ">=", 3);
-    
-    assertOpenRangeQuery(NumberType.NEGATIVE, "=", 1);
-    assertOpenRangeQuery(NumberType.ZERO, "=", 1);
-    assertOpenRangeQuery(NumberType.POSITIVE, "=", 1);
-    
-    assertRangeQuery(NumberType.NEGATIVE, null, true, true, 3);
-    assertRangeQuery(NumberType.NEGATIVE, null, false, true, 2);
-    assertRangeQuery(NumberType.POSITIVE, null, true, false, 1);
-    assertRangeQuery(NumberType.ZERO, null, false, false, 1);
-
-    assertRangeQuery(null, NumberType.POSITIVE, true, true, 3);
-    assertRangeQuery(null, NumberType.POSITIVE, true, false, 2);
-    assertRangeQuery(null, NumberType.NEGATIVE, false, true, 1);
-    assertRangeQuery(null, NumberType.ZERO, false, false, 1);
-    
-    assertRangeQuery(null, null, false, false, 3);
-    assertRangeQuery(null, null, true, true, 3);
-    
-  }
-  
-  @Test
-  public void testSimpleNumericQuery() throws Exception {
-    assertSimpleQuery(NumberType.ZERO, 1);
-    assertSimpleQuery(NumberType.POSITIVE, 1);
-    assertSimpleQuery(NumberType.NEGATIVE, 1);
-  }
-  
-  public void assertRangeQuery(NumberType lowerType, NumberType upperType,
-      boolean lowerInclusive, boolean upperInclusive, int expectedDocCount)
-      throws QueryNodeException, IOException {
-    
-    StringBuilder sb = new StringBuilder();
-    
-    String lowerInclusiveStr = (lowerInclusive ? "[" : "{");
-    String upperInclusiveStr = (upperInclusive ? "]" : "}");
-    
-    for (LegacyNumericType type : LegacyNumericType.values()) {
-      String lowerStr = numberToString(getNumberType(lowerType, type.name()));
-      String upperStr = numberToString(getNumberType(upperType, type.name()));
-      
-      sb.append("+").append(type.name()).append(':').append(lowerInclusiveStr)
-          .append('"').append(lowerStr).append("\" TO \"").append(upperStr)
-          .append('"').append(upperInclusiveStr).append(' ');
-    }
-    
-    Number lowerDateNumber = getNumberType(lowerType, DATE_FIELD_NAME);
-    Number upperDateNumber = getNumberType(upperType, DATE_FIELD_NAME);
-    String lowerDateStr;
-    String upperDateStr;
-    
-    if (lowerDateNumber != null) {
-      lowerDateStr = ESCAPER.escape(
-          DATE_FORMAT.format(new Date(lowerDateNumber.longValue())), LOCALE,
-          EscapeQuerySyntax.Type.STRING).toString();
-      
-    } else {
-      lowerDateStr = "*";
-    }
-    
-    if (upperDateNumber != null) {
-    upperDateStr = ESCAPER.escape(
-          DATE_FORMAT.format(new Date(upperDateNumber.longValue())), LOCALE,
-          EscapeQuerySyntax.Type.STRING).toString();
-    
-    } else {
-      upperDateStr = "*";
-    }
-    
-    sb.append("+").append(DATE_FIELD_NAME).append(':')
-        .append(lowerInclusiveStr).append('"').append(lowerDateStr).append(
-            "\" TO \"").append(upperDateStr).append('"').append(
-            upperInclusiveStr);
-    
-    testQuery(sb.toString(), expectedDocCount);
-    
-  }
-  
-  public void assertOpenRangeQuery(NumberType boundType, String operator, int expectedDocCount)
-      throws QueryNodeException, IOException {
-
-    StringBuilder sb = new StringBuilder();
-    
-    for (LegacyNumericType type : FieldType.LegacyNumericType.values()) {
-      String boundStr = numberToString(getNumberType(boundType, type.name()));
-      
-      sb.append("+").append(type.name()).append(operator).append('"').append(boundStr).append('"').append(' ');
-    }
-    
-    String boundDateStr = ESCAPER.escape(
-        DATE_FORMAT.format(new Date(getNumberType(boundType, DATE_FIELD_NAME)
-            .longValue())), LOCALE, EscapeQuerySyntax.Type.STRING).toString();
-    
-    sb.append("+").append(DATE_FIELD_NAME).append(operator).append('"').append(boundDateStr).append('"');
-    
-    testQuery(sb.toString(), expectedDocCount);
-  }
-  
-  public void assertSimpleQuery(NumberType numberType, int expectedDocCount)
-      throws QueryNodeException, IOException {
-    StringBuilder sb = new StringBuilder();
-    
-    for (LegacyNumericType type : LegacyNumericType.values()) {
-      String numberStr = numberToString(getNumberType(numberType, type.name()));
-      sb.append('+').append(type.name()).append(":\"").append(numberStr)
-          .append("\" ");
-    }
-    
-    String dateStr = ESCAPER.escape(
-        DATE_FORMAT.format(new Date(getNumberType(numberType, DATE_FIELD_NAME)
-            .longValue())), LOCALE, EscapeQuerySyntax.Type.STRING).toString();
-    
-    sb.append('+').append(DATE_FIELD_NAME).append(":\"").append(dateStr)
-        .append('"');
-    
-    testQuery(sb.toString(), expectedDocCount);
-    
-  }
-  
-  private void testQuery(String queryStr, int expectedDocCount)
-      throws QueryNodeException, IOException {
-    if (VERBOSE) System.out.println("Parsing: " + queryStr);
-    
-    Query query = qp.parse(queryStr, FIELD_NAME);
-    if (VERBOSE) System.out.println("Querying: " + query);
-    TopDocs topDocs = searcher.search(query, 1000);
-    
-    String msg = "Query <" + queryStr + "> retrieved " + topDocs.totalHits
-        + " document(s), " + expectedDocCount + " document(s) expected.";
-    
-    if (VERBOSE) System.out.println(msg);
-    
-    assertEquals(msg, expectedDocCount, topDocs.totalHits);
-  }
-  
-  private static String numberToString(Number number) {
-    return number == null ? "*" : ESCAPER.escape(NUMBER_FORMAT.format(number),
-        LOCALE, EscapeQuerySyntax.Type.STRING).toString();
-  }
-  
-  private static Number normalizeNumber(Number number) throws ParseException {
-    return NUMBER_FORMAT.parse(NUMBER_FORMAT.format(number));
-  }
-  
-  @AfterClass
-  public static void afterClass() throws Exception {
-    searcher = null;
-    reader.close();
-    reader = null;
-    directory.close();
-    directory = null;
-    qp = null;
-    LOCALE = null;
-    TIMEZONE = null;
-    NUMBER_FORMAT = null;
-    DATE_FORMAT = null;
-    ESCAPER = null;
-  }
-  
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestPointQueryParser.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestPointQueryParser.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestPointQueryParser.java
new file mode 100644
index 0000000..323b0ff
--- /dev/null
+++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestPointQueryParser.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.queryparser.flexible.standard;
+
+import java.text.NumberFormat;
+import java.util.HashMap;
+import java.util.Locale;
+import java.util.Map;
+
+import org.apache.lucene.document.DoublePoint;
+import org.apache.lucene.document.FloatPoint;
+import org.apache.lucene.document.IntPoint;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.queryparser.flexible.standard.config.PointsConfig;
+import org.apache.lucene.util.LuceneTestCase;
+
+/** Simple test for point field integration into the flexible QP */
+public class TestPointQueryParser extends LuceneTestCase {
+  
+  public void testIntegers() throws Exception {
+    StandardQueryParser parser = new StandardQueryParser();
+    Map<String,PointsConfig> pointsConfig = new HashMap<>();
+    pointsConfig.put("intField", new PointsConfig(NumberFormat.getIntegerInstance(Locale.ROOT), Integer.class));
+    parser.setPointsConfigMap(pointsConfig);
+    
+    assertEquals(IntPoint.newRangeQuery("intField", 1, 3),
+                 parser.parse("intField:[1 TO 3]", "body"));
+    assertEquals(IntPoint.newRangeQuery("intField", 1, 1),
+                 parser.parse("intField:1", "body"));
+  }
+  
+  public void testLongs() throws Exception {
+    StandardQueryParser parser = new StandardQueryParser();
+    Map<String,PointsConfig> pointsConfig = new HashMap<>();
+    pointsConfig.put("longField", new PointsConfig(NumberFormat.getIntegerInstance(Locale.ROOT), Long.class));
+    parser.setPointsConfigMap(pointsConfig);
+    
+    assertEquals(LongPoint.newRangeQuery("longField", 1, 3),
+                 parser.parse("longField:[1 TO 3]", "body"));
+    assertEquals(LongPoint.newRangeQuery("longField", 1, 1),
+                 parser.parse("longField:1", "body"));
+  }
+  
+  public void testFloats() throws Exception {
+    StandardQueryParser parser = new StandardQueryParser();
+    Map<String,PointsConfig> pointsConfig = new HashMap<>();
+    pointsConfig.put("floatField", new PointsConfig(NumberFormat.getNumberInstance(Locale.ROOT), Float.class));
+    parser.setPointsConfigMap(pointsConfig);
+    
+    assertEquals(FloatPoint.newRangeQuery("floatField", 1.5F, 3.6F),
+                 parser.parse("floatField:[1.5 TO 3.6]", "body"));
+    assertEquals(FloatPoint.newRangeQuery("floatField", 1.5F, 1.5F),
+                 parser.parse("floatField:1.5", "body"));
+  }
+  
+  public void testDoubles() throws Exception {
+    StandardQueryParser parser = new StandardQueryParser();
+    Map<String,PointsConfig> pointsConfig = new HashMap<>();
+    pointsConfig.put("doubleField", new PointsConfig(NumberFormat.getNumberInstance(Locale.ROOT), Double.class));
+    parser.setPointsConfigMap(pointsConfig);
+    
+    assertEquals(DoublePoint.newRangeQuery("doubleField", 1.5D, 3.6D),
+                 parser.parse("doubleField:[1.5 TO 3.6]", "body"));
+    assertEquals(DoublePoint.newRangeQuery("floatField", 1.5D, 1.5D),
+                 parser.parse("doubleField:1.5", "body"));
+  }
+
+}
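
For orientation, here is a hedged end-to-end sketch of what these assertions exercise: parse a range against a PointsConfig-mapped field and run the resulting query over a real index. It is illustrative only -- the field name, analyzer choice (WhitespaceAnalyzer from analyzers-common), and in-memory directory are assumptions, not code from this commit.

    import java.text.NumberFormat;
    import java.util.HashMap;
    import java.util.Locale;
    import java.util.Map;

    import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.IntPoint;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.queryparser.flexible.standard.StandardQueryParser;
    import org.apache.lucene.queryparser.flexible.standard.config.PointsConfig;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.store.RAMDirectory;

    public class PointsParserSketch {
      public static void main(String[] args) throws Exception {
        // index a single document with a dimensional (points) int field
        RAMDirectory dir = new RAMDirectory();
        try (IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new WhitespaceAnalyzer()))) {
          Document doc = new Document();
          doc.add(new IntPoint("intField", 2));
          w.addDocument(doc);
        }

        // map the field to an int PointsConfig, exactly as the tests above do
        StandardQueryParser parser = new StandardQueryParser();
        Map<String,PointsConfig> pointsConfig = new HashMap<>();
        pointsConfig.put("intField",
            new PointsConfig(NumberFormat.getIntegerInstance(Locale.ROOT), Integer.class));
        parser.setPointsConfigMap(pointsConfig);

        try (DirectoryReader reader = DirectoryReader.open(dir)) {
          IndexSearcher searcher = new IndexSearcher(reader);
          // "intField:[1 TO 3]" parses to IntPoint.newRangeQuery("intField", 1, 3)
          System.out.println(
              searcher.search(parser.parse("intField:[1 TO 3]", "body"), 10).totalHits); // 1
        }
      }
    }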

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/tools/junit4/cached-timehints.txt
----------------------------------------------------------------------
diff --git a/lucene/tools/junit4/cached-timehints.txt b/lucene/tools/junit4/cached-timehints.txt
index 9c8b22a..f2b8974 100644
--- a/lucene/tools/junit4/cached-timehints.txt
+++ b/lucene/tools/junit4/cached-timehints.txt
@@ -548,7 +548,7 @@ org.apache.lucene.queryparser.flexible.spans.TestSpanQueryParser=711,339,113,55,
 org.apache.lucene.queryparser.flexible.spans.TestSpanQueryParserSimpleSample=51,644,92,32,129,60,21
 org.apache.lucene.queryparser.flexible.standard.TestMultiAnalyzerQPHelper=44,84,87,34,260,35,229
 org.apache.lucene.queryparser.flexible.standard.TestMultiFieldQPHelper=1152,1779,1888,384,179,1665,445
-org.apache.lucene.queryparser.flexible.standard.TestNumericQueryParser=344,496,451,1373,733,1420,367
+org.apache.lucene.queryparser.flexible.standard.TestLegacyNumericQueryParser=344,496,451,1373,733,1420,367
 org.apache.lucene.queryparser.flexible.standard.TestQPHelper=287,676,989,2137,860,586,612
 org.apache.lucene.queryparser.flexible.standard.TestStandardQP=1643,430,2182,2193,600,1506,741
 org.apache.lucene.queryparser.surround.query.SrndQueryTest=1062,92,92,212,65,95,113


[44/50] [abbrv] lucene-solr git commit: fix int overflow bug in BKDWriter that prevented it from indexing > 2.1B points; try to improve runtime of Test2BPoints

Posted by ho...@apache.org.
fix int overflow bug in BKDWriter that prevented it from indexing > 2.1B points; try to improve runtime of Test2BPoints


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/1e05d3be
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/1e05d3be
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/1e05d3be

Branch: refs/heads/jira/SOLR-445
Commit: 1e05d3be76e0dcd7d0e1a2bb2bb89eb4019e33cc
Parents: fafbb2b
Author: Mike McCandless <mi...@apache.org>
Authored: Fri Mar 11 06:48:30 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Fri Mar 11 06:48:30 2016 -0500

----------------------------------------------------------------------
 .../org/apache/lucene/util/bkd/BKDWriter.java   |  4 +-
 .../org/apache/lucene/index/Test2BPoints.java   | 62 +++++++++++++++-----
 2 files changed, 50 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1e05d3be/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java
index aa7e5dc..f5a2d81 100644
--- a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java
@@ -1082,7 +1082,7 @@ public class BKDWriter implements Closeable {
 
       // Second pass: write the full values:
       byte[] lastPackedValue = new byte[bytesPerDim];
-      for (int i=0;i<source.count;i++) {
+      for (int i=0;i<count;i++) {
         // TODO: we could do bulk copying here, avoiding the intermediate copy:
         heapSource.readPackedValue(Math.toIntExact(source.start + i), scratchPackedValue);
         assert numDims != 1 || valueInOrder(i, lastPackedValue, scratchPackedValue);
@@ -1143,7 +1143,7 @@ public class BKDWriter implements Closeable {
 
           // Partition this source according to how the splitDim split the values:
           int nextRightCount = 0;
-          for (int i=0;i<source.count;i++) {
+          for (long i=0;i<source.count;i++) {
             boolean result = reader.next();
             assert result;
             byte[] packedValue = reader.packedValue();
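
The second hunk is the overflow fix itself: source.count is a long, so once a single source held more than Integer.MAX_VALUE (~2.1B) points, the old int counter wrapped to Integer.MIN_VALUE and the widened comparison i < source.count stayed true. A hedged sketch of the failure mode, with an illustrative count:

    // count plays the role of BKDWriter's source.count (a long)
    long count = 3_000_000_000L; // > Integer.MAX_VALUE, e.g. 3B indexed points

    // BUG (before this commit): after 2^31 - 1 iterations, i++ wraps to
    // Integer.MIN_VALUE; compared as long it is still < count, so the
    // loop can never reach count.
    //   for (int i = 0; i < count; i++) { ... }

    // FIX (this commit): widen the counter so it can actually reach count.
    for (long i = 0; i < count; i++) {
      // visits all 3_000_000_000 values exactly once
    }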

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1e05d3be/lucene/core/src/test/org/apache/lucene/index/Test2BPoints.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/Test2BPoints.java b/lucene/core/src/test/org/apache/lucene/index/Test2BPoints.java
index bfe387e..43207b8 100644
--- a/lucene/core/src/test/org/apache/lucene/index/Test2BPoints.java
+++ b/lucene/core/src/test/org/apache/lucene/index/Test2BPoints.java
@@ -16,8 +16,16 @@
  */
 package org.apache.lucene.index;
 
+import java.io.IOException;
+
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.codecs.Codec;
+import org.apache.lucene.codecs.FilterCodec;
+import org.apache.lucene.codecs.PointsFormat;
+import org.apache.lucene.codecs.PointsReader;
+import org.apache.lucene.codecs.PointsWriter;
+import org.apache.lucene.codecs.lucene60.Lucene60PointsReader;
+import org.apache.lucene.codecs.lucene60.Lucene60PointsWriter;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.LongPoint;
 import org.apache.lucene.search.IndexSearcher;
@@ -33,10 +41,10 @@ import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
 
 // e.g. run like this: ant test -Dtestcase=Test2BPoints -Dtests.nightly=true -Dtests.verbose=true -Dtests.monster=true
 // 
-//   or: python -u /l/util/src/python/repeatLuceneTest.py -once -nolog -tmpDir /b/tmp -logDir /l/logs Test2BPoints.test1D -verbose
+//   or: python -u /l/util/src/python/repeatLuceneTest.py -heap 6g -once -nolog -tmpDir /b/tmp -logDir /l/logs Test2BPoints.test2D -verbose
 
 @SuppressCodecs({ "SimpleText", "Memory", "Direct", "Compressing" })
-@TimeoutSuite(millis = 16 * TimeUnits.HOUR)
+@TimeoutSuite(millis = 365 * 24 * TimeUnits.HOUR) // hopefully ~1 year is long enough ;)
 @Monster("takes at least 4 hours and consumes many GB of temp disk space")
 public class Test2BPoints extends LuceneTestCase {
   public void test1D() throws Exception {
@@ -44,12 +52,14 @@ public class Test2BPoints extends LuceneTestCase {
     System.out.println("DIR: " + ((FSDirectory) dir).getDirectory());
 
     IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()))
-        .setCodec(Codec.forName("Lucene60"))
-        .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
-        .setRAMBufferSizeMB(64.0)
-        .setMergeScheduler(new ConcurrentMergeScheduler())
-        .setMergePolicy(newLogMergePolicy(false, 10))
-        .setOpenMode(IndexWriterConfig.OpenMode.CREATE);
+      .setCodec(getCodec())
+      .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+      .setRAMBufferSizeMB(256.0)
+      .setMergeScheduler(new ConcurrentMergeScheduler())
+      .setMergePolicy(newLogMergePolicy(false, 10))
+      .setOpenMode(IndexWriterConfig.OpenMode.CREATE);
+
+    ((ConcurrentMergeScheduler) iwc.getMergeScheduler()).setMaxMergesAndThreads(6, 3);
     
     IndexWriter w = new IndexWriter(dir, iwc);
 
@@ -88,13 +98,15 @@ public class Test2BPoints extends LuceneTestCase {
     Directory dir = FSDirectory.open(createTempDir("2BPoints2D"));
 
     IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()))
-        .setCodec(Codec.forName("Lucene60"))
-        .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
-        .setRAMBufferSizeMB(64.0)
-        .setMergeScheduler(new ConcurrentMergeScheduler())
-        .setMergePolicy(newLogMergePolicy(false, 10))
-        .setOpenMode(IndexWriterConfig.OpenMode.CREATE);
+      .setCodec(getCodec())
+      .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+      .setRAMBufferSizeMB(256.0)
+      .setMergeScheduler(new ConcurrentMergeScheduler())
+      .setMergePolicy(newLogMergePolicy(false, 10))
+      .setOpenMode(IndexWriterConfig.OpenMode.CREATE);
     
+    ((ConcurrentMergeScheduler) iwc.getMergeScheduler()).setMaxMergesAndThreads(6, 3);
+
     IndexWriter w = new IndexWriter(dir, iwc);
 
     MergePolicy mp = w.getConfig().getMergePolicy();
@@ -127,4 +139,26 @@ public class Test2BPoints extends LuceneTestCase {
     TestUtil.checkIndex(dir);
     dir.close();
   }
+
+  private static Codec getCodec() {
+
+    return new FilterCodec("Lucene60", Codec.forName("Lucene60")) {
+      @Override
+      public PointsFormat pointsFormat() {
+        return new PointsFormat() {
+          @Override
+          public PointsWriter fieldsWriter(SegmentWriteState writeState) throws IOException {
+            int maxPointsInLeafNode = 1024;
+            double maxMBSortInHeap = 256.0;
+            return new Lucene60PointsWriter(writeState, maxPointsInLeafNode, maxMBSortInHeap);
+          }
+
+          @Override
+          public PointsReader fieldsReader(SegmentReadState readState) throws IOException {
+            return new Lucene60PointsReader(readState);
+          }
+        };
+      }
+    };
+  }
 }


[47/50] [abbrv] lucene-solr git commit: SOLR-8831: allow _version_ field to be retrievable via docValues

Posted by ho...@apache.org.
SOLR-8831: allow _version_ field to be retrievable via docValues


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/50c413e8
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/50c413e8
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/50c413e8

Branch: refs/heads/jira/SOLR-445
Commit: 50c413e865a166de7ba72bf8c3affb5702c2fb62
Parents: fe21f7a
Author: yonik <yo...@apache.org>
Authored: Fri Mar 11 14:10:37 2016 -0500
Committer: yonik <yo...@apache.org>
Committed: Fri Mar 11 14:10:37 2016 -0500

----------------------------------------------------------------------
 solr/CHANGES.txt                                           | 2 ++
 solr/core/src/java/org/apache/solr/update/VersionInfo.java | 8 ++++----
 2 files changed, 6 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/50c413e8/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 1d91a3e..bb36297 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -195,6 +195,8 @@ New Features
 
 * SOLR-8698: params.json can now specify 'appends' and 'invariants' (noble)
 
+* SOLR-8831: allow _version_ field to be retrievable via docValues (yonik)
+
 
 Bug Fixes
 ----------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/50c413e8/solr/core/src/java/org/apache/solr/update/VersionInfo.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/VersionInfo.java b/solr/core/src/java/org/apache/solr/update/VersionInfo.java
index d5eebec..5fe415c 100644
--- a/solr/core/src/java/org/apache/solr/update/VersionInfo.java
+++ b/solr/core/src/java/org/apache/solr/update/VersionInfo.java
@@ -61,7 +61,7 @@ public class VersionInfo {
    */
   public static SchemaField getAndCheckVersionField(IndexSchema schema) 
     throws SolrException {
-    final String errPrefix = VERSION_FIELD + " field must exist in schema, using indexed=\"true\" or docValues=\"true\", stored=\"true\" and multiValued=\"false\"";
+    final String errPrefix = VERSION_FIELD + " field must exist in schema and be searchable (indexed or docValues) and retrievable(stored or docValues) and not multiValued";
     SchemaField sf = schema.getFieldOrNull(VERSION_FIELD);
 
     if (null == sf) {
@@ -72,12 +72,12 @@ public class VersionInfo {
     if ( !sf.indexed() && !sf.hasDocValues()) {
       throw new SolrException
         (SolrException.ErrorCode.SERVER_ERROR, 
-         errPrefix + " (" + VERSION_FIELD + " must be either indexed or have docValues");
+         errPrefix + " (" + VERSION_FIELD + " not searchable");
     }
-    if ( !sf.stored() ) {
+    if ( !sf.stored() && !sf.hasDocValues()) {
       throw new SolrException
         (SolrException.ErrorCode.SERVER_ERROR, 
-         errPrefix + " (" + VERSION_FIELD + " is not stored");
+         errPrefix + " (" + VERSION_FIELD + " not retrievable");
     }
     if ( sf.multiValued() ) {
       throw new SolrException
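
A hedged SolrJ sketch of what this change enables (the base URL, collection name, and reliance on useDocValuesAsStored to return docValues-only fields are assumptions, not part of this commit): with _version_ declared indexed="false" stored="false" docValues="true", the field now passes the checks above and can still be returned to clients.

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;
    import org.apache.solr.client.solrj.response.QueryResponse;
    import org.apache.solr.common.SolrDocument;

    public class VersionFieldSketch {
      public static void main(String[] args) throws Exception {
        try (HttpSolrClient client = new HttpSolrClient("http://localhost:8983/solr")) {
          SolrQuery q = new SolrQuery("*:*");
          q.setFields("id", "_version_"); // retrievable via docValues after this change
          QueryResponse rsp = client.query("collection1", q);
          for (SolrDocument d : rsp.getResults()) {
            System.out.println(d.getFieldValue("id") + " -> " + d.getFieldValue("_version_"));
          }
        }
      }
    }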


[45/50] [abbrv] lucene-solr git commit: SOLR-8730: Fix highlighting in new UI query pane

Posted by ho...@apache.org.
SOLR-8730: Fix highlighting in new UI query pane


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/fe21f7a4
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/fe21f7a4
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/fe21f7a4

Branch: refs/heads/jira/SOLR-445
Commit: fe21f7a4c3a135caa39b1e25e640bc28c069b0a6
Parents: 1e05d3b
Author: Upayavira <uv...@odoko.co.uk>
Authored: Fri Mar 11 13:26:01 2016 +0000
Committer: Upayavira <uv...@odoko.co.uk>
Committed: Fri Mar 11 13:26:01 2016 +0000

----------------------------------------------------------------------
 solr/webapp/web/partials/query.html | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/fe21f7a4/solr/webapp/web/partials/query.html
----------------------------------------------------------------------
diff --git a/solr/webapp/web/partials/query.html b/solr/webapp/web/partials/query.html
index 87f2f4e..31bc242 100644
--- a/solr/webapp/web/partials/query.html
+++ b/solr/webapp/web/partials/query.html
@@ -215,26 +215,26 @@ limitations under the License.
         <div class="fieldset" ng-show="isHighlight">
 
         <label for="hl_fl" title="Fields to highlight on.">hl.fl</label>
-        <input type="text" ng-model="hl.fl" name="hl.fl" id="hl_fl" value="" title="Fields to highlight on.">
+        <input type="text" ng-model="hl['hl.fl']" name="hl.fl" id="hl_fl" value="" title="Fields to highlight on.">
 
         <label for="hl_simple_pre">hl.simple.pre</label>
-        <input type="text" ng-model="hl['simple.pre']" name="hl.simple.pre" id="hl_simple_pre" value="<em>">
+        <input type="text" ng-model="hl['hl.simple.pre']" name="hl.simple.pre" id="hl_simple_pre" value="<em>">
 
         <label for="hl_simple_post">hl.simple.post</label>
-        <input type="text" ng-model="hl['simple.post']"  name="hl.simple.post" id="hl_simple_post" value="</em>">
+        <input type="text" ng-model="hl['hl.simple.post']"  name="hl.simple.post" id="hl_simple_post" value="</em>">
 
         <label for="hl_requireFieldMatch" class="checkbox">
-          <input type="checkbox" ng-model="hl.requireFieldMatch" name="hl.requireFieldMatch" id="hl_requireFieldMatch" value="true">
+          <input type="checkbox" ng-model="hl['hl.requireFieldMatch']" name="hl.requireFieldMatch" id="hl_requireFieldMatch" value="true">
           hl.requireFieldMatch
         </label>
 
         <label for="hl_usePhraseHighlighter" class="checkbox">
-          <input type="checkbox" ng-model="hl.usePhraseHighLighter" name="hl.usePhraseHighlighter" id="hl_usePhraseHighlighter" value="true">
+          <input type="checkbox" ng-model="hl['usePhraseHighLighter']" name="hl.usePhraseHighlighter" id="hl_usePhraseHighlighter" value="true">
           hl.usePhraseHighlighter
         </label>
 
         <label for="hl_highlightMultiTerm" class="checkbox">
-          <input type="checkbox" ng-model="hl.hightlightMultiTerm" name="hl.highlightMultiTerm" id="hl_highlightMultiTerm" value="true">
+          <input type="checkbox" ng-model="hl['hightlightMultiTerm']" name="hl.highlightMultiTerm" id="hl_highlightMultiTerm" value="true">
           hl.highlightMultiTerm
         </label>
 


[49/50] [abbrv] lucene-solr git commit: SOLR-445: ensure maxErrors=-1 is treated as effectively unlimited

Posted by ho...@apache.org.
SOLR-445: ensure maxErrors=-1 is treated as effectively unlimited


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/0bd817d1
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/0bd817d1
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/0bd817d1

Branch: refs/heads/jira/SOLR-445
Commit: 0bd817d19cadf7a6c0c522ed15f75110620029b3
Parents: 7d6ed17
Author: Chris Hostetter <ho...@apache.org>
Authored: Fri Mar 11 15:18:02 2016 -0700
Committer: Chris Hostetter <ho...@apache.org>
Committed: Fri Mar 11 15:18:02 2016 -0700

----------------------------------------------------------------------
 .../processor/TolerantUpdateProcessor.java      | 25 +++++----
 .../TolerantUpdateProcessorFactory.java         | 54 ++++++++++----------
 ...lrconfig-distrib-update-processor-chains.xml |  3 +-
 .../cloud/TestTolerantUpdateProcessorCloud.java | 32 +++++++-----
 .../processor/TolerantUpdateProcessorTest.java  |  8 ++-
 .../solr/client/solrj/impl/CloudSolrClient.java |  6 ++-
 .../solr/common/ToleratedUpdateError.java       | 24 +++++++++
 .../solr/common/TestToleratedUpdateError.java   | 16 ++++++
 8 files changed, 114 insertions(+), 54 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0bd817d1/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
index 678d4af..e53d33c 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
@@ -53,14 +53,17 @@ import org.slf4j.LoggerFactory;
 
 /** 
  * <p> 
- * Suppresses errors for individual add/delete commands within a batch.
- * Instead, all errors are logged and the batch continues. The client
- * will receive a 200 response, but gets a list of errors (keyed by
- * unique key) unless <code>maxErrors</code> is reached. 
- * If <code>maxErrors</code> occur, the first exception caught will be re-thrown, 
- * Solr will respond with 5XX or 4XX (depending on the underlying exceptions) and
- * it won't finish processing the batch. This means that the last docs
- * in the batch may not be added in this case even if they are valid. 
+ * Suppresses errors for individual add/delete commands within a single request.
+ * Instead of failing on the first error, at most <code>maxErrors</code> errors (or unlimited 
+ * if <code>-1==maxErrors</code>) are logged and recorded the batch continues. 
+ * The client will receive a <code>status==200</code> response, which includes a list of errors 
+ * that were tolerated.
+ * </p>
+ * <p>
+ * If more then <code>maxErrors</code> occur, the first exception recorded will be re-thrown, 
+ * Solr will respond with <code>status==5xx</code> or <code>status==4xx</code> 
+ * (depending on the underlying exceptions) and it won't finish processing any more updates in the request. 
+ * (ie: subsequent update commands in the request will not be processed even if they are valid).
  * </p>
  * 
  * <p>
@@ -125,10 +128,10 @@ public class TolerantUpdateProcessor extends UpdateRequestProcessor {
 
   public TolerantUpdateProcessor(SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next, int maxErrors, DistribPhase distribPhase) {
     super(next);
-    assert maxErrors >= 0;
+    assert maxErrors >= -1;
       
     header = rsp.getResponseHeader();
-    this.maxErrors = maxErrors;
+    this.maxErrors = ToleratedUpdateError.getEffectiveMaxErrors(maxErrors);
     this.req = req;
     this.distribPhase = distribPhase;
     assert ! DistribPhase.FROMLEADER.equals(distribPhase);
@@ -296,7 +299,7 @@ public class TolerantUpdateProcessor extends UpdateRequestProcessor {
 
     header.add("errors", ToleratedUpdateError.formatForResponseHeader(knownErrors));
     // include in response so client knows what effective value was (may have been server side config)
-    header.add("maxErrors", maxErrors);
+    header.add("maxErrors", ToleratedUpdateError.getUserFriendlyMaxErrors(maxErrors));
 
     // annotate any error that might be thrown (or was already thrown)
     firstErrTracker.annotate(knownErrors);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0bd817d1/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessorFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessorFactory.java
index 1338908..d049077 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessorFactory.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessorFactory.java
@@ -27,26 +27,28 @@ import static org.apache.solr.update.processor.DistributingUpdateProcessorFactor
 
 /**
  * <p> 
- * Suppresses errors for individual add/delete commands within a batch.
- * Instead, all errors are logged and the batch continues. The client
- * will receive a 200 response, but gets a list of errors (keyed by
- * unique key) unless <code>maxErrors</code> is reached. 
- * If <code>maxErrors</code> occur, the last exception caught will be re-thrown, 
- * Solr will respond with 5XX or 4XX (depending on the exception) and
- * it won't finish processing the batch. This means that the last docs
- * in the batch may not be added in this case even if they are valid. 
- * Note that this UpdateRequestProcessor will only catch exceptions that occur 
- * on later elements in the chain.  
- * 
+ * Suppresses errors for individual add/delete commands within a single request.
+ * Instead of failing on the first error, at most <code>maxErrors</code> errors (or unlimited 
+ * if <code>-1==maxErrors</code>) are logged and recorded the batch continues. 
+ * The client will receive a <code>status==200</code> response, which includes a list of errors 
+ * that were tolerated.
  * </p>
- * 
  * <p>
- * <code>maxErrors</code> is an int value that can be specified in the 
- * configuration and can also be overridden per request. If unset, it will 
- * default to <code>Integer.MAX_VALUE</code>
+ * If more then <code>maxErrors</code> occur, the first exception recorded will be re-thrown, 
+ * Solr will respond with <code>status==5xx</code> or <code>status==4xx</code> 
+ * (depending on the underlying exceptions) and it won't finish processing any more updates in the request. 
+ * (ie: subsequent update commands in the request will not be processed even if they are valid).
  * </p>
  * 
+ * <p>
+ * <code>maxErrors</code> is an int value that can be specified in the configuration and/or overridden 
+ * per request. If unset, it will default to {@link Integer#MAX_VALUE}.  Specifying an explicit value 
+ * of <code>-1</code> is supported as shorthand for {@link Integer#MAX_VALUE}, all other negative 
+ * integer values are not supported.
+ * </p>
+ * <p>
  * An example configuration would be:
+ * </p>
  * <pre class="prettyprint">
  * &lt;updateRequestProcessorChain name="tolerant-chain"&gt;
  *   &lt;processor class="solr.TolerantUpdateProcessorFactory"&gt;
@@ -58,14 +60,12 @@ import static org.apache.solr.update.processor.DistributingUpdateProcessorFactor
  * </pre>
  * 
  * <p>
- * The maxErrors parameter can be overwritten per request, for example:
+ * The <code>maxErrors</code> parameter in the above chain could be overwritten per request, for example:
  * </p>
  * <pre class="prettyprint">
- * curl http://localhost:8983/update?maxErrors=100 -H "Content-Type: text/xml" -d @myfile.xml
+ * curl http://localhost:8983/update?update.chain=tolerant-chain&amp;maxErrors=100 -H "Content-Type: text/xml" -d @myfile.xml
  * </pre>
  * 
- * 
- * 
  */
 public class TolerantUpdateProcessorFactory extends UpdateRequestProcessorFactory
     implements UpdateRequestProcessorFactory.RunAlways {
@@ -81,20 +81,22 @@ public class TolerantUpdateProcessorFactory extends UpdateRequestProcessorFactor
   * Default maxErrors value that will be used if the value is not set in configuration
    * or in the request
    */
-  private Integer defaultMaxErrors = Integer.MAX_VALUE;
+  private int defaultMaxErrors = Integer.MAX_VALUE;
   
   @SuppressWarnings("rawtypes")
   @Override
   public void init( NamedList args ) {
 
-    // nocommit: clean error on invalid type for param ... don't fail stupidly on <str ...>42</str>
     Object maxErrorsObj = args.get(MAX_ERRORS_PARAM); 
     if (maxErrorsObj != null) {
       try {
-        defaultMaxErrors = (Integer)maxErrorsObj;
+        defaultMaxErrors = Integer.valueOf(maxErrorsObj.toString());
       } catch (Exception e) {
         throw new SolrException(ErrorCode.SERVER_ERROR, "Unnable to parse maxErrors parameter: " + maxErrorsObj, e);
       }
+      if (defaultMaxErrors < -1) {
+        throw new SolrException(ErrorCode.SERVER_ERROR, "Config option '"+MAX_ERRORS_PARAM + "' must either be non-negative, or -1 to indicate 'unlimiited': " + maxErrorsObj.toString());
+      }
     }
   }
   
@@ -107,13 +109,11 @@ public class TolerantUpdateProcessorFactory extends UpdateRequestProcessorFactor
       return next;
     }
     
-    Integer maxErrors = req.getParams().getInt(MAX_ERRORS_PARAM);
-    if(maxErrors == null) {
-      maxErrors = this.defaultMaxErrors;
+    int maxErrors = req.getParams().getInt(MAX_ERRORS_PARAM, defaultMaxErrors);
+    if (maxErrors < -1) {
+      throw new SolrException(ErrorCode.BAD_REQUEST, "'"+MAX_ERRORS_PARAM + "' must either be non-negative, or -1 to indicate 'unlimited': " + maxErrors);
     }
 
-    // nocommit: support maxErrors < 0 to mean the same as Integer.MAX_VALUE (add test)
-    
     // NOTE: even if 0==maxErrors, we still inject processor into chain so response has expected header info
     return new TolerantUpdateProcessor(req, rsp, next, maxErrors, distribPhase);
   }

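For reference, the curl override in the javadoc above has a straightforward SolrJ
equivalent. A minimal sketch, assuming a collection configured with the
"tolerant-chain" shown earlier (the collection name, document, and client are
illustrative, not part of this diff):

    UpdateRequest ureq = new UpdateRequest();
    ureq.setParam("update.chain", "tolerant-chain");
    ureq.setParam("maxErrors", "100");             // "-1" would mean unlimited
    ureq.add(doc);                                 // a SolrInputDocument to index
    UpdateResponse rsp = ureq.process(client, "collection1");
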
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0bd817d1/solr/core/src/test-files/solr/collection1/conf/solrconfig-distrib-update-processor-chains.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig-distrib-update-processor-chains.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig-distrib-update-processor-chains.xml
index 245f856..97ed18b 100644
--- a/solr/core/src/test-files/solr/collection1/conf/solrconfig-distrib-update-processor-chains.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/solrconfig-distrib-update-processor-chains.xml
@@ -64,7 +64,8 @@
   
   <updateRequestProcessorChain name="tolerant-chain-max-errors-10">
     <processor class="solr.TolerantUpdateProcessorFactory">
-      <int name="maxErrors">10</int>
+      <!-- explicitly testing that parsing still works if a valid int is specified as a string -->
+      <str name="maxErrors">10</str>
     </processor>
     <processor class="solr.DistributedUpdateProcessorFactory" />
     <processor class="solr.RunUpdateProcessorFactory" />

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0bd817d1/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java
index 7d8d769..efbba5a 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java
@@ -410,21 +410,27 @@ public class TestTolerantUpdateProcessorCloud extends SolrCloudTestCase {
     UpdateResponse rsp = null;
 
     // 2 docs that are both on shard1, the first one should fail
+    for (int maxErrors : new int[] { -1, 2, 47, 10 }) {
+      // regardless of which of these maxErrors values we use, behavior should be the same...
+      rsp = update(params("update.chain", "tolerant-chain-max-errors-10",
+                          "maxErrors", ""+maxErrors,
+                          "commit", "true"),
+                   doc(f("id", S_ONE_PRE + "42"), f("foo_i", "bogus_value")),
+                   doc(f("id", S_ONE_PRE + "666"), f("foo_i", "1976"))).process(client);
+      
+      assertEquals(0, rsp.getStatus());
+      assertUpdateTolerantAddErrors("single shard, 1st doc should fail", rsp, S_ONE_PRE + "42");
+      assertEquals(0, client.commit().getStatus());
+      assertQueryDocIds(client, false, S_ONE_PRE + "42");
+      assertQueryDocIds(client, true, S_ONE_PRE + "666");
+
+      // ...only diff should be that we get an accurate report of the effective maxErrors
+      assertEquals(maxErrors, rsp.getResponseHeader().get("maxErrors"));
+    }
     
-    rsp = update(params("update.chain", "tolerant-chain-max-errors-10",
-                        "commit", "true"),
-                 doc(f("id", S_ONE_PRE + "42"), f("foo_i", "bogus_value")),
-                 doc(f("id", S_ONE_PRE + "666"), f("foo_i", "1976"))).process(client);
-    
-    assertEquals(0, rsp.getStatus());
-    assertUpdateTolerantAddErrors("single shard, 1st doc should fail", rsp, S_ONE_PRE + "42");
-    assertEquals(0, client.commit().getStatus());
-    assertQueryDocIds(client, false, S_ONE_PRE + "42");
-    assertQueryDocIds(client, true, S_ONE_PRE + "666");
-           
     // 2 docs that are both on shard1, the second one should fail
     
-    rsp = update(params("update.chain", "tolerant-chain-max-errors-10",
+    rsp = update(params("update.chain", "tolerant-chain-max-errors-not-set",
                         "commit", "true"),
                  doc(f("id", S_ONE_PRE + "55"), f("foo_i", "1976")),
                  doc(f("id", S_ONE_PRE + "77"), f("foo_i", "bogus_val"))).process(client);
@@ -433,6 +439,8 @@ public class TestTolerantUpdateProcessorCloud extends SolrCloudTestCase {
     assertUpdateTolerantAddErrors("single shard, 2nd doc should fail", rsp, S_ONE_PRE + "77");
     assertQueryDocIds(client, false, S_ONE_PRE + "77");
     assertQueryDocIds(client, true, S_ONE_PRE + "666", S_ONE_PRE + "55");
+    // since maxErrors is unset, we should get an "unlimited" value back
+    assertEquals(-1, rsp.getResponseHeader().get("maxErrors"));
 
     // clean slate
     assertEquals(0, client.deleteByQuery("*:*").getStatus());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0bd817d1/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java
index 0cb3ba89..a519068 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java
@@ -314,6 +314,7 @@ public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
                                              "count(//arr[@name='errors']/lst)=0"));
     response = update("tolerant-chain-max-errors-10", adoc("text", "the quick brown fox"));
     assertNull(BaseTestHarness.validateXPath(response, "//int[@name='status']=0",
+        "//int[@name='maxErrors']/text()='10'",
         "count(//arr[@name='errors']/lst)=1",
         "//arr[@name='errors']/lst/str[@name='id']/text()='(unknown)'",
         "//arr[@name='errors']/lst/str[@name='message']/text()='Document is missing mandatory uniqueKey field: id'"));
@@ -327,6 +328,7 @@ public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
     builder.append("</add>");
     response = update("tolerant-chain-max-errors-10", builder.toString());
     assertNull(BaseTestHarness.validateXPath(response, "//int[@name='status']=0",
+        "//int[@name='maxErrors']/text()='10'",
         "count(//arr[@name='errors']/lst)=10",
         "not(//arr[@name='errors']/lst/str[@name='id']/text()='0')",
         "//arr[@name='errors']/lst/str[@name='id']/text()='1'",
@@ -348,7 +350,11 @@ public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
         "//arr[@name='errors']/lst/str[@name='id']/text()='17'",
         "not(//arr[@name='errors']/lst/str[@name='id']/text()='18')",
         "//arr[@name='errors']/lst/str[@name='id']/text()='19'"));
-    
+
+    // spot check response when effective maxErrors is unlimited
+    response = update("tolerant-chain-max-errors-not-set", builder.toString());
+    assertNull(BaseTestHarness.validateXPath(response, "//int[@name='maxErrors']/text()='-1'"));
+                                             
   }
 
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0bd817d1/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
index 9a34976..37cee8e 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
@@ -752,11 +752,12 @@ public class CloudSolrClient extends SolrClient {
       List<SimpleOrderedMap<String>> shardTolerantErrors = 
         (List<SimpleOrderedMap<String>>) header.get("errors");
       if (null != shardTolerantErrors) {
-        Number shardMaxToleratedErrors = (Number) header.get("maxErrors");
+        Integer shardMaxToleratedErrors = (Integer) header.get("maxErrors");
         assert null != shardMaxToleratedErrors : "TolerantUpdateProcessor reported errors but not maxErrors";
         // if we get into some weird state where the nodes disagree about the effective maxErrors,
         // assume the min value seen to decide if we should fail.
-        maxToleratedErrors = Math.min(maxToleratedErrors, shardMaxToleratedErrors.intValue());
+        maxToleratedErrors = Math.min(maxToleratedErrors,
+                                      ToleratedUpdateError.getEffectiveMaxErrors(shardMaxToleratedErrors.intValue()));
         
         if (null == toleratedErrors) {
           toleratedErrors = new ArrayList<SimpleOrderedMap<String>>(shardTolerantErrors.size());
@@ -775,6 +776,7 @@ public class CloudSolrClient extends SolrClient {
     if (minRf != null)
       cheader.add(UpdateRequest.MIN_REPFACT, minRf);
     if (null != toleratedErrors) {
+      cheader.add("maxErrors", ToleratedUpdateError.getUserFriendlyMaxErrors(maxToleratedErrors));
       cheader.add("errors", toleratedErrors);
       if (maxToleratedErrors < toleratedErrors.size()) {
         // cumulative errors are too high, we need to throw a client exception w/correct metadata

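Given an UpdateResponse rsp from a tolerant chain, the merged header produced above
can be inspected roughly like this (a sketch; only the "maxErrors" and "errors" keys
come from the diff, the surrounding code is illustrative):

    NamedList<Object> header = rsp.getResponseHeader();
    Integer maxErrors = (Integer) header.get("maxErrors");    // -1 means unlimited
    @SuppressWarnings("unchecked")
    List<SimpleOrderedMap<String>> errors =
        (List<SimpleOrderedMap<String>>) header.get("errors");
    // convert the user-friendly -1 back to Integer.MAX_VALUE before comparing counts
    int effective = ToleratedUpdateError.getEffectiveMaxErrors(maxErrors.intValue());
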
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0bd817d1/solr/solrj/src/java/org/apache/solr/common/ToleratedUpdateError.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/ToleratedUpdateError.java b/solr/solrj/src/java/org/apache/solr/common/ToleratedUpdateError.java
index 7261a21..c6c4244 100644
--- a/solr/solrj/src/java/org/apache/solr/common/ToleratedUpdateError.java
+++ b/solr/solrj/src/java/org/apache/solr/common/ToleratedUpdateError.java
@@ -28,6 +28,30 @@ public final class ToleratedUpdateError {
     
   private final static String META_PRE =  ToleratedUpdateError.class.getName() + "--";
   private final static int META_PRE_LEN = META_PRE.length();
+
+  /**
+   * Given a 'maxErrors' value such that <code>-1 &lt;= maxErrors &lt;= {@link Integer#MAX_VALUE}</code>, 
+   * this method returns the original input unless it is <code>-1</code>, in which case the effective value of
+   * {@link Integer#MAX_VALUE} is returned.
+   * Input of <code>maxErrors &lt; -1</code> will trip an assertion, and otherwise has undefined behavior.
+   * @see #getUserFriendlyMaxErrors
+   */
+  public static int getEffectiveMaxErrors(int maxErrors) {
+    assert -1 <= maxErrors;
+    return -1 == maxErrors ? Integer.MAX_VALUE : maxErrors;
+  }
+  
+  /**
+   * Given a 'maxErrors' value such that <code>-1 &lt;= maxErrors &lt;= {@link Integer#MAX_VALUE}</code>, 
+   * this method returns the original input unless it is {@link Integer#MAX_VALUE}, in which case 
+   * <code>-1</code> is returned for user convenience.
+   * Input of <code>maxErrors &lt; -1</code> will trip an assertion, and otherwise has undefined behavior.
+   * @see #getEffectiveMaxErrors
+   */
+  public static int getUserFriendlyMaxErrors(int maxErrors) {
+    assert -1 <= maxErrors;
+    return Integer.MAX_VALUE == maxErrors ? -1 : maxErrors;
+  }
   
   /** 
    * returns a list of maps of simple objects suitable for putting in a SolrQueryResponse header 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0bd817d1/solr/solrj/src/test/org/apache/solr/common/TestToleratedUpdateError.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/common/TestToleratedUpdateError.java b/solr/solrj/src/test/org/apache/solr/common/TestToleratedUpdateError.java
index 6759b79..91636b3 100644
--- a/solr/solrj/src/test/org/apache/solr/common/TestToleratedUpdateError.java
+++ b/solr/solrj/src/test/org/apache/solr/common/TestToleratedUpdateError.java
@@ -79,6 +79,22 @@ public class TestToleratedUpdateError extends LuceneTestCase {
     });
   }
 
+  /** trivial sanity check */
+  public void testMaxErrorsValueConversion() {
+    
+    assertEquals(-1, ToleratedUpdateError.getUserFriendlyMaxErrors(-1));
+    assertEquals(-1, ToleratedUpdateError.getUserFriendlyMaxErrors(Integer.MAX_VALUE));
+    
+    assertEquals(Integer.MAX_VALUE, ToleratedUpdateError.getEffectiveMaxErrors(Integer.MAX_VALUE));
+    assertEquals(Integer.MAX_VALUE, ToleratedUpdateError.getEffectiveMaxErrors(-1));
+
+    for (int val : new int[] {0, 1, 10, 42, 600000 }) {
+      assertEquals(val, ToleratedUpdateError.getEffectiveMaxErrors(val));
+      assertEquals(val, ToleratedUpdateError.getUserFriendlyMaxErrors(val));
+    }
+    
+  }
+
   private static abstract class Coppier {
     public abstract ToleratedUpdateError copy(ToleratedUpdateError in);
   }


[23/50] [abbrv] lucene-solr git commit: LUCENE-7089, LUCENE-7075: add points to flexible queryparser to replace legacy numerics support

Posted by ho...@apache.org.
LUCENE-7089, LUCENE-7075: add points to flexible queryparser to replace legacy numerics support


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/89cc676f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/89cc676f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/89cc676f

Branch: refs/heads/jira/SOLR-445
Commit: 89cc676f2bf560091bd24db544faa16946654164
Parents: f24810b
Author: Robert Muir <rm...@apache.org>
Authored: Wed Mar 9 21:05:26 2016 -0500
Committer: Robert Muir <rm...@apache.org>
Committed: Wed Mar 9 21:06:41 2016 -0500

----------------------------------------------------------------------
 .../flexible/core/nodes/package-info.java       |   4 +-
 .../flexible/standard/StandardQueryParser.java  |  29 +-
 .../LegacyNumericRangeQueryNodeBuilder.java     |  93 ++++
 .../builders/NumericRangeQueryNodeBuilder.java  |  91 ----
 .../builders/PointRangeQueryNodeBuilder.java    | 137 +++++
 .../builders/StandardQueryTreeBuilder.java      |  12 +-
 .../standard/config/LegacyNumericConfig.java    | 166 ++++++
 .../LegacyNumericFieldConfigListener.java       |  75 +++
 .../flexible/standard/config/NumericConfig.java | 164 ------
 .../config/NumericFieldConfigListener.java      |  73 ---
 .../flexible/standard/config/PointsConfig.java  | 124 +++++
 .../standard/config/PointsConfigListener.java   |  65 +++
 .../config/StandardQueryConfigHandler.java      |  39 +-
 .../standard/nodes/LegacyNumericQueryNode.java  | 153 ++++++
 .../nodes/LegacyNumericRangeQueryNode.java      | 153 ++++++
 .../standard/nodes/NumericQueryNode.java        | 151 ------
 .../standard/nodes/NumericRangeQueryNode.java   | 151 ------
 .../flexible/standard/nodes/PointQueryNode.java | 151 ++++++
 .../standard/nodes/PointRangeQueryNode.java     | 124 +++++
 .../LegacyNumericQueryNodeProcessor.java        | 154 ++++++
 .../LegacyNumericRangeQueryNodeProcessor.java   | 170 ++++++
 .../processors/NumericQueryNodeProcessor.java   | 152 ------
 .../NumericRangeQueryNodeProcessor.java         | 168 ------
 .../processors/PointQueryNodeProcessor.java     | 136 +++++
 .../PointRangeQueryNodeProcessor.java           | 148 +++++
 .../StandardQueryNodeProcessorPipeline.java     |   6 +-
 .../standard/TestLegacyNumericQueryParser.java  | 535 +++++++++++++++++++
 .../standard/TestNumericQueryParser.java        | 535 -------------------
 .../flexible/standard/TestPointQueryParser.java |  82 +++
 lucene/tools/junit4/cached-timehints.txt        |   2 +-
 30 files changed, 2535 insertions(+), 1508 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/core/nodes/package-info.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/core/nodes/package-info.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/core/nodes/package-info.java
index c9d55ea..23c72a1 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/core/nodes/package-info.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/core/nodes/package-info.java
@@ -52,14 +52,14 @@
  * <li>FuzzyQueryNode - fuzzy node</li>
  * <li>TermRangeQueryNode - used for parametric field:[low_value TO high_value]</li>
  * <li>ProximityQueryNode - used for proximity search</li>
- * <li>NumericRangeQueryNode - used for numeric range search</li>
+ * <li>LegacyNumericRangeQueryNode - used for numeric range search</li>
  * <li>TokenizedPhraseQueryNode - used by tokenizers/lemmatizers/analyzers for phrases/autophrases</li>
  * </ul>
  * <p>
  * Leaf Nodes:
  * <ul>
  * <li>FieldQueryNode - field/value node</li>
- * <li>NumericQueryNode - used for numeric search</li>
+ * <li>LegacyNumericQueryNode - used for numeric search</li>
  * <li>PathQueryNode - {@link org.apache.lucene.queryparser.flexible.core.nodes.QueryNode} object used with path-like queries</li>
  * <li>OpaqueQueryNode - Used for a part of the query that can be parsed by other parsers. schema/value</li> 
  * <li>PrefixWildcardQueryNode - non-phrase wildcard query</li>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/StandardQueryParser.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/StandardQueryParser.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/StandardQueryParser.java
index ada65a4..2774cf0 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/StandardQueryParser.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/StandardQueryParser.java
@@ -29,7 +29,8 @@ import org.apache.lucene.queryparser.flexible.core.QueryParserHelper;
 import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler;
 import org.apache.lucene.queryparser.flexible.standard.builders.StandardQueryTreeBuilder;
 import org.apache.lucene.queryparser.flexible.standard.config.FuzzyConfig;
-import org.apache.lucene.queryparser.flexible.standard.config.NumericConfig;
+import org.apache.lucene.queryparser.flexible.standard.config.LegacyNumericConfig;
+import org.apache.lucene.queryparser.flexible.standard.config.PointsConfig;
 import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler;
 import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.ConfigurationKeys;
 import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.Operator;
@@ -322,12 +323,30 @@ public class StandardQueryParser extends QueryParserHelper implements CommonQuer
     
   }
   
-  public void setNumericConfigMap(Map<String,NumericConfig> numericConfigMap) {
-    getQueryConfigHandler().set(ConfigurationKeys.NUMERIC_CONFIG_MAP, numericConfigMap);
+  /**
+   * Sets field configuration for legacy numeric fields
+   * @deprecated Index with points instead and use {@link #setPointsConfigMap(Map)}
+   */
+  @Deprecated
+  public void setLegacyNumericConfigMap(Map<String,LegacyNumericConfig> legacyNumericConfigMap) {
+    getQueryConfigHandler().set(ConfigurationKeys.LEGACY_NUMERIC_CONFIG_MAP, legacyNumericConfigMap);
+  }
+  
+  /**
+   * Gets field configuration for legacy numeric fields
+   * @deprecated Index with points instead and use {@link #getPointsConfigMap()}
+   */
+  @Deprecated
+  public Map<String,LegacyNumericConfig> getLegacyNumericConfigMap() {
+    return getQueryConfigHandler().get(ConfigurationKeys.LEGACY_NUMERIC_CONFIG_MAP);
+  }
+  
+  public void setPointsConfigMap(Map<String,PointsConfig> pointsConfigMap) {
+    getQueryConfigHandler().set(ConfigurationKeys.POINTS_CONFIG_MAP, pointsConfigMap);
   }
   
-  public Map<String,NumericConfig> getNumericConfigMap() {
-    return getQueryConfigHandler().get(ConfigurationKeys.NUMERIC_CONFIG_MAP);
+  public Map<String,PointsConfig> getPointsConfigMap() {
+    return getQueryConfigHandler().get(ConfigurationKeys.POINTS_CONFIG_MAP);
   }
   
   /**

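A minimal usage sketch for the new points support, assuming an index with a
DoublePoint field named "price" (the field name, locale, and query string are
illustrative):

    import java.text.NumberFormat;
    import java.util.HashMap;
    import java.util.Locale;
    import java.util.Map;
    import org.apache.lucene.queryparser.flexible.standard.StandardQueryParser;
    import org.apache.lucene.queryparser.flexible.standard.config.PointsConfig;
    import org.apache.lucene.search.Query;

    StandardQueryParser parser = new StandardQueryParser();
    Map<String,PointsConfig> pointsConfig = new HashMap<>();
    // declare that "price" was indexed as a DoublePoint
    pointsConfig.put("price",
        new PointsConfig(NumberFormat.getNumberInstance(Locale.ROOT), Double.class));
    parser.setPointsConfigMap(pointsConfig);
    Query q = parser.parse("price:[10.5 TO 20.0]", "body");  // builds a point range query
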
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/LegacyNumericRangeQueryNodeBuilder.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/LegacyNumericRangeQueryNodeBuilder.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/LegacyNumericRangeQueryNodeBuilder.java
new file mode 100644
index 0000000..8ae7d5e
--- /dev/null
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/LegacyNumericRangeQueryNodeBuilder.java
@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.queryparser.flexible.standard.builders;
+
+import org.apache.lucene.document.FieldType;
+import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
+import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages;
+import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode;
+import org.apache.lucene.queryparser.flexible.core.util.StringUtils;
+import org.apache.lucene.queryparser.flexible.messages.MessageImpl;
+import org.apache.lucene.queryparser.flexible.standard.config.LegacyNumericConfig;
+import org.apache.lucene.queryparser.flexible.standard.nodes.LegacyNumericQueryNode;
+import org.apache.lucene.queryparser.flexible.standard.nodes.LegacyNumericRangeQueryNode;
+import org.apache.lucene.search.LegacyNumericRangeQuery;
+
+/**
+ * Builds {@link org.apache.lucene.search.LegacyNumericRangeQuery}s out of {@link LegacyNumericRangeQueryNode}s.
+ *
+ * @see org.apache.lucene.search.LegacyNumericRangeQuery
+ * @see LegacyNumericRangeQueryNode
+ * @deprecated Index with points and use {@link PointRangeQueryNodeBuilder} instead.
+ */
+@Deprecated
+public class LegacyNumericRangeQueryNodeBuilder implements StandardQueryBuilder {
+  
+  /**
+   * Constructs a {@link LegacyNumericRangeQueryNodeBuilder} object.
+   */
+  public LegacyNumericRangeQueryNodeBuilder() {
+  // empty constructor
+  }
+  
+  @Override
+  public LegacyNumericRangeQuery<? extends Number> build(QueryNode queryNode)
+      throws QueryNodeException {
+    LegacyNumericRangeQueryNode numericRangeNode = (LegacyNumericRangeQueryNode) queryNode;
+    
+    LegacyNumericQueryNode lowerNumericNode = numericRangeNode.getLowerBound();
+    LegacyNumericQueryNode upperNumericNode = numericRangeNode.getUpperBound();
+    
+    Number lowerNumber = lowerNumericNode.getValue();
+    Number upperNumber = upperNumericNode.getValue();
+    
+    LegacyNumericConfig numericConfig = numericRangeNode.getNumericConfig();
+    FieldType.LegacyNumericType numberType = numericConfig.getType();
+    String field = StringUtils.toString(numericRangeNode.getField());
+    boolean minInclusive = numericRangeNode.isLowerInclusive();
+    boolean maxInclusive = numericRangeNode.isUpperInclusive();
+    int precisionStep = numericConfig.getPrecisionStep();
+    
+    switch (numberType) {
+      
+      case LONG:
+        return LegacyNumericRangeQuery.newLongRange(field, precisionStep,
+            (Long) lowerNumber, (Long) upperNumber, minInclusive, maxInclusive);
+      
+      case INT:
+        return LegacyNumericRangeQuery.newIntRange(field, precisionStep,
+            (Integer) lowerNumber, (Integer) upperNumber, minInclusive,
+            maxInclusive);
+      
+      case FLOAT:
+        return LegacyNumericRangeQuery.newFloatRange(field, precisionStep,
+            (Float) lowerNumber, (Float) upperNumber, minInclusive,
+            maxInclusive);
+      
+      case DOUBLE:
+        return LegacyNumericRangeQuery.newDoubleRange(field, precisionStep,
+            (Double) lowerNumber, (Double) upperNumber, minInclusive,
+            maxInclusive);
+        
+        default :
+          throw new QueryNodeException(new MessageImpl(
+            QueryParserMessages.UNSUPPORTED_NUMERIC_DATA_TYPE, numberType));
+        
+    }
+  }
+  
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/NumericRangeQueryNodeBuilder.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/NumericRangeQueryNodeBuilder.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/NumericRangeQueryNodeBuilder.java
deleted file mode 100644
index 6c8790f..0000000
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/NumericRangeQueryNodeBuilder.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.queryparser.flexible.standard.builders;
-
-import org.apache.lucene.document.FieldType;
-import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
-import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages;
-import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode;
-import org.apache.lucene.queryparser.flexible.core.util.StringUtils;
-import org.apache.lucene.queryparser.flexible.messages.MessageImpl;
-import org.apache.lucene.queryparser.flexible.standard.config.NumericConfig;
-import org.apache.lucene.queryparser.flexible.standard.nodes.NumericQueryNode;
-import org.apache.lucene.queryparser.flexible.standard.nodes.NumericRangeQueryNode;
-import org.apache.lucene.search.LegacyNumericRangeQuery;
-
-/**
- * Builds {@link org.apache.lucene.search.LegacyNumericRangeQuery}s out of {@link NumericRangeQueryNode}s.
- *
- * @see org.apache.lucene.search.LegacyNumericRangeQuery
- * @see NumericRangeQueryNode
- */
-public class NumericRangeQueryNodeBuilder implements StandardQueryBuilder {
-  
-  /**
-   * Constructs a {@link NumericRangeQueryNodeBuilder} object.
-   */
-  public NumericRangeQueryNodeBuilder() {
-  // empty constructor
-  }
-  
-  @Override
-  public LegacyNumericRangeQuery<? extends Number> build(QueryNode queryNode)
-      throws QueryNodeException {
-    NumericRangeQueryNode numericRangeNode = (NumericRangeQueryNode) queryNode;
-    
-    NumericQueryNode lowerNumericNode = numericRangeNode.getLowerBound();
-    NumericQueryNode upperNumericNode = numericRangeNode.getUpperBound();
-    
-    Number lowerNumber = lowerNumericNode.getValue();
-    Number upperNumber = upperNumericNode.getValue();
-    
-    NumericConfig numericConfig = numericRangeNode.getNumericConfig();
-    FieldType.LegacyNumericType numberType = numericConfig.getType();
-    String field = StringUtils.toString(numericRangeNode.getField());
-    boolean minInclusive = numericRangeNode.isLowerInclusive();
-    boolean maxInclusive = numericRangeNode.isUpperInclusive();
-    int precisionStep = numericConfig.getPrecisionStep();
-    
-    switch (numberType) {
-      
-      case LONG:
-        return LegacyNumericRangeQuery.newLongRange(field, precisionStep,
-            (Long) lowerNumber, (Long) upperNumber, minInclusive, maxInclusive);
-      
-      case INT:
-        return LegacyNumericRangeQuery.newIntRange(field, precisionStep,
-            (Integer) lowerNumber, (Integer) upperNumber, minInclusive,
-            maxInclusive);
-      
-      case FLOAT:
-        return LegacyNumericRangeQuery.newFloatRange(field, precisionStep,
-            (Float) lowerNumber, (Float) upperNumber, minInclusive,
-            maxInclusive);
-      
-      case DOUBLE:
-        return LegacyNumericRangeQuery.newDoubleRange(field, precisionStep,
-            (Double) lowerNumber, (Double) upperNumber, minInclusive,
-            maxInclusive);
-        
-        default :
-          throw new QueryNodeException(new MessageImpl(
-            QueryParserMessages.UNSUPPORTED_NUMERIC_DATA_TYPE, numberType));
-        
-    }
-  }
-  
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/PointRangeQueryNodeBuilder.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/PointRangeQueryNodeBuilder.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/PointRangeQueryNodeBuilder.java
new file mode 100644
index 0000000..0cce4bf
--- /dev/null
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/PointRangeQueryNodeBuilder.java
@@ -0,0 +1,137 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.queryparser.flexible.standard.builders;
+
+import org.apache.lucene.document.DoublePoint;
+import org.apache.lucene.document.FloatPoint;
+import org.apache.lucene.document.IntPoint;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.index.PointValues;
+import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
+import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages;
+import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode;
+import org.apache.lucene.queryparser.flexible.core.util.StringUtils;
+import org.apache.lucene.queryparser.flexible.messages.MessageImpl;
+import org.apache.lucene.queryparser.flexible.standard.config.PointsConfig;
+import org.apache.lucene.queryparser.flexible.standard.nodes.PointQueryNode;
+import org.apache.lucene.queryparser.flexible.standard.nodes.PointRangeQueryNode;
+import org.apache.lucene.search.Query;
+
+/**
+ * Builds {@link PointValues} range queries out of {@link PointRangeQueryNode}s.
+ *
+ * @see PointRangeQueryNode
+ */
+public class PointRangeQueryNodeBuilder implements StandardQueryBuilder {
+  
+  /**
+   * Constructs a {@link PointRangeQueryNodeBuilder} object.
+   */
+  public PointRangeQueryNodeBuilder() {
+  // empty constructor
+  }
+  
+  @Override
+  public Query build(QueryNode queryNode) throws QueryNodeException {
+    PointRangeQueryNode numericRangeNode = (PointRangeQueryNode) queryNode;
+    
+    PointQueryNode lowerNumericNode = numericRangeNode.getLowerBound();
+    PointQueryNode upperNumericNode = numericRangeNode.getUpperBound();
+    
+    Number lowerNumber = lowerNumericNode.getValue();
+    Number upperNumber = upperNumericNode.getValue();
+    
+    PointsConfig pointsConfig = numericRangeNode.getPointsConfig();
+    Class<? extends Number> numberType = pointsConfig.getType();
+    String field = StringUtils.toString(numericRangeNode.getField());
+    boolean minInclusive = numericRangeNode.isLowerInclusive();
+    boolean maxInclusive = numericRangeNode.isUpperInclusive();
+    
+    // TODO: push down cleaning up of crazy nulls and inclusive/exclusive elsewhere
+    if (Integer.class.equals(numberType)) {
+      Integer lower = (Integer) lowerNumber;
+      if (lower == null) {
+        lower = Integer.MIN_VALUE;
+      }
+      if (minInclusive == false) {
+        lower = lower + 1;
+      }
+      
+      Integer upper = (Integer) upperNumber;
+      if (upper == null) {
+        upper = Integer.MAX_VALUE;
+      }
+      if (maxInclusive == false) {
+        upper = upper - 1;
+      }
+      return IntPoint.newRangeQuery(field, lower, upper);
+    } else if (Long.class.equals(numberType)) {
+      Long lower = (Long) lowerNumber;
+      if (lower == null) {
+        lower = Long.MIN_VALUE;
+      }
+      if (minInclusive == false) {
+        lower = lower + 1;
+      }
+      
+      Long upper = (Long) upperNumber;
+      if (upper == null) {
+        upper = Long.MAX_VALUE;
+      }
+      if (maxInclusive == false) {
+        upper = upper - 1;
+      }
+      return LongPoint.newRangeQuery(field, lower, upper);
+    } else if (Float.class.equals(numberType)) {
+      Float lower = (Float) lowerNumber;
+      if (lower == null) {
+        lower = Float.NEGATIVE_INFINITY;
+      }
+      if (minInclusive == false) {
+        lower = Math.nextUp(lower);
+      }
+      
+      Float upper = (Float) upperNumber;
+      if (upper == null) {
+        upper = Float.POSITIVE_INFINITY;
+      }
+      if (maxInclusive == false) {
+        upper = Math.nextDown(upper);
+      }
+      return FloatPoint.newRangeQuery(field, lower, upper);
+    } else if (Double.class.equals(numberType)) {
+      Double lower = (Double) lowerNumber;
+      if (lower == null) {
+        lower = Double.NEGATIVE_INFINITY;
+      }
+      if (minInclusive == false) {
+        lower = Math.nextUp(lower);
+      }
+      
+      Double upper = (Double) upperNumber;
+      if (upper == null) {
+        upper = Double.POSITIVE_INFINITY;
+      }
+      if (maxInclusive == false) {
+        upper = Math.nextDown(upper);
+      }
+      return DoublePoint.newRangeQuery(field, lower, upper);
+    } else {
+      throw new QueryNodeException(new MessageImpl(QueryParserMessages.UNSUPPORTED_NUMERIC_DATA_TYPE, numberType));
+    }
+  }
+}

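The exclusive-bound handling above amounts to nudging each bound by one representable
step before delegating to the point classes. Equivalently (a sketch; field names and
values are illustrative):

    // exclusive (10, 20) on an IntPoint field becomes inclusive [11, 19]
    Query intQuery = IntPoint.newRangeQuery("count", 10 + 1, 20 - 1);
    // exclusive (0.5, 1.5) on a FloatPoint field moves each bound in by one ulp
    Query floatQuery = FloatPoint.newRangeQuery("weight", Math.nextUp(0.5f), Math.nextDown(1.5f));
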
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/StandardQueryTreeBuilder.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/StandardQueryTreeBuilder.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/StandardQueryTreeBuilder.java
index 2d7c643..360f6a7 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/StandardQueryTreeBuilder.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/StandardQueryTreeBuilder.java
@@ -30,8 +30,10 @@ import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode;
 import org.apache.lucene.queryparser.flexible.core.nodes.SlopQueryNode;
 import org.apache.lucene.queryparser.flexible.core.nodes.TokenizedPhraseQueryNode;
 import org.apache.lucene.queryparser.flexible.standard.nodes.MultiPhraseQueryNode;
-import org.apache.lucene.queryparser.flexible.standard.nodes.NumericQueryNode;
-import org.apache.lucene.queryparser.flexible.standard.nodes.NumericRangeQueryNode;
+import org.apache.lucene.queryparser.flexible.standard.nodes.PointQueryNode;
+import org.apache.lucene.queryparser.flexible.standard.nodes.PointRangeQueryNode;
+import org.apache.lucene.queryparser.flexible.standard.nodes.LegacyNumericQueryNode;
+import org.apache.lucene.queryparser.flexible.standard.nodes.LegacyNumericRangeQueryNode;
 import org.apache.lucene.queryparser.flexible.standard.nodes.PrefixWildcardQueryNode;
 import org.apache.lucene.queryparser.flexible.standard.nodes.TermRangeQueryNode;
 import org.apache.lucene.queryparser.flexible.standard.nodes.RegexpQueryNode;
@@ -57,8 +59,10 @@ public class StandardQueryTreeBuilder extends QueryTreeBuilder implements
     setBuilder(FieldQueryNode.class, new FieldQueryNodeBuilder());
     setBuilder(BooleanQueryNode.class, new BooleanQueryNodeBuilder());
     setBuilder(FuzzyQueryNode.class, new FuzzyQueryNodeBuilder());
-    setBuilder(NumericQueryNode.class, new DummyQueryNodeBuilder());
-    setBuilder(NumericRangeQueryNode.class, new NumericRangeQueryNodeBuilder());
+    setBuilder(LegacyNumericQueryNode.class, new DummyQueryNodeBuilder());
+    setBuilder(LegacyNumericRangeQueryNode.class, new LegacyNumericRangeQueryNodeBuilder());
+    setBuilder(PointQueryNode.class, new DummyQueryNodeBuilder());
+    setBuilder(PointRangeQueryNode.class, new PointRangeQueryNodeBuilder());
     setBuilder(BoostQueryNode.class, new BoostQueryNodeBuilder());
     setBuilder(ModifierQueryNode.class, new ModifierQueryNodeBuilder());
     setBuilder(WildcardQueryNode.class, new WildcardQueryNodeBuilder());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/LegacyNumericConfig.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/LegacyNumericConfig.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/LegacyNumericConfig.java
new file mode 100644
index 0000000..985f55a
--- /dev/null
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/LegacyNumericConfig.java
@@ -0,0 +1,166 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.queryparser.flexible.standard.config;
+
+import java.text.NumberFormat;
+import java.util.Objects;
+
+import org.apache.lucene.document.FieldType;
+import org.apache.lucene.document.FieldType.LegacyNumericType;
+
+/**
+ * This class holds the configuration used to parse numeric queries and create
+ * {@link org.apache.lucene.search.LegacyNumericRangeQuery}s.
+ * 
+ * @see org.apache.lucene.search.LegacyNumericRangeQuery
+ * @see NumberFormat
+ * @deprecated Index with Points instead and use {@link PointsConfig}
+ */
+@Deprecated
+public class LegacyNumericConfig {
+  
+  private int precisionStep;
+  
+  private NumberFormat format;
+  
+  private FieldType.LegacyNumericType type;
+  
+  /**
+   * Constructs a {@link LegacyNumericConfig} object.
+   * 
+   * @param precisionStep
+   *          the precision used to index the numeric values
+   * @param format
+   *          the {@link NumberFormat} used to parse a {@link String} to
+   *          {@link Number}
+   * @param type
+   *          the numeric type used to index the numeric values
+   * 
+   * @see LegacyNumericConfig#setPrecisionStep(int)
+   * @see LegacyNumericConfig#setNumberFormat(NumberFormat)
+   * @see #setType(org.apache.lucene.document.FieldType.LegacyNumericType)
+   */
+  public LegacyNumericConfig(int precisionStep, NumberFormat format,
+      LegacyNumericType type) {
+    setPrecisionStep(precisionStep);
+    setNumberFormat(format);
+    setType(type);
+    
+  }
+  
+  /**
+   * Returns the precision used to index the numeric values
+   * 
+   * @return the precision used to index the numeric values
+   * 
+   * @see org.apache.lucene.search.LegacyNumericRangeQuery#getPrecisionStep()
+   */
+  public int getPrecisionStep() {
+    return precisionStep;
+  }
+  
+  /**
+   * Sets the precision used to index the numeric values
+   * 
+   * @param precisionStep
+   *          the precision used to index the numeric values
+   * 
+   * @see org.apache.lucene.search.LegacyNumericRangeQuery#getPrecisionStep()
+   */
+  public void setPrecisionStep(int precisionStep) {
+    this.precisionStep = precisionStep;
+  }
+  
+  /**
+   * Returns the {@link NumberFormat} used to parse a {@link String} to
+   * {@link Number}
+   * 
+   * @return the {@link NumberFormat} used to parse a {@link String} to
+   *         {@link Number}
+   */
+  public NumberFormat getNumberFormat() {
+    return format;
+  }
+  
+  /**
+   * Returns the numeric type used to index the numeric values
+   * 
+   * @return the numeric type used to index the numeric values
+   */
+  public LegacyNumericType getType() {
+    return type;
+  }
+  
+  /**
+   * Sets the numeric type used to index the numeric values
+   * 
+   * @param type the numeric type used to index the numeric values
+   */
+  public void setType(LegacyNumericType type) {
+    
+    if (type == null) {
+      throw new IllegalArgumentException("type cannot be null!");
+    }
+    
+    this.type = type;
+    
+  }
+  
+  /**
+   * Sets the {@link NumberFormat} used to parse a {@link String} to
+   * {@link Number}
+   * 
+   * @param format
+   *          the {@link NumberFormat} used to parse a {@link String} to
+   *          {@link Number}, cannot be <code>null</code>
+   */
+  public void setNumberFormat(NumberFormat format) {
+    
+    if (format == null) {
+      throw new IllegalArgumentException("format cannot be null!");
+    }
+    
+    this.format = format;
+    
+  }
+  
+  @Override
+  public boolean equals(Object obj) {
+    
+    if (obj == this) return true;
+    
+    if (obj instanceof LegacyNumericConfig) {
+      LegacyNumericConfig other = (LegacyNumericConfig) obj;
+      
+      if (this.precisionStep == other.precisionStep
+          && this.type == other.type
+          && (this.format == other.format || (this.format.equals(other.format)))) {
+        return true;
+      }
+      
+    }
+    
+    return false;
+    
+  }
+  
+  @Override
+  public int hashCode() {
+    return Objects.hash(precisionStep, type, format);
+  }
+  
+}

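For comparison, the deprecated trie path would be configured along these lines
(a sketch; the precisionStep of 8 is just a common choice, not something this
diff mandates):

    LegacyNumericConfig config = new LegacyNumericConfig(
        8,                                            // precisionStep used at index time
        NumberFormat.getNumberInstance(Locale.ROOT),  // parses query text into a Number
        FieldType.LegacyNumericType.DOUBLE);          // must match the indexed type
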
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/LegacyNumericFieldConfigListener.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/LegacyNumericFieldConfigListener.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/LegacyNumericFieldConfigListener.java
new file mode 100644
index 0000000..f2d3124
--- /dev/null
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/LegacyNumericFieldConfigListener.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.queryparser.flexible.standard.config;
+
+import java.util.Map;
+
+import org.apache.lucene.queryparser.flexible.core.config.FieldConfig;
+import org.apache.lucene.queryparser.flexible.core.config.FieldConfigListener;
+import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler;
+import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.ConfigurationKeys;
+
+/**
+ * This listener is used to listen to {@link FieldConfig} requests in
+ * {@link QueryConfigHandler} and add {@link ConfigurationKeys#LEGACY_NUMERIC_CONFIG}
+ * based on the {@link ConfigurationKeys#LEGACY_NUMERIC_CONFIG_MAP} set in the
+ * {@link QueryConfigHandler}.
+ * 
+ * @see LegacyNumericConfig
+ * @see QueryConfigHandler
+ * @see ConfigurationKeys#LEGACY_NUMERIC_CONFIG
+ * @see ConfigurationKeys#LEGACY_NUMERIC_CONFIG_MAP
+ * @deprecated Index with Points instead and use {@link PointsConfigListener}
+ */
+@Deprecated
+public class LegacyNumericFieldConfigListener implements FieldConfigListener {
+  
+  final private QueryConfigHandler config;
+  
+  /**
+   * Constructs a {@link LegacyNumericFieldConfigListener} object using the given {@link QueryConfigHandler}.
+   * 
+   * @param config the {@link QueryConfigHandler} it will listen to
+   */
+  public LegacyNumericFieldConfigListener(QueryConfigHandler config) {
+    
+    if (config == null) {
+      throw new IllegalArgumentException("config cannot be null!");
+    }
+    
+    this.config = config;
+    
+  }
+  
+  @Override
+  public void buildFieldConfig(FieldConfig fieldConfig) {
+    Map<String,LegacyNumericConfig> numericConfigMap = config
+        .get(ConfigurationKeys.LEGACY_NUMERIC_CONFIG_MAP);
+    
+    if (numericConfigMap != null) {
+      LegacyNumericConfig numericConfig = numericConfigMap
+          .get(fieldConfig.getField());
+      
+      if (numericConfig != null) {
+        fieldConfig.set(ConfigurationKeys.LEGACY_NUMERIC_CONFIG, numericConfig);
+      }
+      
+    }
+    
+  }
+  
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/NumericConfig.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/NumericConfig.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/NumericConfig.java
deleted file mode 100644
index c457a4e..0000000
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/NumericConfig.java
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.queryparser.flexible.standard.config;
-
-import java.text.NumberFormat;
-import java.util.Objects;
-
-import org.apache.lucene.document.FieldType;
-import org.apache.lucene.document.FieldType.LegacyNumericType;
-
-/**
- * This class holds the configuration used to parse numeric queries and create
- * {@link org.apache.lucene.search.LegacyNumericRangeQuery}s.
- * 
- * @see org.apache.lucene.search.LegacyNumericRangeQuery
- * @see NumberFormat
- */
-public class NumericConfig {
-  
-  private int precisionStep;
-  
-  private NumberFormat format;
-  
-  private FieldType.LegacyNumericType type;
-  
-  /**
-   * Constructs a {@link NumericConfig} object.
-   * 
-   * @param precisionStep
-   *          the precision used to index the numeric values
-   * @param format
-   *          the {@link NumberFormat} used to parse a {@link String} to
-   *          {@link Number}
-   * @param type
-   *          the numeric type used to index the numeric values
-   * 
-   * @see NumericConfig#setPrecisionStep(int)
-   * @see NumericConfig#setNumberFormat(NumberFormat)
-   * @see #setType(org.apache.lucene.document.FieldType.LegacyNumericType)
-   */
-  public NumericConfig(int precisionStep, NumberFormat format,
-      LegacyNumericType type) {
-    setPrecisionStep(precisionStep);
-    setNumberFormat(format);
-    setType(type);
-    
-  }
-  
-  /**
-   * Returns the precision used to index the numeric values
-   * 
-   * @return the precision used to index the numeric values
-   * 
-   * @see org.apache.lucene.search.LegacyNumericRangeQuery#getPrecisionStep()
-   */
-  public int getPrecisionStep() {
-    return precisionStep;
-  }
-  
-  /**
-   * Sets the precision used to index the numeric values
-   * 
-   * @param precisionStep
-   *          the precision used to index the numeric values
-   * 
-   * @see org.apache.lucene.search.LegacyNumericRangeQuery#getPrecisionStep()
-   */
-  public void setPrecisionStep(int precisionStep) {
-    this.precisionStep = precisionStep;
-  }
-  
-  /**
-   * Returns the {@link NumberFormat} used to parse a {@link String} to
-   * {@link Number}
-   * 
-   * @return the {@link NumberFormat} used to parse a {@link String} to
-   *         {@link Number}
-   */
-  public NumberFormat getNumberFormat() {
-    return format;
-  }
-  
-  /**
-   * Returns the numeric type used to index the numeric values
-   * 
-   * @return the numeric type used to index the numeric values
-   */
-  public LegacyNumericType getType() {
-    return type;
-  }
-  
-  /**
-   * Sets the numeric type used to index the numeric values
-   * 
-   * @param type the numeric type used to index the numeric values
-   */
-  public void setType(LegacyNumericType type) {
-    
-    if (type == null) {
-      throw new IllegalArgumentException("type cannot be null!");
-    }
-    
-    this.type = type;
-    
-  }
-  
-  /**
-   * Sets the {@link NumberFormat} used to parse a {@link String} to
-   * {@link Number}
-   * 
-   * @param format
-   *          the {@link NumberFormat} used to parse a {@link String} to
-   *          {@link Number}, cannot be <code>null</code>
-   */
-  public void setNumberFormat(NumberFormat format) {
-    
-    if (format == null) {
-      throw new IllegalArgumentException("format cannot be null!");
-    }
-    
-    this.format = format;
-    
-  }
-  
-  @Override
-  public boolean equals(Object obj) {
-    
-    if (obj == this) return true;
-    
-    if (obj instanceof NumericConfig) {
-      NumericConfig other = (NumericConfig) obj;
-      
-      if (this.precisionStep == other.precisionStep
-          && this.type == other.type
-          && (this.format == other.format || (this.format.equals(other.format)))) {
-        return true;
-      }
-      
-    }
-    
-    return false;
-    
-  }
-  
-  @Override
-  public int hashCode() {
-    return Objects.hash(precisionStep, type, format);
-  }
-  
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/NumericFieldConfigListener.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/NumericFieldConfigListener.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/NumericFieldConfigListener.java
deleted file mode 100644
index c28cf2c..0000000
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/NumericFieldConfigListener.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.queryparser.flexible.standard.config;
-
-import java.util.Map;
-
-import org.apache.lucene.queryparser.flexible.core.config.FieldConfig;
-import org.apache.lucene.queryparser.flexible.core.config.FieldConfigListener;
-import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler;
-import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.ConfigurationKeys;
-
-/**
- * This listener is used to listen to {@link FieldConfig} requests in
- * {@link QueryConfigHandler} and add {@link ConfigurationKeys#NUMERIC_CONFIG}
- * based on the {@link ConfigurationKeys#NUMERIC_CONFIG_MAP} set in the
- * {@link QueryConfigHandler}.
- * 
- * @see NumericConfig
- * @see QueryConfigHandler
- * @see ConfigurationKeys#NUMERIC_CONFIG
- * @see ConfigurationKeys#NUMERIC_CONFIG_MAP
- */
-public class NumericFieldConfigListener implements FieldConfigListener {
-  
-  final private QueryConfigHandler config;
-  
-  /**
-   * Construcs a {@link NumericFieldConfigListener} object using the given {@link QueryConfigHandler}.
-   * 
-   * @param config the {@link QueryConfigHandler} it will listen too
-   */
-  public NumericFieldConfigListener(QueryConfigHandler config) {
-    
-    if (config == null) {
-      throw new IllegalArgumentException("config cannot be null!");
-    }
-    
-    this.config = config;
-    
-  }
-  
-  @Override
-  public void buildFieldConfig(FieldConfig fieldConfig) {
-    Map<String,NumericConfig> numericConfigMap = config
-        .get(ConfigurationKeys.NUMERIC_CONFIG_MAP);
-    
-    if (numericConfigMap != null) {
-      NumericConfig numericConfig = numericConfigMap
-          .get(fieldConfig.getField());
-      
-      if (numericConfig != null) {
-        fieldConfig.set(ConfigurationKeys.NUMERIC_CONFIG, numericConfig);
-      }
-      
-    }
-    
-  }
-  
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/PointsConfig.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/PointsConfig.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/PointsConfig.java
new file mode 100644
index 0000000..db59b48
--- /dev/null
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/PointsConfig.java
@@ -0,0 +1,124 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.queryparser.flexible.standard.config;
+
+import java.text.NumberFormat;
+
+import org.apache.lucene.index.PointValues;
+
+/**
+ * This class holds the configuration used to parse numeric queries and create
+ * {@link PointValues} queries.
+ * 
+ * @see PointValues
+ * @see NumberFormat
+ */
+public class PointsConfig {
+    
+  private NumberFormat format;
+  
+  private Class<? extends Number> type;
+  
+  /**
+   * Constructs a {@link PointsConfig} object.
+   * 
+   * @param format
+   *          the {@link NumberFormat} used to parse a {@link String} to
+   *          {@link Number}
+   * @param type
+   *          the numeric type used to index the numeric values
+   * 
+   * @see PointsConfig#setNumberFormat(NumberFormat)
+   */
+  public PointsConfig(NumberFormat format, Class<? extends Number> type) {
+    setNumberFormat(format);
+    setType(type);    
+  }
+  
+  /**
+   * Returns the {@link NumberFormat} used to parse a {@link String} to
+   * {@link Number}
+   * 
+   * @return the {@link NumberFormat} used to parse a {@link String} to
+   *         {@link Number}
+   */
+  public NumberFormat getNumberFormat() {
+    return format;
+  }
+  
+  /**
+   * Returns the numeric type used to index the numeric values
+   * 
+   * @return the numeric type used to index the numeric values
+   */
+  public Class<? extends Number> getType() {
+    return type;
+  }
+  
+  /**
+   * Sets the numeric type used to index the numeric values
+   * 
+   * @param type the numeric type used to index the numeric values
+   */
+  public void setType(Class<? extends Number> type) {
+    if (type == null) {
+      throw new IllegalArgumentException("type cannot be null!");
+    }
+    if (Integer.class.equals(type) == false &&
+        Long.class.equals(type) == false &&
+        Float.class.equals(type) == false &&
+        Double.class.equals(type) == false) {
+      throw new IllegalArgumentException("unsupported numeric type: " + type);
+    }
+    this.type = type;
+  }
+  
+  /**
+   * Sets the {@link NumberFormat} used to parse a {@link String} to
+   * {@link Number}
+   * 
+   * @param format
+   *          the {@link NumberFormat} used to parse a {@link String} to
+   *          {@link Number}, cannot be <code>null</code>
+   */
+  public void setNumberFormat(NumberFormat format) {    
+    if (format == null) {
+      throw new IllegalArgumentException("format cannot be null!");
+    } 
+    this.format = format;
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + format.hashCode();
+    result = prime * result + type.hashCode();
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) return true;
+    if (obj == null) return false;
+    if (getClass() != obj.getClass()) return false;
+    PointsConfig other = (PointsConfig) obj;
+    if (!format.equals(other.format)) return false;
+    if (!type.equals(other.type)) return false;
+    return true;
+  }
+}
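
For context, PointsConfig simply pairs a NumberFormat with one of the four
supported numeric classes so the query parser knows how to turn query text into
a Number. A minimal usage sketch (the field semantics and locale here are
illustrative, not part of the commit):

    import java.text.NumberFormat;
    import java.util.Locale;

    // Parse values as doubles using a locale-neutral formatter.
    NumberFormat format = NumberFormat.getNumberInstance(Locale.ROOT);
    PointsConfig priceConfig = new PointsConfig(format, Double.class);

    // Only Integer, Long, Float and Double are accepted; anything else fails fast:
    // new PointsConfig(format, Short.class);  // IllegalArgumentException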

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/PointsConfigListener.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/PointsConfigListener.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/PointsConfigListener.java
new file mode 100644
index 0000000..9efbbb7
--- /dev/null
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/PointsConfigListener.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.queryparser.flexible.standard.config;
+
+import java.util.Map;
+
+import org.apache.lucene.queryparser.flexible.core.config.FieldConfig;
+import org.apache.lucene.queryparser.flexible.core.config.FieldConfigListener;
+import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler;
+import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.ConfigurationKeys;
+
+/**
+ * This listener is used to listen to {@link FieldConfig} requests in
+ * {@link QueryConfigHandler} and add {@link ConfigurationKeys#POINTS_CONFIG}
+ * based on the {@link ConfigurationKeys#POINTS_CONFIG_MAP} set in the
+ * {@link QueryConfigHandler}.
+ * 
+ * @see PointsConfig
+ * @see QueryConfigHandler
+ * @see ConfigurationKeys#POINTS_CONFIG
+ * @see ConfigurationKeys#POINTS_CONFIG_MAP
+ */
+public class PointsConfigListener implements FieldConfigListener {
+  
+  final private QueryConfigHandler config;
+  
+  /**
+   * Constructs a {@link PointsConfigListener} object using the given {@link QueryConfigHandler}.
+   * 
+   * @param config the {@link QueryConfigHandler} it will listen to

+   */
+  public PointsConfigListener(QueryConfigHandler config) { 
+    if (config == null) {
+      throw new IllegalArgumentException("config cannot be null!");
+    }
+    this.config = config;
+  }
+  
+  @Override
+  public void buildFieldConfig(FieldConfig fieldConfig) {
+    Map<String,PointsConfig> pointsConfigMap = config.get(ConfigurationKeys.POINTS_CONFIG_MAP);
+    
+    if (pointsConfigMap != null) {
+      PointsConfig pointsConfig = pointsConfigMap.get(fieldConfig.getField());
+      
+      if (pointsConfig != null) {
+        fieldConfig.set(ConfigurationKeys.POINTS_CONFIG, pointsConfig);
+      }
+    }
+  }
+}
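
In practice the listener is not registered by hand: StandardQueryConfigHandler
adds it in its constructor (see the next diff). A hedged sketch of the lookup it
performs, assuming a handler that already carries a points config map (the map
contents are illustrative):

    import java.text.NumberFormat;
    import java.util.HashMap;
    import java.util.Locale;
    import java.util.Map;

    Map<String, PointsConfig> map = new HashMap<>();
    map.put("price",
        new PointsConfig(NumberFormat.getNumberInstance(Locale.ROOT), Double.class));

    QueryConfigHandler handler = new StandardQueryConfigHandler();
    handler.set(StandardQueryConfigHandler.ConfigurationKeys.POINTS_CONFIG_MAP, map);

    // A later FieldConfig request for "price" now gets POINTS_CONFIG set on it
    // by PointsConfigListener.buildFieldConfig(...).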

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/StandardQueryConfigHandler.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/StandardQueryConfigHandler.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/StandardQueryConfigHandler.java
index 77bd7bb..bba95ee 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/StandardQueryConfigHandler.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/StandardQueryConfigHandler.java
@@ -167,21 +167,41 @@ public class StandardQueryConfigHandler extends QueryConfigHandler {
     final public static ConfigurationKey<Float> BOOST = ConfigurationKey.newInstance();
     
     /**
-     * Key used to set a field to its {@link NumericConfig}.
+     * Key used to set a field to its {@link LegacyNumericConfig}.
      * 
-     * @see StandardQueryParser#setNumericConfigMap(Map)
-     * @see StandardQueryParser#getNumericConfigMap()
+     * @see StandardQueryParser#setLegacyNumericConfigMap(Map)
+     * @see StandardQueryParser#getLegacyNumericConfigMap()
+     * @deprecated Index with Points instead and use {@link #POINTS_CONFIG}
      */
-    final public static ConfigurationKey<NumericConfig> NUMERIC_CONFIG = ConfigurationKey.newInstance();
+    @Deprecated
+    final public static ConfigurationKey<LegacyNumericConfig> LEGACY_NUMERIC_CONFIG = ConfigurationKey.newInstance();
     
     /**
-     * Key used to set the {@link NumericConfig} in {@link FieldConfig} for numeric fields.
+     * Key used to set the {@link LegacyNumericConfig} in {@link FieldConfig} for numeric fields.
      * 
-     * @see StandardQueryParser#setNumericConfigMap(Map)
-     * @see StandardQueryParser#getNumericConfigMap()
+     * @see StandardQueryParser#setLegacyNumericConfigMap(Map)
+     * @see StandardQueryParser#getLegacyNumericConfigMap()
+     * @deprecated Index with Points instead and use {@link #POINTS_CONFIG_MAP}
      */
-    final public static ConfigurationKey<Map<String,NumericConfig>> NUMERIC_CONFIG_MAP = ConfigurationKey.newInstance();
+    @Deprecated
+    final public static ConfigurationKey<Map<String,LegacyNumericConfig>> LEGACY_NUMERIC_CONFIG_MAP = ConfigurationKey.newInstance();
     
+    /**
+     * Key used to set a field to its {@link PointsConfig}.
+     * 
+     * @see StandardQueryParser#setPointsConfigMap(Map)
+     * @see StandardQueryParser#getPointsConfigMap()
+     */
+    final public static ConfigurationKey<PointsConfig> POINTS_CONFIG = ConfigurationKey.newInstance();
+
+    /**
+     * Key used to set the {@link PointsConfig} in {@link FieldConfig} for point fields.
+     * 
+     * @see StandardQueryParser#setPointsConfigMap(Map)
+     * @see StandardQueryParser#getPointsConfigMap()
+     */
+    final public static ConfigurationKey<Map<String,PointsConfig>> POINTS_CONFIG_MAP = ConfigurationKey.newInstance();
+
   }
   
   /**
@@ -195,7 +215,8 @@ public class StandardQueryConfigHandler extends QueryConfigHandler {
     // Add listener that will build the FieldConfig.
     addFieldConfigListener(new FieldBoostMapFCListener(this));
     addFieldConfigListener(new FieldDateResolutionFCListener(this));
-    addFieldConfigListener(new NumericFieldConfigListener(this));
+    addFieldConfigListener(new LegacyNumericFieldConfigListener(this));
+    addFieldConfigListener(new PointsConfigListener(this));
     
     // Default Values
     set(ConfigurationKeys.ALLOW_LEADING_WILDCARD, false); // default in 2.9
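
At the user level these keys are normally driven through StandardQueryParser
rather than set directly. A hedged sketch (setPointsConfigMap is the parser
counterpart of POINTS_CONFIG_MAP; the analyzer, field name and query text are
illustrative):

    StandardQueryParser parser = new StandardQueryParser(new StandardAnalyzer());

    Map<String, PointsConfig> pointsConfigMap = new HashMap<>();
    pointsConfigMap.put("year",
        new PointsConfig(NumberFormat.getIntegerInstance(Locale.ROOT), Integer.class));
    parser.setPointsConfigMap(pointsConfigMap);

    // "year" range syntax now produces a point range query rather than a
    // term range query (parse declares QueryNodeException):
    Query q = parser.parse("year:[2000 TO 2010]", "body");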

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/LegacyNumericQueryNode.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/LegacyNumericQueryNode.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/LegacyNumericQueryNode.java
new file mode 100644
index 0000000..b644d8a
--- /dev/null
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/LegacyNumericQueryNode.java
@@ -0,0 +1,153 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.queryparser.flexible.standard.nodes;
+
+import java.text.NumberFormat;
+import java.util.Locale;
+
+import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode;
+import org.apache.lucene.queryparser.flexible.core.nodes.FieldValuePairQueryNode;
+import org.apache.lucene.queryparser.flexible.core.nodes.QueryNodeImpl;
+import org.apache.lucene.queryparser.flexible.core.parser.EscapeQuerySyntax;
+import org.apache.lucene.queryparser.flexible.core.parser.EscapeQuerySyntax.Type;
+import org.apache.lucene.queryparser.flexible.standard.config.LegacyNumericConfig;
+
+/**
+ * This query node represents a field query that holds a numeric value. It is
+ * similar to {@link FieldQueryNode}, however the {@link #getValue()} returns a
+ * {@link Number}.
+ * 
+ * @see LegacyNumericConfig
+ * @deprecated Index with Points and use {@link PointQueryNode} instead.
+ */
+@Deprecated
+public class LegacyNumericQueryNode extends QueryNodeImpl implements
+    FieldValuePairQueryNode<Number> {
+  
+  private NumberFormat numberFormat;
+  
+  private CharSequence field;
+  
+  private Number value;
+  
+  /**
+   * Creates a {@link LegacyNumericQueryNode} object using the given field,
+   * {@link Number} value and {@link NumberFormat} used to convert the value to
+   * {@link String}.
+   * 
+   * @param field the field associated with this query node
+   * @param value the value held by this node
+   * @param numberFormat the {@link NumberFormat} used to convert the value to {@link String}
+   */
+  public LegacyNumericQueryNode(CharSequence field, Number value,
+      NumberFormat numberFormat) {
+    
+    super();
+    
+    setNumberFormat(numberFormat);
+    setField(field);
+    setValue(value);
+    
+  }
+  
+  /**
+   * Returns the field associated with this node.
+   * 
+   * @return the field associated with this node
+   */
+  @Override
+  public CharSequence getField() {
+    return this.field;
+  }
+  
+  /**
+   * Sets the field associated with this node.
+   * 
+   * @param fieldName the field associated with this node
+   */
+  @Override
+  public void setField(CharSequence fieldName) {
+    this.field = fieldName;
+  }
+  
+  /**
+   * This method is used to get the value converted to {@link String} and
+   * escaped using the given {@link EscapeQuerySyntax}.
+   * 
+   * @param escaper the {@link EscapeQuerySyntax} used to escape the value {@link String}
+   * 
+   * @return the value converted to {@link String} and escaped
+   */
+  protected CharSequence getTermEscaped(EscapeQuerySyntax escaper) {
+    return escaper.escape(numberFormat.format(this.value),
+        Locale.ROOT, Type.NORMAL);
+  }
+  
+  @Override
+  public CharSequence toQueryString(EscapeQuerySyntax escapeSyntaxParser) {
+    if (isDefaultField(this.field)) {
+      return getTermEscaped(escapeSyntaxParser);
+    } else {
+      return this.field + ":" + getTermEscaped(escapeSyntaxParser);
+    }
+  }
+  
+  /**
+   * Sets the {@link NumberFormat} used to convert the value to {@link String}.
+   * 
+   * @param format the {@link NumberFormat} used to convert the value to {@link String}
+   */
+  public void setNumberFormat(NumberFormat format) {
+    this.numberFormat = format;
+  }
+  
+  /**
+   * Returns the {@link NumberFormat} used to convert the value to {@link String}.
+   * 
+   * @return the {@link NumberFormat} used to convert the value to {@link String}
+   */
+  public NumberFormat getNumberFormat() {
+    return this.numberFormat;
+  }
+  
+  /**
+   * Returns the numeric value as {@link Number}.
+   * 
+   * @return the numeric value
+   */
+  @Override
+  public Number getValue() {
+    return value;
+  }
+  
+  /**
+   * Sets the numeric value.
+   * 
+   * @param value the numeric value
+   */
+  @Override
+  public void setValue(Number value) {
+    this.value = value;
+  }
+  
+  @Override
+  public String toString() {
+    return "<numeric field='" + this.field + "' number='"
+        + numberFormat.format(value) + "'/>";
+  }
+  
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/LegacyNumericRangeQueryNode.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/LegacyNumericRangeQueryNode.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/LegacyNumericRangeQueryNode.java
new file mode 100644
index 0000000..088ab98
--- /dev/null
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/LegacyNumericRangeQueryNode.java
@@ -0,0 +1,153 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.queryparser.flexible.standard.nodes;
+
+import org.apache.lucene.document.FieldType;
+import org.apache.lucene.document.FieldType.LegacyNumericType;
+import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
+import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages;
+import org.apache.lucene.queryparser.flexible.messages.MessageImpl;
+import org.apache.lucene.queryparser.flexible.standard.config.LegacyNumericConfig;
+
+/**
+ * This query node represents a range query composed by {@link LegacyNumericQueryNode}
+ * bounds, which means the bound values are {@link Number}s.
+ * 
+ * @see LegacyNumericQueryNode
+ * @see AbstractRangeQueryNode
+ * @deprecated Index with Points and use {@link PointRangeQueryNode} instead.
+ */
+@Deprecated
+public class LegacyNumericRangeQueryNode extends
+    AbstractRangeQueryNode<LegacyNumericQueryNode> {
+  
+  public LegacyNumericConfig numericConfig; 
+  
+  /**
+   * Constructs a {@link LegacyNumericRangeQueryNode} object using the given
+   * {@link LegacyNumericQueryNode} as its bounds and {@link LegacyNumericConfig}.
+   * 
+   * @param lower the lower bound
+   * @param upper the upper bound
+   * @param lowerInclusive <code>true</code> if the lower bound is inclusive, otherwise, <code>false</code>
+   * @param upperInclusive <code>true</code> if the upper bound is inclusive, otherwise, <code>false</code>
+   * @param numericConfig the {@link LegacyNumericConfig} associated with the upper and lower bounds
+   * 
+   * @see #setBounds(LegacyNumericQueryNode, LegacyNumericQueryNode, boolean, boolean, LegacyNumericConfig)
+   */
+  public LegacyNumericRangeQueryNode(LegacyNumericQueryNode lower, LegacyNumericQueryNode upper,
+      boolean lowerInclusive, boolean upperInclusive, LegacyNumericConfig numericConfig) throws QueryNodeException {
+    setBounds(lower, upper, lowerInclusive, upperInclusive, numericConfig);
+  }
+  
+  private static LegacyNumericType getNumericDataType(Number number) throws QueryNodeException {
+    
+    if (number instanceof Long) {
+      return FieldType.LegacyNumericType.LONG;
+    } else if (number instanceof Integer) {
+      return FieldType.LegacyNumericType.INT;
+    } else if (number instanceof Double) {
+      return LegacyNumericType.DOUBLE;
+    } else if (number instanceof Float) {
+      return FieldType.LegacyNumericType.FLOAT;
+    } else {
+      throw new QueryNodeException(
+          new MessageImpl(
+              QueryParserMessages.NUMBER_CLASS_NOT_SUPPORTED_BY_NUMERIC_RANGE_QUERY,
+              number.getClass()));
+    }
+    
+  }
+  
+  /**
+   * Sets the upper and lower bounds of this range query node and the
+   * {@link LegacyNumericConfig} associated with these bounds.
+   * 
+   * @param lower the lower bound
+   * @param upper the upper bound
+   * @param lowerInclusive <code>true</code> if the lower bound is inclusive, otherwise, <code>false</code>
+   * @param upperInclusive <code>true</code> if the upper bound is inclusive, otherwise, <code>false</code>
+   * @param numericConfig the {@link LegacyNumericConfig} associated with the upper and lower bounds
+   * 
+   */
+  public void setBounds(LegacyNumericQueryNode lower, LegacyNumericQueryNode upper,
+      boolean lowerInclusive, boolean upperInclusive, LegacyNumericConfig numericConfig) throws QueryNodeException {
+    
+    if (numericConfig == null) {
+      throw new IllegalArgumentException("numericConfig cannot be null!");
+    }
+    
+    LegacyNumericType lowerNumberType, upperNumberType;
+    
+    if (lower != null && lower.getValue() != null) {
+      lowerNumberType = getNumericDataType(lower.getValue());
+    } else {
+      lowerNumberType = null;
+    }
+    
+    if (upper != null && upper.getValue() != null) {
+      upperNumberType = getNumericDataType(upper.getValue());
+    } else {
+      upperNumberType = null;
+    }
+    
+    if (lowerNumberType != null
+        && !lowerNumberType.equals(numericConfig.getType())) {
+      throw new IllegalArgumentException(
+          "lower value's type should be the same as numericConfig type: "
+              + lowerNumberType + " != " + numericConfig.getType());
+    }
+    
+    if (upperNumberType != null
+        && !upperNumberType.equals(numericConfig.getType())) {
+      throw new IllegalArgumentException(
+          "upper value's type should be the same as numericConfig type: "
+              + upperNumberType + " != " + numericConfig.getType());
+    }
+    
+    super.setBounds(lower, upper, lowerInclusive, upperInclusive);
+    this.numericConfig = numericConfig;
+    
+  }
+  
+  /**
+   * Returns the {@link LegacyNumericConfig} associated with the lower and upper bounds.
+   * 
+   * @return the {@link LegacyNumericConfig} associated with the lower and upper bounds
+   */
+  public LegacyNumericConfig getNumericConfig() {
+    return this.numericConfig;
+  }
+  
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("<numericRange lowerInclusive='");
+    
+    sb.append(isLowerInclusive()).append("' upperInclusive='").append(
+        isUpperInclusive()).append(
+        "' precisionStep='" + numericConfig.getPrecisionStep()).append(
+        "' type='" + numericConfig.getType()).append("'>\n");
+    
+    sb.append(getLowerBound()).append('\n');
+    sb.append(getUpperBound()).append('\n');
+    sb.append("</numericRange>");
+    
+    return sb.toString();
+    
+  }
+  
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/NumericQueryNode.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/NumericQueryNode.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/NumericQueryNode.java
deleted file mode 100644
index 7509a39..0000000
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/NumericQueryNode.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.queryparser.flexible.standard.nodes;
-
-import java.text.NumberFormat;
-import java.util.Locale;
-
-import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode;
-import org.apache.lucene.queryparser.flexible.core.nodes.FieldValuePairQueryNode;
-import org.apache.lucene.queryparser.flexible.core.nodes.QueryNodeImpl;
-import org.apache.lucene.queryparser.flexible.core.parser.EscapeQuerySyntax;
-import org.apache.lucene.queryparser.flexible.core.parser.EscapeQuerySyntax.Type;
-import org.apache.lucene.queryparser.flexible.standard.config.NumericConfig;
-
-/**
- * This query node represents a field query that holds a numeric value. It is
- * similar to {@link FieldQueryNode}, however the {@link #getValue()} returns a
- * {@link Number}.
- * 
- * @see NumericConfig
- */
-public class NumericQueryNode extends QueryNodeImpl implements
-    FieldValuePairQueryNode<Number> {
-  
-  private NumberFormat numberFormat;
-  
-  private CharSequence field;
-  
-  private Number value;
-  
-  /**
-   * Creates a {@link NumericQueryNode} object using the given field,
-   * {@link Number} value and {@link NumberFormat} used to convert the value to
-   * {@link String}.
-   * 
-   * @param field the field associated with this query node
-   * @param value the value hold by this node
-   * @param numberFormat the {@link NumberFormat} used to convert the value to {@link String}
-   */
-  public NumericQueryNode(CharSequence field, Number value,
-      NumberFormat numberFormat) {
-    
-    super();
-    
-    setNumberFormat(numberFormat);
-    setField(field);
-    setValue(value);
-    
-  }
-  
-  /**
-   * Returns the field associated with this node.
-   * 
-   * @return the field associated with this node
-   */
-  @Override
-  public CharSequence getField() {
-    return this.field;
-  }
-  
-  /**
-   * Sets the field associated with this node.
-   * 
-   * @param fieldName the field associated with this node
-   */
-  @Override
-  public void setField(CharSequence fieldName) {
-    this.field = fieldName;
-  }
-  
-  /**
-   * This method is used to get the value converted to {@link String} and
-   * escaped using the given {@link EscapeQuerySyntax}.
-   * 
-   * @param escaper the {@link EscapeQuerySyntax} used to escape the value {@link String}
-   * 
-   * @return the value converte to {@link String} and escaped
-   */
-  protected CharSequence getTermEscaped(EscapeQuerySyntax escaper) {
-    return escaper.escape(numberFormat.format(this.value),
-        Locale.ROOT, Type.NORMAL);
-  }
-  
-  @Override
-  public CharSequence toQueryString(EscapeQuerySyntax escapeSyntaxParser) {
-    if (isDefaultField(this.field)) {
-      return getTermEscaped(escapeSyntaxParser);
-    } else {
-      return this.field + ":" + getTermEscaped(escapeSyntaxParser);
-    }
-  }
-  
-  /**
-   * Sets the {@link NumberFormat} used to convert the value to {@link String}.
-   * 
-   * @param format the {@link NumberFormat} used to convert the value to {@link String}
-   */
-  public void setNumberFormat(NumberFormat format) {
-    this.numberFormat = format;
-  }
-  
-  /**
-   * Returns the {@link NumberFormat} used to convert the value to {@link String}.
-   * 
-   * @return the {@link NumberFormat} used to convert the value to {@link String}
-   */
-  public NumberFormat getNumberFormat() {
-    return this.numberFormat;
-  }
-  
-  /**
-   * Returns the numeric value as {@link Number}.
-   * 
-   * @return the numeric value
-   */
-  @Override
-  public Number getValue() {
-    return value;
-  }
-  
-  /**
-   * Sets the numeric value.
-   * 
-   * @param value the numeric value
-   */
-  @Override
-  public void setValue(Number value) {
-    this.value = value;
-  }
-  
-  @Override
-  public String toString() {
-    return "<numeric field='" + this.field + "' number='"
-        + numberFormat.format(value) + "'/>";
-  }
-  
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/NumericRangeQueryNode.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/NumericRangeQueryNode.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/NumericRangeQueryNode.java
deleted file mode 100644
index c132aa1..0000000
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/NumericRangeQueryNode.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.queryparser.flexible.standard.nodes;
-
-import org.apache.lucene.document.FieldType;
-import org.apache.lucene.document.FieldType.LegacyNumericType;
-import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
-import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages;
-import org.apache.lucene.queryparser.flexible.messages.MessageImpl;
-import org.apache.lucene.queryparser.flexible.standard.config.NumericConfig;
-
-/**
- * This query node represents a range query composed by {@link NumericQueryNode}
- * bounds, which means the bound values are {@link Number}s.
- * 
- * @see NumericQueryNode
- * @see AbstractRangeQueryNode
- */
-public class NumericRangeQueryNode extends
-    AbstractRangeQueryNode<NumericQueryNode> {
-  
-  public NumericConfig numericConfig; 
-  
-  /**
-   * Constructs a {@link NumericRangeQueryNode} object using the given
-   * {@link NumericQueryNode} as its bounds and {@link NumericConfig}.
-   * 
-   * @param lower the lower bound
-   * @param upper the upper bound
-   * @param lowerInclusive <code>true</code> if the lower bound is inclusive, otherwise, <code>false</code>
-   * @param upperInclusive <code>true</code> if the upper bound is inclusive, otherwise, <code>false</code>
-   * @param numericConfig the {@link NumericConfig} that represents associated with the upper and lower bounds
-   * 
-   * @see #setBounds(NumericQueryNode, NumericQueryNode, boolean, boolean, NumericConfig)
-   */
-  public NumericRangeQueryNode(NumericQueryNode lower, NumericQueryNode upper,
-      boolean lowerInclusive, boolean upperInclusive, NumericConfig numericConfig) throws QueryNodeException {
-    setBounds(lower, upper, lowerInclusive, upperInclusive, numericConfig);
-  }
-  
-  private static LegacyNumericType getNumericDataType(Number number) throws QueryNodeException {
-    
-    if (number instanceof Long) {
-      return FieldType.LegacyNumericType.LONG;
-    } else if (number instanceof Integer) {
-      return FieldType.LegacyNumericType.INT;
-    } else if (number instanceof Double) {
-      return LegacyNumericType.DOUBLE;
-    } else if (number instanceof Float) {
-      return FieldType.LegacyNumericType.FLOAT;
-    } else {
-      throw new QueryNodeException(
-          new MessageImpl(
-              QueryParserMessages.NUMBER_CLASS_NOT_SUPPORTED_BY_NUMERIC_RANGE_QUERY,
-              number.getClass()));
-    }
-    
-  }
-  
-  /**
-   * Sets the upper and lower bounds of this range query node and the
-   * {@link NumericConfig} associated with these bounds.
-   * 
-   * @param lower the lower bound
-   * @param upper the upper bound
-   * @param lowerInclusive <code>true</code> if the lower bound is inclusive, otherwise, <code>false</code>
-   * @param upperInclusive <code>true</code> if the upper bound is inclusive, otherwise, <code>false</code>
-   * @param numericConfig the {@link NumericConfig} that represents associated with the upper and lower bounds
-   * 
-   */
-  public void setBounds(NumericQueryNode lower, NumericQueryNode upper,
-      boolean lowerInclusive, boolean upperInclusive, NumericConfig numericConfig) throws QueryNodeException {
-    
-    if (numericConfig == null) {
-      throw new IllegalArgumentException("numericConfig cannot be null!");
-    }
-    
-    LegacyNumericType lowerNumberType, upperNumberType;
-    
-    if (lower != null && lower.getValue() != null) {
-      lowerNumberType = getNumericDataType(lower.getValue());
-    } else {
-      lowerNumberType = null;
-    }
-    
-    if (upper != null && upper.getValue() != null) {
-      upperNumberType = getNumericDataType(upper.getValue());
-    } else {
-      upperNumberType = null;
-    }
-    
-    if (lowerNumberType != null
-        && !lowerNumberType.equals(numericConfig.getType())) {
-      throw new IllegalArgumentException(
-          "lower value's type should be the same as numericConfig type: "
-              + lowerNumberType + " != " + numericConfig.getType());
-    }
-    
-    if (upperNumberType != null
-        && !upperNumberType.equals(numericConfig.getType())) {
-      throw new IllegalArgumentException(
-          "upper value's type should be the same as numericConfig type: "
-              + upperNumberType + " != " + numericConfig.getType());
-    }
-    
-    super.setBounds(lower, upper, lowerInclusive, upperInclusive);
-    this.numericConfig = numericConfig;
-    
-  }
-  
-  /**
-   * Returns the {@link NumericConfig} associated with the lower and upper bounds.
-   * 
-   * @return the {@link NumericConfig} associated with the lower and upper bounds
-   */
-  public NumericConfig getNumericConfig() {
-    return this.numericConfig;
-  }
-  
-  @Override
-  public String toString() {
-    StringBuilder sb = new StringBuilder("<numericRange lowerInclusive='");
-    
-    sb.append(isLowerInclusive()).append("' upperInclusive='").append(
-        isUpperInclusive()).append(
-        "' precisionStep='" + numericConfig.getPrecisionStep()).append(
-        "' type='" + numericConfig.getType()).append("'>\n");
-    
-    sb.append(getLowerBound()).append('\n');
-    sb.append(getUpperBound()).append('\n');
-    sb.append("</numericRange>");
-    
-    return sb.toString();
-    
-  }
-  
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/PointQueryNode.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/PointQueryNode.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/PointQueryNode.java
new file mode 100644
index 0000000..6d4cba7
--- /dev/null
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/PointQueryNode.java
@@ -0,0 +1,151 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.queryparser.flexible.standard.nodes;
+
+import java.text.NumberFormat;
+import java.util.Locale;
+
+import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode;
+import org.apache.lucene.queryparser.flexible.core.nodes.FieldValuePairQueryNode;
+import org.apache.lucene.queryparser.flexible.core.nodes.QueryNodeImpl;
+import org.apache.lucene.queryparser.flexible.core.parser.EscapeQuerySyntax;
+import org.apache.lucene.queryparser.flexible.core.parser.EscapeQuerySyntax.Type;
+import org.apache.lucene.queryparser.flexible.standard.config.PointsConfig;
+
+/**
+ * This query node represents a field query that holds a point value. It is
+ * similar to {@link FieldQueryNode}, however the {@link #getValue()} returns a
+ * {@link Number}.
+ * 
+ * @see PointsConfig
+ */
+public class PointQueryNode extends QueryNodeImpl implements
+    FieldValuePairQueryNode<Number> {
+  
+  private NumberFormat numberFormat;
+  
+  private CharSequence field;
+  
+  private Number value;
+  
+  /**
+   * Creates a {@link PointQueryNode} object using the given field,
+   * {@link Number} value and {@link NumberFormat} used to convert the value to
+   * {@link String}.
+   * 
+   * @param field the field associated with this query node
+   * @param value the value held by this node
+   * @param numberFormat the {@link NumberFormat} used to convert the value to {@link String}
+   */
+  public PointQueryNode(CharSequence field, Number value,
+      NumberFormat numberFormat) {
+    
+    super();
+    
+    setNumberFormat(numberFormat);
+    setField(field);
+    setValue(value);
+    
+  }
+  
+  /**
+   * Returns the field associated with this node.
+   * 
+   * @return the field associated with this node
+   */
+  @Override
+  public CharSequence getField() {
+    return this.field;
+  }
+  
+  /**
+   * Sets the field associated with this node.
+   * 
+   * @param fieldName the field associated with this node
+   */
+  @Override
+  public void setField(CharSequence fieldName) {
+    this.field = fieldName;
+  }
+  
+  /**
+   * This method is used to get the value converted to {@link String} and
+   * escaped using the given {@link EscapeQuerySyntax}.
+   * 
+   * @param escaper the {@link EscapeQuerySyntax} used to escape the value {@link String}
+   * 
+   * @return the value converted to {@link String} and escaped
+   */
+  protected CharSequence getTermEscaped(EscapeQuerySyntax escaper) {
+    return escaper.escape(numberFormat.format(this.value),
+        Locale.ROOT, Type.NORMAL);
+  }
+  
+  @Override
+  public CharSequence toQueryString(EscapeQuerySyntax escapeSyntaxParser) {
+    if (isDefaultField(this.field)) {
+      return getTermEscaped(escapeSyntaxParser);
+    } else {
+      return this.field + ":" + getTermEscaped(escapeSyntaxParser);
+    }
+  }
+  
+  /**
+   * Sets the {@link NumberFormat} used to convert the value to {@link String}.
+   * 
+   * @param format the {@link NumberFormat} used to convert the value to {@link String}
+   */
+  public void setNumberFormat(NumberFormat format) {
+    this.numberFormat = format;
+  }
+  
+  /**
+   * Returns the {@link NumberFormat} used to convert the value to {@link String}.
+   * 
+   * @return the {@link NumberFormat} used to convert the value to {@link String}
+   */
+  public NumberFormat getNumberFormat() {
+    return this.numberFormat;
+  }
+  
+  /**
+   * Returns the numeric value as {@link Number}.
+   * 
+   * @return the numeric value
+   */
+  @Override
+  public Number getValue() {
+    return value;
+  }
+  
+  /**
+   * Sets the numeric value.
+   * 
+   * @param value the numeric value
+   */
+  @Override
+  public void setValue(Number value) {
+    this.value = value;
+  }
+  
+  @Override
+  public String toString() {
+    return "<numeric field='" + this.field + "' number='"
+        + numberFormat.format(value) + "'/>";
+  }
+  
+}
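
The node is a plain value holder; a quick sketch of it in isolation (field name
and value illustrative):

    NumberFormat nf = NumberFormat.getIntegerInstance(Locale.ROOT);
    PointQueryNode node = new PointQueryNode("year", 42, nf);
    System.out.println(node);  // <numeric field='year' number='42'/>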


[13/50] [abbrv] lucene-solr git commit: LUCENE-7079: add newSetQuery(String, Collection) to primitive Point types

Posted by ho...@apache.org.
LUCENE-7079: add newSetQuery(String, Collection) to primitive Point types


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/58623e00
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/58623e00
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/58623e00

Branch: refs/heads/jira/SOLR-445
Commit: 58623e00ccd5ff4a6b97ad9c518fd597d9abdfda
Parents: dacbf33
Author: Robert Muir <rm...@apache.org>
Authored: Wed Mar 9 11:25:37 2016 -0500
Committer: Robert Muir <rm...@apache.org>
Committed: Wed Mar 9 11:25:37 2016 -0500

----------------------------------------------------------------------
 .../org/apache/lucene/document/DoublePoint.java     | 16 ++++++++++++++++
 .../java/org/apache/lucene/document/FloatPoint.java | 16 ++++++++++++++++
 .../java/org/apache/lucene/document/IntPoint.java   | 16 ++++++++++++++++
 .../java/org/apache/lucene/document/LongPoint.java  | 16 ++++++++++++++++
 .../org/apache/lucene/search/TestPointQueries.java  |  8 ++++++++
 .../apache/lucene/search/join/TestBlockJoin.java    |  7 +------
 6 files changed, 73 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/58623e00/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java b/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java
index 26ac0ce..1133b22 100644
--- a/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java
+++ b/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java
@@ -17,6 +17,7 @@
 package org.apache.lucene.document;
 
 import java.util.Arrays;
+import java.util.Collection;
 
 import org.apache.lucene.index.PointValues;
 import org.apache.lucene.search.PointInSetQuery;
@@ -247,4 +248,19 @@ public final class DoublePoint extends Field {
       }
     };
   }
+  
+  /**
+   * Create a query matching any of the specified 1D values.  This is the points equivalent of {@code TermsQuery}.
+   * 
+   * @param field field name. must not be {@code null}.
+   * @param values all values to match
+   */
+  public static Query newSetQuery(String field, Collection<Double> values) {
+    Double[] boxed = values.toArray(new Double[0]);
+    double[] unboxed = new double[boxed.length];
+    for (int i = 0; i < boxed.length; i++) {
+      unboxed[i] = boxed[i];
+    }
+    return newSetQuery(field, unboxed);
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/58623e00/lucene/core/src/java/org/apache/lucene/document/FloatPoint.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/document/FloatPoint.java b/lucene/core/src/java/org/apache/lucene/document/FloatPoint.java
index c58881e..3d110db 100644
--- a/lucene/core/src/java/org/apache/lucene/document/FloatPoint.java
+++ b/lucene/core/src/java/org/apache/lucene/document/FloatPoint.java
@@ -17,6 +17,7 @@
 package org.apache.lucene.document;
 
 import java.util.Arrays;
+import java.util.Collection;
 
 import org.apache.lucene.index.PointValues;
 import org.apache.lucene.search.PointInSetQuery;
@@ -247,4 +248,19 @@ public final class FloatPoint extends Field {
       }
     };
   }
+
+  /**
+   * Create a query matching any of the specified 1D values.  This is the points equivalent of {@code TermsQuery}.
+   * 
+   * @param field field name. must not be {@code null}.
+   * @param values all values to match
+   */
+  public static Query newSetQuery(String field, Collection<Float> values) {
+    Float[] boxed = values.toArray(new Float[0]);
+    float[] unboxed = new float[boxed.length];
+    for (int i = 0; i < boxed.length; i++) {
+      unboxed[i] = boxed[i];
+    }
+    return newSetQuery(field, unboxed);
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/58623e00/lucene/core/src/java/org/apache/lucene/document/IntPoint.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/document/IntPoint.java b/lucene/core/src/java/org/apache/lucene/document/IntPoint.java
index cb8315f..53ae3d3 100644
--- a/lucene/core/src/java/org/apache/lucene/document/IntPoint.java
+++ b/lucene/core/src/java/org/apache/lucene/document/IntPoint.java
@@ -17,6 +17,7 @@
 package org.apache.lucene.document;
 
 import java.util.Arrays;
+import java.util.Collection;
 
 import org.apache.lucene.index.PointValues;
 import org.apache.lucene.search.PointInSetQuery;
@@ -247,4 +248,19 @@ public final class IntPoint extends Field {
       }
     };
   }
+  
+  /**
+   * Create a query matching any of the specified 1D values.  This is the points equivalent of {@code TermsQuery}.
+   * 
+   * @param field field name. must not be {@code null}.
+   * @param values all values to match
+   */
+  public static Query newSetQuery(String field, Collection<Integer> values) {
+    Integer[] boxed = values.toArray(new Integer[0]);
+    int[] unboxed = new int[boxed.length];
+    for (int i = 0; i < boxed.length; i++) {
+      unboxed[i] = boxed[i];
+    }
+    return newSetQuery(field, unboxed);
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/58623e00/lucene/core/src/java/org/apache/lucene/document/LongPoint.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/document/LongPoint.java b/lucene/core/src/java/org/apache/lucene/document/LongPoint.java
index ff78132..c4fd887 100644
--- a/lucene/core/src/java/org/apache/lucene/document/LongPoint.java
+++ b/lucene/core/src/java/org/apache/lucene/document/LongPoint.java
@@ -17,6 +17,7 @@
 package org.apache.lucene.document;
 
 import java.util.Arrays;
+import java.util.Collection;
 
 import org.apache.lucene.index.PointValues;
 import org.apache.lucene.search.PointInSetQuery;
@@ -247,4 +248,19 @@ public final class LongPoint extends Field {
       }
     };
   }
+  
+  /**
+   * Create a query matching any of the specified 1D values.  This is the points equivalent of {@code TermsQuery}.
+   * 
+   * @param field field name. must not be {@code null}.
+   * @param values all values to match
+   */
+  public static Query newSetQuery(String field, Collection<Long> values) {
+    Long[] boxed = values.toArray(new Long[0]);
+    long[] unboxed = new long[boxed.length];
+    for (int i = 0; i < boxed.length; i++) {
+      unboxed[i] = boxed[i];
+    }
+    return newSetQuery(field, unboxed);
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/58623e00/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java b/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java
index 4d9aa59..c72ab44 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java
@@ -1602,6 +1602,14 @@ public class TestPointQueries extends LuceneTestCase {
     r.close();
     dir.close();
   }
+  
+  /** Boxed methods for primitive types should behave the same as unboxed: just sugar */
+  public void testPointIntSetBoxed() throws Exception {
+    assertEquals(IntPoint.newSetQuery("foo", 1, 2, 3), IntPoint.newSetQuery("foo", Arrays.asList(1, 2, 3)));
+    assertEquals(FloatPoint.newSetQuery("foo", 1F, 2F, 3F), FloatPoint.newSetQuery("foo", Arrays.asList(1F, 2F, 3F)));
+    assertEquals(LongPoint.newSetQuery("foo", 1L, 2L, 3L), LongPoint.newSetQuery("foo", Arrays.asList(1L, 2L, 3L)));
+    assertEquals(DoublePoint.newSetQuery("foo", 1D, 2D, 3D), DoublePoint.newSetQuery("foo", Arrays.asList(1D, 2D, 3D)));
+  }
 
   public void testBasicMultiValuedPointInSetQuery() throws Exception {
     Directory dir = newDirectory();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/58623e00/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java
index b5f2038..9c39299 100644
--- a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java
+++ b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java
@@ -615,12 +615,7 @@ public class TestBlockJoin extends LuceneTestCase {
     }
 
     if (!toDelete.isEmpty()) {
-      // TODO: we should add newSetQuery(String, Collection<T>) ? this is awkward.
-      int[] array = new int[toDelete.size()];
-      for (int i = 0; i < toDelete.size(); i++) {
-        array[i] = toDelete.get(i);
-      }
-      Query query = IntPoint.newSetQuery("blockID", array);
+      Query query = IntPoint.newSetQuery("blockID", toDelete);
       w.deleteDocuments(query);
       joinW.deleteDocuments(query);
     }
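
The TestBlockJoin change above is the motivating call site; the same sugar works
for any Collection of boxed values. A hedged sketch (the field name, ids, and
the open IndexWriter are assumptions):

    import java.util.Arrays;
    import java.util.List;

    List<Integer> toDelete = Arrays.asList(17, 42, 99);
    Query query = IntPoint.newSetQuery("blockID", toDelete);
    writer.deleteDocuments(query);  // 'writer' is an already-open IndexWriter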


[36/50] [abbrv] lucene-solr git commit: SOLR-445: fix my silly mistake with the merged exception metadata

Posted by ho...@apache.org.
SOLR-445: fix my silly mistake with the merged exception metadata


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/0ccee156
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/0ccee156
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/0ccee156

Branch: refs/heads/jira/SOLR-445
Commit: 0ccee15624d75d305aa3ab4d0b3c2dc5d93d8fea
Parents: b24fb02
Author: Chris Hostetter <ho...@apache.org>
Authored: Thu Mar 10 16:30:36 2016 -0700
Committer: Chris Hostetter <ho...@apache.org>
Committed: Thu Mar 10 16:30:36 2016 -0700

----------------------------------------------------------------------
 .../processor/DistributedUpdateProcessor.java   | 34 ++++++++------------
 .../processor/TolerantUpdateProcessor.java      | 14 +++++++-
 2 files changed, 27 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0ccee156/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
index 105d3ff..01aa38b 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
@@ -1702,26 +1702,20 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
       super(buildCode(errors), buildMsg(errors), null);
       this.errors = errors;
 
-      // nocommit: the code below is useful for preserving things like "root-error-class"
-      // nocommit: but wreaks havoc on ToleranteUpdateProcessor's exception annotating.
-      //
-      // nocommit: before enabling the code below, we need to make ToleranteUpdateProcessor 
-      // nocommit: smart enough to remove metadata it cares about before adding it (and others) back
-      //
-      // // create a merged copy of the metadata from all wrapped exceptions
-      // NamedList<String> metadata = new NamedList<String>();
-      // for (Error error : errors) {
-      //   if (error.e instanceof SolrException) {
-      //     SolrException e = (SolrException) error.e;
-      //     NamedList<String> eMeta = e.getMetadata();
-      //     if (null != eMeta) {
-      //       metadata.addAll(eMeta);
-      //     }
-      //   }
-      // }
-      // if (0 < metadata.size()) {
-      //   this.setMetadata(metadata);
-      // }
+      // create a merged copy of the metadata from all wrapped exceptions
+      NamedList<String> metadata = new NamedList<String>();
+      for (Error error : errors) {
+        if (error.e instanceof SolrException) {
+          SolrException e = (SolrException) error.e;
+          NamedList<String> eMeta = e.getMetadata();
+          if (null != eMeta) {
+            metadata.addAll(eMeta);
+          }
+        }
+      }
+      if (0 < metadata.size()) {
+        this.setMetadata(metadata);
+      }
     }
 
     /** Helper method for constructor */

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0ccee156/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
index a858e92..40e9b23 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
@@ -217,7 +217,7 @@ public class TolerantUpdateProcessor extends UpdateRequestProcessor {
       firstErrTracker.caught(duae);
 
       
-      // adjust out stats based on the distributed errors
+      // adjust our stats based on each of the distributed errors
       for (Error error : duae.errors) {
         // we can't trust the req info from the Error, because multiple original requests might have been
         // lumped together
@@ -373,6 +373,18 @@ public class TolerantUpdateProcessor extends UpdateRequestProcessor {
       if (null == firstErrMetadata) { // obnoxious
         firstErrMetadata = new NamedList<String>();
         first.setMetadata(firstErrMetadata);
+      } else {
+        // any existing metadata representing ToleratedUpdateErrors in this single exception needs to be removed
+        // so we can add *all* of the known ToleratedUpdateErrors (from this and other exceptions)
+        for (int i = 0; i < firstErrMetadata.size(); i++) {
+          if (null != ToleratedUpdateError.parseMetadataIfToleratedUpdateError
+              (firstErrMetadata.getName(i), firstErrMetadata.getVal(i))) {
+               
+            firstErrMetadata.remove(i);
+            // NOTE: post-decrementing the index so we don't miss anything as we remove items
+            i--;
+          }
+        }
       }
 
       for (ToleratedUpdateError te : errors) {
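
With the merge enabled, a client that catches the distributed failure can read
the combined metadata back off the exception. A hedged sketch ("root-error-class"
is the key the removed nocommit comments were protecting; the catch site is
illustrative):

    try {
      // ... update request that fails on several shards ...
    } catch (SolrException e) {
      NamedList<String> meta = e.getMetadata();  // merged from all wrapped errors
      if (meta != null) {
        String rootErrorClass = meta.get("root-error-class");
      }
    }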


[16/50] [abbrv] lucene-solr git commit: SOLR-8765: Set parameters correctly in async shard requests

Posted by ho...@apache.org.
SOLR-8765: Set parameters correctly in async shard requests


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/4015f12f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/4015f12f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/4015f12f

Branch: refs/heads/jira/SOLR-445
Commit: 4015f12ff1ff2d55fc94611a93e47015cca83b36
Parents: a7ff1c8
Author: Alan Woodward <ro...@apache.org>
Authored: Wed Mar 9 17:38:07 2016 +0000
Committer: Alan Woodward <ro...@apache.org>
Committed: Wed Mar 9 17:38:38 2016 +0000

----------------------------------------------------------------------
 .../apache/solr/client/solrj/request/CollectionAdminRequest.java   | 2 ++
 1 file changed, 2 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4015f12f/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
index 4f28408..76eb19f 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
@@ -203,6 +203,8 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
 
     public AsyncShardSpecificAdminRequest(CollectionAction action, String collection, String shard) {
       super(action);
+      this.collection = collection;
+      this.shard = shard;
     }
 
     @Deprecated
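
The two added assignments are the whole fix: the constructor accepted collection and shard but never stored them, so every async shard request built through it was sent with null parameters. The failure mode, reduced to plain Java (hypothetical names, not Solr code):

    public class IgnoredCtorArgs {
      static class Request {
        String collection;
        String shard;
        Request(String collection, String shard) {
          // Before the fix the constructor body left both fields null.
          this.collection = collection;
          this.shard = shard;
        }
      }
      public static void main(String[] args) {
        Request r = new Request("collection1", "shard1");
        System.out.println(r.collection + "/" + r.shard); // collection1/shard1 (was null/null)
      }
    }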


[24/50] [abbrv] lucene-solr git commit: LUCENE-7081: prefix-compress compressible fixed-width data (like InetAddress/BigInteger)

Posted by ho...@apache.org.
LUCENE-7081: prefix-compress compressible fixed-width data (like InetAddress/BigInteger)


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/162636bf
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/162636bf
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/162636bf

Branch: refs/heads/jira/SOLR-445
Commit: 162636bf05b5b6b35a79bacd2e7440830b05960f
Parents: 89cc676
Author: Robert Muir <rm...@apache.org>
Authored: Thu Mar 10 07:25:48 2016 -0500
Committer: Robert Muir <rm...@apache.org>
Committed: Thu Mar 10 07:25:48 2016 -0500

----------------------------------------------------------------------
 .../lucene54/Lucene54DocValuesConsumer.java     | 21 +++++++++++++++++---
 1 file changed, 18 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/162636bf/lucene/core/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesConsumer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesConsumer.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesConsumer.java
index 858c54b..96acfd2 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesConsumer.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesConsumer.java
@@ -411,17 +411,32 @@ final class Lucene54DocValuesConsumer extends DocValuesConsumer implements Close
   
   /** expert: writes a value dictionary for a sorted/sortedset field */
   private void addTermsDict(FieldInfo field, final Iterable<BytesRef> values) throws IOException {
-    // first check if it's a "fixed-length" terms dict
+    // first check if it's a "fixed-length" terms dict and, if so, how compressible it is
     int minLength = Integer.MAX_VALUE;
     int maxLength = Integer.MIN_VALUE;
     long numValues = 0;
+    BytesRefBuilder previousValue = new BytesRefBuilder();
+    long prefixSum = 0; // only valid for fixed-width data, as we have a choice there
     for (BytesRef v : values) {
       minLength = Math.min(minLength, v.length);
       maxLength = Math.max(maxLength, v.length);
+      if (minLength == maxLength) {
+        int termPosition = (int) (numValues & INTERVAL_MASK);
+        if (termPosition == 0) {
+          // first term in block, save it away to compare against the last term later
+          previousValue.copyBytes(v);
+        } else if (termPosition == INTERVAL_COUNT - 1) {
+          // last term in block, accumulate shared prefix against first term
+          prefixSum += StringHelper.bytesDifference(previousValue.get(), v);
+        }
+      }
       numValues++;
     }
-    if (minLength == maxLength) {
-      // no index needed: direct addressing by mult
+    // for fixed width data, look at the avg(shared prefix) before deciding how to encode:
+    // prefix compression "costs" worst case 2 bytes per term because we must store suffix lengths.
+    // so if we share at least 3 bytes on average, always compress.
+    if (minLength == maxLength && prefixSum <= 3*(numValues >> INTERVAL_SHIFT)) {
+      // no index needed: not very compressible, direct addressing by mult
       addBinaryField(field, values);
     } else if (numValues < REVERSE_INTERVAL_COUNT) {
       // low cardinality: waste a few KB of ram, but can't really use fancy index etc
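
Concrete numbers make the new threshold easier to see. prefixSum approximates the bytes shared within each sampled block (first term vs. last term), and direct addressing is kept only while that sharing stays at or below 3 bytes per block on average. A self-contained sketch with illustrative values (the interval shift and the measured prefixSum below are made up; the real constants are the codec's INTERVAL_SHIFT/INTERVAL_COUNT):

    public class PrefixHeuristic {
      public static void main(String[] args) {
        long numValues = 65536;     // fixed-width terms (minLength == maxLength)
        int intervalShift = 4;      // hypothetical: blocks of 16 terms
        long blocks = numValues >> intervalShift; // 4096 sampled blocks
        long budget = 3 * blocks;                 // 12288 bytes: break-even sharing
        long prefixSum = 20000;                   // example measurement of shared bytes
        // Mirrors the patch: direct addressing only when sharing is within budget.
        boolean directAddressing = prefixSum <= budget;
        System.out.println(directAddressing ? "direct addressing" : "prefix compression");
      }
    }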


[31/50] [abbrv] lucene-solr git commit: LUCENE-7086: move SlowCompositeReaderWrapper to misc module, and throw clear exc if you try to use in with points

Posted by ho...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadSpans.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadSpans.java b/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadSpans.java
index 3f168bb..179b971 100644
--- a/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadSpans.java
+++ b/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadSpans.java
@@ -42,7 +42,6 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.similarities.ClassicSimilarity;
 import org.apache.lucene.search.similarities.Similarity;
-import org.apache.lucene.search.spans.MultiSpansWrapper;
 import org.apache.lucene.search.spans.SpanCollector;
 import org.apache.lucene.search.spans.SpanFirstQuery;
 import org.apache.lucene.search.spans.SpanNearQuery;
@@ -75,12 +74,12 @@ public class TestPayloadSpans extends LuceneTestCase {
     Spans spans;
     stq = new SpanTermQuery(new Term(PayloadHelper.FIELD, "seventy"));
 
-    spans = MultiSpansWrapper.wrap(indexReader, stq, SpanWeight.Postings.PAYLOADS);
+    spans = stq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
     assertTrue("spans is null and it shouldn't be", spans != null);
     checkSpans(spans, 100, 1, 1, 1);
 
     stq = new SpanTermQuery(new Term(PayloadHelper.NO_PAYLOAD_FIELD, "seventy"));  
-    spans = MultiSpansWrapper.wrap(indexReader, stq, SpanWeight.Postings.PAYLOADS);
+    spans = stq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
     assertTrue("spans is null and it shouldn't be", spans != null);
     checkSpans(spans, 100, 0, 0, 0);
   }
@@ -91,7 +90,7 @@ public class TestPayloadSpans extends LuceneTestCase {
     SpanFirstQuery sfq;
     match = new SpanTermQuery(new Term(PayloadHelper.FIELD, "one"));
     sfq = new SpanFirstQuery(match, 2);
-    Spans spans = MultiSpansWrapper.wrap(indexReader, sfq, SpanWeight.Postings.PAYLOADS);
+    Spans spans = sfq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
     checkSpans(spans, 109, 1, 1, 1);
     //Test more complicated subclause
     SpanQuery[] clauses = new SpanQuery[2];
@@ -99,11 +98,11 @@ public class TestPayloadSpans extends LuceneTestCase {
     clauses[1] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "hundred"));
     match = new SpanNearQuery(clauses, 0, true);
     sfq = new SpanFirstQuery(match, 2);
-    checkSpans(MultiSpansWrapper.wrap(indexReader, sfq, SpanWeight.Postings.PAYLOADS), 100, 2, 1, 1);
+    checkSpans(sfq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS), 100, 2, 1, 1);
 
     match = new SpanNearQuery(clauses, 0, false);
     sfq = new SpanFirstQuery(match, 2);
-    checkSpans(MultiSpansWrapper.wrap(indexReader, sfq, SpanWeight.Postings.PAYLOADS), 100, 2, 1, 1);
+    checkSpans(sfq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS), 100, 2, 1, 1);
     
   }
   
@@ -123,10 +122,10 @@ public class TestPayloadSpans extends LuceneTestCase {
     Document doc = new Document();
     doc.add(newTextField(PayloadHelper.FIELD, "one two three one four three", Field.Store.YES));
     writer.addDocument(doc);
-    IndexReader reader = writer.getReader();
+    IndexReader reader = getOnlyLeafReader(writer.getReader());
     writer.close();
 
-    checkSpans(MultiSpansWrapper.wrap(reader, snq, SpanWeight.Postings.PAYLOADS), 1, new int[]{2});
+    checkSpans(snq.createWeight(newSearcher(reader), false).getSpans(reader.leaves().get(0), SpanWeight.Postings.PAYLOADS), 1, new int[]{2});
     reader.close();
     directory.close();
   }
@@ -137,7 +136,7 @@ public class TestPayloadSpans extends LuceneTestCase {
     IndexSearcher searcher = getSearcher();
 
     stq = new SpanTermQuery(new Term(PayloadHelper.FIELD, "mark"));
-    spans = MultiSpansWrapper.wrap(searcher.getIndexReader(), stq, SpanWeight.Postings.PAYLOADS);
+    spans = stq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
     assertNull(spans);
 
     SpanQuery[] clauses = new SpanQuery[3];
@@ -146,7 +145,7 @@ public class TestPayloadSpans extends LuceneTestCase {
     clauses[2] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "xx"));
     SpanNearQuery spanNearQuery = new SpanNearQuery(clauses, 12, false);
 
-    spans = MultiSpansWrapper.wrap(searcher.getIndexReader(), spanNearQuery, SpanWeight.Postings.PAYLOADS);
+    spans = spanNearQuery.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
     assertTrue("spans is null and it shouldn't be", spans != null);
     checkSpans(spans, 2, new int[]{3,3});
 
@@ -157,7 +156,7 @@ public class TestPayloadSpans extends LuceneTestCase {
 
     spanNearQuery = new SpanNearQuery(clauses, 6, true);
    
-    spans = MultiSpansWrapper.wrap(searcher.getIndexReader(), spanNearQuery, SpanWeight.Postings.PAYLOADS);
+    spans = spanNearQuery.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
 
     assertTrue("spans is null and it shouldn't be", spans != null);
     checkSpans(spans, 1, new int[]{3});
@@ -179,7 +178,7 @@ public class TestPayloadSpans extends LuceneTestCase {
     SpanNearQuery nestedSpanNearQuery = new SpanNearQuery(clauses2, 6, false);
     
     // yy within 6 of xx within 6 of rr
-    spans = MultiSpansWrapper.wrap(searcher.getIndexReader(), nestedSpanNearQuery, SpanWeight.Postings.PAYLOADS);
+    spans = nestedSpanNearQuery.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
     assertTrue("spans is null and it shouldn't be", spans != null);
     checkSpans(spans, 2, new int[]{3,3});
     closeIndexReader.close();
@@ -210,7 +209,7 @@ public class TestPayloadSpans extends LuceneTestCase {
     clauses3[1] = snq;
 
     SpanNearQuery nestedSpanNearQuery = new SpanNearQuery(clauses3, 6, false);
-    spans = MultiSpansWrapper.wrap(searcher.getIndexReader(), nestedSpanNearQuery, SpanWeight.Postings.PAYLOADS);
+    spans = nestedSpanNearQuery.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
 
     assertTrue("spans is null and it shouldn't be", spans != null);
     checkSpans(spans, 1, new int[]{3});
@@ -248,7 +247,7 @@ public class TestPayloadSpans extends LuceneTestCase {
      
     SpanNearQuery nestedSpanNearQuery = new SpanNearQuery(clauses3, 6, false);
 
-    spans = MultiSpansWrapper.wrap(searcher.getIndexReader(), nestedSpanNearQuery, SpanWeight.Postings.PAYLOADS);
+    spans = nestedSpanNearQuery.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
     assertTrue("spans is null and it shouldn't be", spans != null);
     checkSpans(spans, 2, new int[]{8, 8});
     closeIndexReader.close();
@@ -265,7 +264,7 @@ public class TestPayloadSpans extends LuceneTestCase {
     writer.addDocument(doc);
 
     IndexReader reader = writer.getReader();
-    IndexSearcher is = newSearcher(reader);
+    IndexSearcher is = newSearcher(getOnlyLeafReader(reader));
     writer.close();
 
     SpanTermQuery stq1 = new SpanTermQuery(new Term("content", "a"));
@@ -273,7 +272,7 @@ public class TestPayloadSpans extends LuceneTestCase {
     SpanQuery[] sqs = { stq1, stq2 };
     SpanNearQuery snq = new SpanNearQuery(sqs, 1, true);
     VerifyingCollector collector = new VerifyingCollector();
-    Spans spans = MultiSpansWrapper.wrap(is.getIndexReader(), snq, SpanWeight.Postings.PAYLOADS);
+    Spans spans = snq.createWeight(is, false).getSpans(is.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
 
     TopDocs topDocs = is.search(snq, 1);
     Set<String> payloadSet = new HashSet<>();
@@ -304,7 +303,7 @@ public class TestPayloadSpans extends LuceneTestCase {
     doc.add(new TextField("content", new StringReader("a b a d k f a h i k a k")));
     writer.addDocument(doc);
     IndexReader reader = writer.getReader();
-    IndexSearcher is = newSearcher(reader);
+    IndexSearcher is = newSearcher(getOnlyLeafReader(reader));
     writer.close();
 
     SpanTermQuery stq1 = new SpanTermQuery(new Term("content", "a"));
@@ -312,7 +311,7 @@ public class TestPayloadSpans extends LuceneTestCase {
     SpanQuery[] sqs = { stq1, stq2 };
     SpanNearQuery snq = new SpanNearQuery(sqs, 0, true);
     VerifyingCollector collector = new VerifyingCollector();
-    Spans spans =  MultiSpansWrapper.wrap(is.getIndexReader(), snq, SpanWeight.Postings.PAYLOADS);
+    Spans spans = snq.createWeight(is, false).getSpans(is.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
 
     TopDocs topDocs = is.search(snq, 1);
     Set<String> payloadSet = new HashSet<>();
@@ -343,14 +342,14 @@ public class TestPayloadSpans extends LuceneTestCase {
     doc.add(new TextField("content", new StringReader("j k a l f k k p a t a k l k t a")));
     writer.addDocument(doc);
     IndexReader reader = writer.getReader();
-    IndexSearcher is = newSearcher(reader);
+    IndexSearcher is = newSearcher(getOnlyLeafReader(reader));
     writer.close();
 
     SpanTermQuery stq1 = new SpanTermQuery(new Term("content", "a"));
     SpanTermQuery stq2 = new SpanTermQuery(new Term("content", "k"));
     SpanQuery[] sqs = { stq1, stq2 };
     SpanNearQuery snq = new SpanNearQuery(sqs, 0, true);
-    Spans spans =  MultiSpansWrapper.wrap(is.getIndexReader(), snq, SpanWeight.Postings.PAYLOADS);
+    Spans spans = snq.createWeight(is, false).getSpans(is.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
 
     TopDocs topDocs = is.search(snq, 1);
     Set<String> payloadSet = new HashSet<>();
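
Every call site in this test follows the same migration: collapse the index to a single leaf, then ask the query's own SpanWeight for the Spans instead of going through the deleted MultiSpansWrapper. The shape of the replacement, sketched as a fragment for a LuceneTestCase test method (field and term names are placeholders):

    // old: Spans spans = MultiSpansWrapper.wrap(reader, query, SpanWeight.Postings.PAYLOADS);
    IndexSearcher searcher = newSearcher(getOnlyLeafReader(reader)); // one leaf, so leaves().get(0) covers the whole index
    SpanQuery query = new SpanTermQuery(new Term("field", "term"));
    Spans spans = query.createWeight(searcher, false)                // false: scores not needed
        .getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);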

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadTermQuery.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadTermQuery.java b/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadTermQuery.java
index f453b0d..dfa0191 100644
--- a/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadTermQuery.java
+++ b/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadTermQuery.java
@@ -38,10 +38,10 @@ import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.similarities.ClassicSimilarity;
 import org.apache.lucene.search.similarities.Similarity;
-import org.apache.lucene.search.spans.MultiSpansWrapper;
 import org.apache.lucene.search.spans.SpanQuery;
-import org.apache.lucene.search.spans.Spans;
 import org.apache.lucene.search.spans.SpanTermQuery;
+import org.apache.lucene.search.spans.SpanWeight;
+import org.apache.lucene.search.spans.Spans;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.English;
@@ -131,10 +131,11 @@ public class TestPayloadTermQuery extends LuceneTestCase {
       doc.add(newTextField("multiField", English.intToEnglish(i) + "  " + English.intToEnglish(i), Field.Store.YES));
       writer.addDocument(doc);
     }
+    writer.forceMerge(1);
     reader = writer.getReader();
     writer.close();
 
-    searcher = newSearcher(reader);
+    searcher = newSearcher(getOnlyLeafReader(reader));
     searcher.setSimilarity(similarity);
   }
 
@@ -163,7 +164,7 @@ public class TestPayloadTermQuery extends LuceneTestCase {
       assertTrue(doc.score + " does not equal: " + 1, doc.score == 1);
     }
     CheckHits.checkExplanations(query, PayloadHelper.FIELD, searcher, true);
-    Spans spans = MultiSpansWrapper.wrap(searcher.getIndexReader(), query);
+    Spans spans = query.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertTrue("spans is null and it shouldn't be", spans != null);
     /*float score = hits.score(0);
     for (int i =1; i < hits.length(); i++)
@@ -214,7 +215,7 @@ public class TestPayloadTermQuery extends LuceneTestCase {
     }
     assertTrue(numTens + " does not equal: " + 10, numTens == 10);
     CheckHits.checkExplanations(query, "field", searcher, true);
-    Spans spans = MultiSpansWrapper.wrap(searcher.getIndexReader(), query);
+    Spans spans = query.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertTrue("spans is null and it shouldn't be", spans != null);
     //should be two matches per document
     int count = 0;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java
index 5a8a99f..85ac12f 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java
@@ -568,7 +568,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
     
-    BinaryDocValues dv = getOnlySegmentReader(ireader).getBinaryDocValues("field");
+    BinaryDocValues dv = getOnlyLeafReader(ireader).getBinaryDocValues("field");
     assertEquals(new BytesRef(), dv.get(0));
     
     ireader.close();
@@ -743,7 +743,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
     
-    SortedDocValues dv = getOnlySegmentReader(ireader).getSortedDocValues("field");
+    SortedDocValues dv = getOnlyLeafReader(ireader).getSortedDocValues("field");
     if (codecSupportsDocsWithField()) {
       assertEquals(-1, dv.getOrd(0));
       assertEquals(0, dv.getValueCount());
@@ -833,7 +833,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
 
-    SortedDocValues dv = getOnlySegmentReader(ireader).getSortedDocValues("field");
+    SortedDocValues dv = getOnlyLeafReader(ireader).getSortedDocValues("field");
     assertEquals(3, dv.getValueCount());
     
     TermsEnum termsEnum = dv.termsEnum();
@@ -1077,7 +1077,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     TopDocs search = searcher.search(query.build(), 10);
     assertEquals(5, search.totalHits);
     ScoreDoc[] scoreDocs = search.scoreDocs;
-    NumericDocValues docValues = getOnlySegmentReader(reader).getNumericDocValues("docId");
+    NumericDocValues docValues = getOnlyLeafReader(reader).getNumericDocValues("docId");
     for (int i = 0; i < scoreDocs.length; i++) {
       assertEquals(i, scoreDocs[i].doc);
       assertEquals(i, docValues.get(scoreDocs[i].doc));
@@ -1154,12 +1154,11 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
       int ord = docValues.lookupTerm(expected);
       assertEquals(i, ord);
     }
-    LeafReader slowR = SlowCompositeReaderWrapper.wrap(reader);
     Set<Entry<String, String>> entrySet = docToString.entrySet();
 
     for (Entry<String, String> entry : entrySet) {
       // pk lookup
-      PostingsEnum termPostingsEnum = slowR.postings(new Term("id", entry.getKey()));
+      PostingsEnum termPostingsEnum = TestUtil.docs(random(), reader, "id", new BytesRef(entry.getKey()), null, 0);
       int docId = termPostingsEnum.nextDoc();
       expected = new BytesRef(entry.getValue());
       final BytesRef actual = docValues.get(docId);
@@ -1516,7 +1515,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
     
-    SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field");
+    SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field");
     
     dv.setDocument(0);
     assertEquals(0, dv.nextOrd());
@@ -1542,7 +1541,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
     
-    SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field");
+    SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field");
     
     dv.setDocument(0);
     assertEquals(0, dv.nextOrd());
@@ -1551,7 +1550,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     BytesRef bytes = dv.lookupOrd(0);
     assertEquals(new BytesRef("hello"), bytes);
     
-    dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field2");
+    dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field2");
 
     dv.setDocument(0);
     assertEquals(0, dv.nextOrd());
@@ -1585,7 +1584,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
 
-    SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field");
+    SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field");
     assertEquals(2, dv.getValueCount());
     
     dv.setDocument(0);
@@ -1619,7 +1618,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
     
-    SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field");
+    SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field");
     
     dv.setDocument(0);
     assertEquals(0, dv.nextOrd());
@@ -1649,7 +1648,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
     
-    SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field");
+    SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field");
     
     dv.setDocument(0);
     assertEquals(0, dv.nextOrd());
@@ -1689,7 +1688,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
 
-    SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field");
+    SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field");
     assertEquals(3, dv.getValueCount());
     
     dv.setDocument(0);
@@ -1733,7 +1732,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
     
-    SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field");
+    SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field");
     assertEquals(1, dv.getValueCount());
     
     dv.setDocument(0);
@@ -1767,7 +1766,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
     
-    SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field");
+    SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field");
     assertEquals(1, dv.getValueCount());
 
     dv.setDocument(0);
@@ -1800,7 +1799,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
     
-    SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field");
+    SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field");
     assertEquals(1, dv.getValueCount());
 
     dv.setDocument(1);
@@ -1834,7 +1833,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
     
-    SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field");
+    SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field");
     assertEquals(1, dv.getValueCount());
 
     dv.setDocument(1);
@@ -1870,7 +1869,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
     
-    SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field");
+    SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field");
     assertEquals(0, dv.getValueCount());
     
     ireader.close();
@@ -1894,7 +1893,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
 
-    SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field");
+    SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field");
     assertEquals(3, dv.getValueCount());
     
     TermsEnum termsEnum = dv.termsEnum();
@@ -2784,13 +2783,12 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
       IndexReader r = w.getReader();
       w.close();
 
-      LeafReader ar = SlowCompositeReaderWrapper.wrap(r);
-      BinaryDocValues values = ar.getBinaryDocValues("field");
+      BinaryDocValues values = MultiDocValues.getBinaryValues(r, "field");
       for(int j=0;j<5;j++) {
         BytesRef result = values.get(0);
         assertTrue(result.length == 0 || result.length == 1<<i);
       }
-      ar.close();
+      r.close();
       dir.close();
     }
   }
@@ -2866,7 +2864,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
     
-    NumericDocValues dv = getOnlySegmentReader(ireader).getNumericDocValues("field");
+    NumericDocValues dv = getOnlyLeafReader(ireader).getNumericDocValues("field");
     assertEquals(0, dv.get(0));
     
     ireader.close();
@@ -3003,7 +3001,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
     
-    SortedNumericDocValues dv = getOnlySegmentReader(ireader).getSortedNumericDocValues("field");
+    SortedNumericDocValues dv = getOnlyLeafReader(ireader).getSortedNumericDocValues("field");
     dv.setDocument(0);
     assertEquals(0, dv.count());
     
@@ -3033,7 +3031,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
 
-    SortedDocValues dv = getOnlySegmentReader(ireader).getSortedDocValues("field");
+    SortedDocValues dv = getOnlyLeafReader(ireader).getSortedDocValues("field");
     doTestSortedSetEnumAdvanceIndependently(DocValues.singleton(dv));
 
     ireader.close();
@@ -3064,7 +3062,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
 
-    SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field");
+    SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field");
     doTestSortedSetEnumAdvanceIndependently(dv);
 
     ireader.close();
@@ -3170,7 +3168,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
 
-    SortedDocValues dv = getOnlySegmentReader(ireader).getSortedDocValues("field");
+    SortedDocValues dv = getOnlyLeafReader(ireader).getSortedDocValues("field");
     for (int i = 0; i < numEmptyDocs; ++i) {
       assertEquals(-1, dv.getOrd(i));
     }
@@ -3202,7 +3200,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
 
-    SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field");
+    SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field");
     for (int i = 0; i < numEmptyDocs; ++i) {
       dv.setDocument(i);
       assertEquals(-1L, dv.nextOrd());
@@ -3235,8 +3233,8 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
 
-    NumericDocValues dv = getOnlySegmentReader(ireader).getNumericDocValues("field");
-    Bits docsWithField = getOnlySegmentReader(ireader).getDocsWithField("field");
+    NumericDocValues dv = getOnlyLeafReader(ireader).getNumericDocValues("field");
+    Bits docsWithField = getOnlyLeafReader(ireader).getDocsWithField("field");
     for (int i = 0; i < numEmptyDocs; ++i) {
       assertEquals(0, dv.get(i));
       assertFalse(docsWithField.get(i));
@@ -3269,7 +3267,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
 
-    SortedNumericDocValues dv = getOnlySegmentReader(ireader).getSortedNumericDocValues("field");
+    SortedNumericDocValues dv = getOnlyLeafReader(ireader).getSortedNumericDocValues("field");
     for (int i = 0; i < numEmptyDocs; ++i) {
       dv.setDocument(i);
       assertEquals(0, dv.count());
@@ -3302,8 +3300,8 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
 
-    BinaryDocValues dv = getOnlySegmentReader(ireader).getBinaryDocValues("field");
-    Bits docsWithField = getOnlySegmentReader(ireader).getDocsWithField("field");
+    BinaryDocValues dv = getOnlyLeafReader(ireader).getBinaryDocValues("field");
+    Bits docsWithField = getOnlyLeafReader(ireader).getDocsWithField("field");
     for (int i = 0; i < numEmptyDocs; ++i) {
       assertEquals(new BytesRef(), dv.get(i));
       assertFalse(docsWithField.get(i));
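
One hunk above also drops SlowCompositeReaderWrapper in favor of MultiDocValues, which serves doc values directly off a composite reader by stitching the per-leaf values together. The replacement in isolation, using the same reader and field name as the test:

    // old: BinaryDocValues values = SlowCompositeReaderWrapper.wrap(r).getBinaryDocValues("field");
    BinaryDocValues values = MultiDocValues.getBinaryValues(r, "field"); // merged view over all leaves
    BytesRef result = values.get(0); // per-document lookup behaves as before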

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
index 3433caa..2c6f379 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
@@ -258,14 +258,14 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
       if (i == 100) {
         w.forceMerge(1);
         w.commit();
-        reader1 = getOnlySegmentReader(DirectoryReader.open(dir));
+        reader1 = getOnlyLeafReader(DirectoryReader.open(dir));
       }
     }
     w.forceMerge(1);
     w.commit();
     w.close();
 
-    LeafReader reader2 = getOnlySegmentReader(DirectoryReader.open(dir));
+    LeafReader reader2 = getOnlyLeafReader(DirectoryReader.open(dir));
 
     for (LeafReader reader : Arrays.asList(reader1, reader2)) {
       new SimpleMergedSegmentWarmer(InfoStream.NO_OUTPUT).warm(reader);
@@ -295,7 +295,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
     oneDoc.add(customField);
     oneDoc.add(new NumericDocValuesField("field", 5));
     iw.addDocument(oneDoc);
-    LeafReader oneDocReader = getOnlySegmentReader(DirectoryReader.open(iw));
+    LeafReader oneDocReader = getOnlyLeafReader(DirectoryReader.open(iw));
     iw.close();
     
     // now feed to codec apis manually

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/test-framework/src/java/org/apache/lucene/index/BasePointsFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BasePointsFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BasePointsFormatTestCase.java
index 24753aa..ecb3a61 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BasePointsFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BasePointsFormatTestCase.java
@@ -77,7 +77,7 @@ public abstract class BasePointsFormatTestCase extends BaseIndexFileFormatTestCa
     w.close();
 
     DirectoryReader r = DirectoryReader.open(dir);
-    LeafReader sub = getOnlySegmentReader(r);
+    LeafReader sub = getOnlyLeafReader(r);
     PointValues values = sub.getPointValues();
 
     // Simple test: make sure intersect can visit every doc:
@@ -119,7 +119,7 @@ public abstract class BasePointsFormatTestCase extends BaseIndexFileFormatTestCa
     w.close();
 
     DirectoryReader r = DirectoryReader.open(dir);
-    LeafReader sub = getOnlySegmentReader(r);
+    LeafReader sub = getOnlyLeafReader(r);
     PointValues values = sub.getPointValues();
 
     // Simple test: make sure intersect can visit every doc:

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/test-framework/src/java/org/apache/lucene/index/BasePostingsFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BasePostingsFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BasePostingsFormatTestCase.java
index a5957a2..10ed5b1 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BasePostingsFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BasePostingsFormatTestCase.java
@@ -190,7 +190,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     doc.add(newStringField("", "something", Field.Store.NO));
     iw.addDocument(doc);
     DirectoryReader ir = iw.getReader();
-    LeafReader ar = getOnlySegmentReader(ir);
+    LeafReader ar = getOnlyLeafReader(ir);
     Fields fields = ar.fields();
     int fieldCount = fields.size();
     // -1 is allowed, if the codec doesn't implement fields.size():
@@ -215,7 +215,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     doc.add(newStringField("", "", Field.Store.NO));
     iw.addDocument(doc);
     DirectoryReader ir = iw.getReader();
-    LeafReader ar = getOnlySegmentReader(ir);
+    LeafReader ar = getOnlyLeafReader(ir);
     Fields fields = ar.fields();
     int fieldCount = fields.size();
     // -1 is allowed, if the codec doesn't implement fields.size():
@@ -241,7 +241,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     iw.addDocument(doc);
     iw.addDocument(doc);
     DirectoryReader ir = iw.getReader();
-    LeafReader ar = getOnlySegmentReader(ir);
+    LeafReader ar = getOnlyLeafReader(ir);
     TermsEnum termsEnum = ar.terms("field").iterator();
     assertTrue(termsEnum.seekExact(new BytesRef("value")));
     PostingsEnum docsEnum = termsEnum.postings(null, PostingsEnum.NONE);
@@ -264,7 +264,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     iw.addDocument(doc);
     iw.addDocument(doc);
     DirectoryReader ir = iw.getReader();
-    LeafReader ar = getOnlySegmentReader(ir);
+    LeafReader ar = getOnlyLeafReader(ir);
     TermsEnum termsEnum = ar.terms("field").iterator();
     assertTrue(termsEnum.seekExact(new BytesRef("value")));
     PostingsEnum docsEnum = termsEnum.postings(null, PostingsEnum.POSITIONS);
@@ -293,7 +293,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     iw.deleteDocuments(new Term("ghostField", "something")); // delete the only term for the field
     iw.forceMerge(1);
     DirectoryReader ir = iw.getReader();
-    LeafReader ar = getOnlySegmentReader(ir);
+    LeafReader ar = getOnlyLeafReader(ir);
     Fields fields = ar.fields();
     // Ghost busting terms dict impls will have
     // fields.size() == 0; all others must be == 1:
@@ -567,14 +567,14 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     DirectoryReader reader = DirectoryReader.open(iw);
     
     // sugar method (FREQS)
-    PostingsEnum postings = getOnlySegmentReader(reader).postings(new Term("foo", "bar"));
+    PostingsEnum postings = getOnlyLeafReader(reader).postings(new Term("foo", "bar"));
     assertEquals(-1, postings.docID());
     assertEquals(0, postings.nextDoc());
     assertEquals(1, postings.freq());
     assertEquals(DocIdSetIterator.NO_MORE_DOCS, postings.nextDoc());
     
     // termsenum reuse (FREQS)
-    TermsEnum termsEnum = getOnlySegmentReader(reader).terms("foo").iterator();
+    TermsEnum termsEnum = getOnlyLeafReader(reader).terms("foo").iterator();
     termsEnum.seekExact(new BytesRef("bar"));
     PostingsEnum postings2 = termsEnum.postings(postings);
     assertNotNull(postings2);
@@ -625,14 +625,14 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     DirectoryReader reader = DirectoryReader.open(iw);
     
     // sugar method (FREQS)
-    PostingsEnum postings = getOnlySegmentReader(reader).postings(new Term("foo", "bar"));
+    PostingsEnum postings = getOnlyLeafReader(reader).postings(new Term("foo", "bar"));
     assertEquals(-1, postings.docID());
     assertEquals(0, postings.nextDoc());
     assertEquals(2, postings.freq());
     assertEquals(DocIdSetIterator.NO_MORE_DOCS, postings.nextDoc());
     
     // termsenum reuse (FREQS)
-    TermsEnum termsEnum = getOnlySegmentReader(reader).terms("foo").iterator();
+    TermsEnum termsEnum = getOnlyLeafReader(reader).terms("foo").iterator();
     termsEnum.seekExact(new BytesRef("bar"));
     PostingsEnum postings2 = termsEnum.postings(postings);
     assertNotNull(postings2);
@@ -703,14 +703,14 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     DirectoryReader reader = DirectoryReader.open(iw);
     
     // sugar method (FREQS)
-    PostingsEnum postings = getOnlySegmentReader(reader).postings(new Term("foo", "bar"));
+    PostingsEnum postings = getOnlyLeafReader(reader).postings(new Term("foo", "bar"));
     assertEquals(-1, postings.docID());
     assertEquals(0, postings.nextDoc());
     assertEquals(2, postings.freq());
     assertEquals(DocIdSetIterator.NO_MORE_DOCS, postings.nextDoc());
     
     // termsenum reuse (FREQS)
-    TermsEnum termsEnum = getOnlySegmentReader(reader).terms("foo").iterator();
+    TermsEnum termsEnum = getOnlyLeafReader(reader).terms("foo").iterator();
     termsEnum.seekExact(new BytesRef("bar"));
     PostingsEnum postings2 = termsEnum.postings(postings);
     assertNotNull(postings2);
@@ -740,7 +740,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     assertEquals(DocIdSetIterator.NO_MORE_DOCS, docsOnly2.nextDoc());
     
     // asking for positions, ok
-    PostingsEnum docsAndPositionsEnum = getOnlySegmentReader(reader).postings(new Term("foo", "bar"), PostingsEnum.POSITIONS);
+    PostingsEnum docsAndPositionsEnum = getOnlyLeafReader(reader).postings(new Term("foo", "bar"), PostingsEnum.POSITIONS);
     assertEquals(-1, docsAndPositionsEnum.docID());
     assertEquals(0, docsAndPositionsEnum.nextDoc());
     assertEquals(2, docsAndPositionsEnum.freq());
@@ -771,7 +771,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     assertEquals(DocIdSetIterator.NO_MORE_DOCS, docsAndPositionsEnum2.nextDoc());
     
     // payloads, offsets, etc don't cause an error if they aren't there
-    docsAndPositionsEnum = getOnlySegmentReader(reader).postings(new Term("foo", "bar"), PostingsEnum.PAYLOADS);
+    docsAndPositionsEnum = getOnlyLeafReader(reader).postings(new Term("foo", "bar"), PostingsEnum.PAYLOADS);
     assertNotNull(docsAndPositionsEnum);
     // but make sure they work
     assertEquals(-1, docsAndPositionsEnum.docID());
@@ -802,7 +802,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     assertNull(docsAndPositionsEnum2.getPayload());
     assertEquals(DocIdSetIterator.NO_MORE_DOCS, docsAndPositionsEnum2.nextDoc());
     
-    docsAndPositionsEnum = getOnlySegmentReader(reader).postings(new Term("foo", "bar"), PostingsEnum.OFFSETS);
+    docsAndPositionsEnum = getOnlyLeafReader(reader).postings(new Term("foo", "bar"), PostingsEnum.OFFSETS);
     assertNotNull(docsAndPositionsEnum);
     assertEquals(-1, docsAndPositionsEnum.docID());
     assertEquals(0, docsAndPositionsEnum.nextDoc());
@@ -832,7 +832,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     assertNull(docsAndPositionsEnum2.getPayload());
     assertEquals(DocIdSetIterator.NO_MORE_DOCS, docsAndPositionsEnum2.nextDoc());
     
-    docsAndPositionsEnum = getOnlySegmentReader(reader).postings(new Term("foo", "bar"), PostingsEnum.ALL);
+    docsAndPositionsEnum = getOnlyLeafReader(reader).postings(new Term("foo", "bar"), PostingsEnum.ALL);
     assertNotNull(docsAndPositionsEnum);
     assertEquals(-1, docsAndPositionsEnum.docID());
     assertEquals(0, docsAndPositionsEnum.nextDoc());
@@ -883,14 +883,14 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     DirectoryReader reader = DirectoryReader.open(iw);
     
     // sugar method (FREQS)
-    PostingsEnum postings = getOnlySegmentReader(reader).postings(new Term("foo", "bar"));
+    PostingsEnum postings = getOnlyLeafReader(reader).postings(new Term("foo", "bar"));
     assertEquals(-1, postings.docID());
     assertEquals(0, postings.nextDoc());
     assertEquals(2, postings.freq());
     assertEquals(DocIdSetIterator.NO_MORE_DOCS, postings.nextDoc());
     
     // termsenum reuse (FREQS)
-    TermsEnum termsEnum = getOnlySegmentReader(reader).terms("foo").iterator();
+    TermsEnum termsEnum = getOnlyLeafReader(reader).terms("foo").iterator();
     termsEnum.seekExact(new BytesRef("bar"));
     PostingsEnum postings2 = termsEnum.postings(postings);
     assertNotNull(postings2);
@@ -920,7 +920,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     assertEquals(DocIdSetIterator.NO_MORE_DOCS, docsOnly2.nextDoc());
     
     // asking for positions, ok
-    PostingsEnum docsAndPositionsEnum = getOnlySegmentReader(reader).postings(new Term("foo", "bar"), PostingsEnum.POSITIONS);
+    PostingsEnum docsAndPositionsEnum = getOnlyLeafReader(reader).postings(new Term("foo", "bar"), PostingsEnum.POSITIONS);
     assertEquals(-1, docsAndPositionsEnum.docID());
     assertEquals(0, docsAndPositionsEnum.nextDoc());
     assertEquals(2, docsAndPositionsEnum.freq());
@@ -955,7 +955,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     assertEquals(DocIdSetIterator.NO_MORE_DOCS, docsAndPositionsEnum2.nextDoc());
     
     // payloads don't cause an error if they aren't there
-    docsAndPositionsEnum = getOnlySegmentReader(reader).postings(new Term("foo", "bar"), PostingsEnum.PAYLOADS);
+    docsAndPositionsEnum = getOnlyLeafReader(reader).postings(new Term("foo", "bar"), PostingsEnum.PAYLOADS);
     assertNotNull(docsAndPositionsEnum);
     // but make sure they work
     assertEquals(-1, docsAndPositionsEnum.docID());
@@ -990,7 +990,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     assertNull(docsAndPositionsEnum2.getPayload());
     assertEquals(DocIdSetIterator.NO_MORE_DOCS, docsAndPositionsEnum2.nextDoc());
     
-    docsAndPositionsEnum = getOnlySegmentReader(reader).postings(new Term("foo", "bar"), PostingsEnum.OFFSETS);
+    docsAndPositionsEnum = getOnlyLeafReader(reader).postings(new Term("foo", "bar"), PostingsEnum.OFFSETS);
     assertNotNull(docsAndPositionsEnum);
     assertEquals(-1, docsAndPositionsEnum.docID());
     assertEquals(0, docsAndPositionsEnum.nextDoc());
@@ -1020,7 +1020,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     assertNull(docsAndPositionsEnum2.getPayload());
     assertEquals(DocIdSetIterator.NO_MORE_DOCS, docsAndPositionsEnum2.nextDoc());
     
-    docsAndPositionsEnum = getOnlySegmentReader(reader).postings(new Term("foo", "bar"), PostingsEnum.ALL);
+    docsAndPositionsEnum = getOnlyLeafReader(reader).postings(new Term("foo", "bar"), PostingsEnum.ALL);
     assertNotNull(docsAndPositionsEnum);
     assertEquals(-1, docsAndPositionsEnum.docID());
     assertEquals(0, docsAndPositionsEnum.nextDoc());
@@ -1068,14 +1068,14 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     DirectoryReader reader = DirectoryReader.open(iw);
     
     // sugar method (FREQS)
-    PostingsEnum postings = getOnlySegmentReader(reader).postings(new Term("foo", "bar"));
+    PostingsEnum postings = getOnlyLeafReader(reader).postings(new Term("foo", "bar"));
     assertEquals(-1, postings.docID());
     assertEquals(0, postings.nextDoc());
     assertEquals(2, postings.freq());
     assertEquals(DocIdSetIterator.NO_MORE_DOCS, postings.nextDoc());
     
     // termsenum reuse (FREQS)
-    TermsEnum termsEnum = getOnlySegmentReader(reader).terms("foo").iterator();
+    TermsEnum termsEnum = getOnlyLeafReader(reader).terms("foo").iterator();
     termsEnum.seekExact(new BytesRef("bar"));
     PostingsEnum postings2 = termsEnum.postings(postings);
     assertNotNull(postings2);
@@ -1105,7 +1105,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     assertEquals(DocIdSetIterator.NO_MORE_DOCS, docsOnly2.nextDoc());
     
     // asking for positions, ok
-    PostingsEnum docsAndPositionsEnum = getOnlySegmentReader(reader).postings(new Term("foo", "bar"), PostingsEnum.POSITIONS);
+    PostingsEnum docsAndPositionsEnum = getOnlyLeafReader(reader).postings(new Term("foo", "bar"), PostingsEnum.POSITIONS);
     assertEquals(-1, docsAndPositionsEnum.docID());
     assertEquals(0, docsAndPositionsEnum.nextDoc());
     assertEquals(2, docsAndPositionsEnum.freq());
@@ -1140,7 +1140,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     assertEquals(DocIdSetIterator.NO_MORE_DOCS, docsAndPositionsEnum2.nextDoc());
     
     // payloads
-    docsAndPositionsEnum = getOnlySegmentReader(reader).postings(new Term("foo", "bar"), PostingsEnum.PAYLOADS);
+    docsAndPositionsEnum = getOnlyLeafReader(reader).postings(new Term("foo", "bar"), PostingsEnum.PAYLOADS);
     assertNotNull(docsAndPositionsEnum);
     assertEquals(-1, docsAndPositionsEnum.docID());
     assertEquals(0, docsAndPositionsEnum.nextDoc());
@@ -1170,7 +1170,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     assertEquals(new BytesRef("pay2"), docsAndPositionsEnum2.getPayload());
     assertEquals(DocIdSetIterator.NO_MORE_DOCS, docsAndPositionsEnum2.nextDoc());
     
-    docsAndPositionsEnum = getOnlySegmentReader(reader).postings(new Term("foo", "bar"), PostingsEnum.OFFSETS);
+    docsAndPositionsEnum = getOnlyLeafReader(reader).postings(new Term("foo", "bar"), PostingsEnum.OFFSETS);
     assertNotNull(docsAndPositionsEnum);
     assertEquals(-1, docsAndPositionsEnum.docID());
     assertEquals(0, docsAndPositionsEnum.nextDoc());
@@ -1204,7 +1204,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     assertTrue(docsAndPositionsEnum2.getPayload() == null || new BytesRef("pay2").equals(docsAndPositionsEnum2.getPayload()));
     assertEquals(DocIdSetIterator.NO_MORE_DOCS, docsAndPositionsEnum2.nextDoc());
     
-    docsAndPositionsEnum = getOnlySegmentReader(reader).postings(new Term("foo", "bar"), PostingsEnum.ALL);
+    docsAndPositionsEnum = getOnlyLeafReader(reader).postings(new Term("foo", "bar"), PostingsEnum.ALL);
     assertNotNull(docsAndPositionsEnum);
     assertEquals(-1, docsAndPositionsEnum.docID());
     assertEquals(0, docsAndPositionsEnum.nextDoc());
@@ -1254,14 +1254,14 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     DirectoryReader reader = DirectoryReader.open(iw);
     
     // sugar method (FREQS)
-    PostingsEnum postings = getOnlySegmentReader(reader).postings(new Term("foo", "bar"));
+    PostingsEnum postings = getOnlyLeafReader(reader).postings(new Term("foo", "bar"));
     assertEquals(-1, postings.docID());
     assertEquals(0, postings.nextDoc());
     assertEquals(2, postings.freq());
     assertEquals(DocIdSetIterator.NO_MORE_DOCS, postings.nextDoc());
     
     // termsenum reuse (FREQS)
-    TermsEnum termsEnum = getOnlySegmentReader(reader).terms("foo").iterator();
+    TermsEnum termsEnum = getOnlyLeafReader(reader).terms("foo").iterator();
     termsEnum.seekExact(new BytesRef("bar"));
     PostingsEnum postings2 = termsEnum.postings(postings);
     assertNotNull(postings2);
@@ -1291,7 +1291,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     assertEquals(DocIdSetIterator.NO_MORE_DOCS, docsOnly2.nextDoc());
     
     // asking for positions, ok
-    PostingsEnum docsAndPositionsEnum = getOnlySegmentReader(reader).postings(new Term("foo", "bar"), PostingsEnum.POSITIONS);
+    PostingsEnum docsAndPositionsEnum = getOnlyLeafReader(reader).postings(new Term("foo", "bar"), PostingsEnum.POSITIONS);
     assertEquals(-1, docsAndPositionsEnum.docID());
     assertEquals(0, docsAndPositionsEnum.nextDoc());
     assertEquals(2, docsAndPositionsEnum.freq());
@@ -1330,7 +1330,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     assertEquals(DocIdSetIterator.NO_MORE_DOCS, docsAndPositionsEnum2.nextDoc());
     
     // payloads
-    docsAndPositionsEnum = getOnlySegmentReader(reader).postings(new Term("foo", "bar"), PostingsEnum.PAYLOADS);
+    docsAndPositionsEnum = getOnlyLeafReader(reader).postings(new Term("foo", "bar"), PostingsEnum.PAYLOADS);
     assertNotNull(docsAndPositionsEnum);
     assertEquals(-1, docsAndPositionsEnum.docID());
     assertEquals(0, docsAndPositionsEnum.nextDoc());
@@ -1364,7 +1364,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     assertEquals(new BytesRef("pay2"), docsAndPositionsEnum2.getPayload());
     assertEquals(DocIdSetIterator.NO_MORE_DOCS, docsAndPositionsEnum2.nextDoc());
     
-    docsAndPositionsEnum = getOnlySegmentReader(reader).postings(new Term("foo", "bar"), PostingsEnum.OFFSETS);
+    docsAndPositionsEnum = getOnlyLeafReader(reader).postings(new Term("foo", "bar"), PostingsEnum.OFFSETS);
     assertNotNull(docsAndPositionsEnum);
     assertEquals(-1, docsAndPositionsEnum.docID());
     assertEquals(0, docsAndPositionsEnum.nextDoc());
@@ -1398,7 +1398,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     assertTrue(docsAndPositionsEnum2.getPayload() == null || new BytesRef("pay2").equals(docsAndPositionsEnum2.getPayload()));
     assertEquals(DocIdSetIterator.NO_MORE_DOCS, docsAndPositionsEnum2.nextDoc());
     
-    docsAndPositionsEnum = getOnlySegmentReader(reader).postings(new Term("foo", "bar"), PostingsEnum.ALL);
+    docsAndPositionsEnum = getOnlyLeafReader(reader).postings(new Term("foo", "bar"), PostingsEnum.ALL);
     assertNotNull(docsAndPositionsEnum);
     assertEquals(-1, docsAndPositionsEnum.docID());
     assertEquals(0, docsAndPositionsEnum.nextDoc());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java
index c58d56a..adcb0af 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java
@@ -789,7 +789,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
     iw.addIndexes(dirs);
     iw.forceMerge(1);
     
-    LeafReader ir = getOnlySegmentReader(DirectoryReader.open(iw));
+    LeafReader ir = getOnlyLeafReader(DirectoryReader.open(iw));
     for (int i = 0; i < ir.maxDoc(); i++) {
       Document doc = ir.document(i);
       assertEquals(10, doc.getFields().size());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/test-framework/src/java/org/apache/lucene/index/BaseTermVectorsFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseTermVectorsFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseTermVectorsFormatTestCase.java
index 77a46dd..432a25d 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseTermVectorsFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseTermVectorsFormatTestCase.java
@@ -768,7 +768,7 @@ public abstract class BaseTermVectorsFormatTestCase extends BaseIndexFileFormatT
     iw.addDocument(doc);
     DirectoryReader reader = DirectoryReader.open(iw);
     
-    Terms terms = getOnlySegmentReader(reader).getTermVector(0, "foo");
+    Terms terms = getOnlyLeafReader(reader).getTermVector(0, "foo");
     TermsEnum termsEnum = terms.iterator();
     assertNotNull(termsEnum);
     assertEquals(new BytesRef("bar"), termsEnum.next());
@@ -849,7 +849,7 @@ public abstract class BaseTermVectorsFormatTestCase extends BaseIndexFileFormatT
     iw.addDocument(doc);
     DirectoryReader reader = DirectoryReader.open(iw);
     
-    Terms terms = getOnlySegmentReader(reader).getTermVector(0, "foo");
+    Terms terms = getOnlyLeafReader(reader).getTermVector(0, "foo");
     TermsEnum termsEnum = terms.iterator();
     assertNotNull(termsEnum);
     assertEquals(new BytesRef("bar"), termsEnum.next());
@@ -1028,7 +1028,7 @@ public abstract class BaseTermVectorsFormatTestCase extends BaseIndexFileFormatT
     iw.addDocument(doc);
     DirectoryReader reader = DirectoryReader.open(iw);
     
-    Terms terms = getOnlySegmentReader(reader).getTermVector(0, "foo");
+    Terms terms = getOnlyLeafReader(reader).getTermVector(0, "foo");
     TermsEnum termsEnum = terms.iterator();
     assertNotNull(termsEnum);
     assertEquals(new BytesRef("bar"), termsEnum.next());
@@ -1214,7 +1214,7 @@ public abstract class BaseTermVectorsFormatTestCase extends BaseIndexFileFormatT
     iw.addDocument(doc);
     DirectoryReader reader = DirectoryReader.open(iw);
     
-    Terms terms = getOnlySegmentReader(reader).getTermVector(0, "foo");
+    Terms terms = getOnlyLeafReader(reader).getTermVector(0, "foo");
     TermsEnum termsEnum = terms.iterator();
     assertNotNull(termsEnum);
     assertEquals(new BytesRef("bar"), termsEnum.next());
@@ -1400,7 +1400,7 @@ public abstract class BaseTermVectorsFormatTestCase extends BaseIndexFileFormatT
     iw.addDocument(doc);
     DirectoryReader reader = DirectoryReader.open(iw);
     
-    Terms terms = getOnlySegmentReader(reader).getTermVector(0, "foo");
+    Terms terms = getOnlyLeafReader(reader).getTermVector(0, "foo");
     TermsEnum termsEnum = terms.iterator();
     assertNotNull(termsEnum);
     assertEquals(new BytesRef("bar"), termsEnum.next());
@@ -1586,7 +1586,7 @@ public abstract class BaseTermVectorsFormatTestCase extends BaseIndexFileFormatT
     iw.addDocument(doc);
     DirectoryReader reader = DirectoryReader.open(iw);
     
-    Terms terms = getOnlySegmentReader(reader).getTermVector(0, "foo");
+    Terms terms = getOnlyLeafReader(reader).getTermVector(0, "foo");
     TermsEnum termsEnum = terms.iterator();
     assertNotNull(termsEnum);
     assertEquals(new BytesRef("bar"), termsEnum.next());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java b/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java
index c92adde..b517af0 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java
@@ -398,7 +398,7 @@ public class QueryUtils {
             // previous reader, hits NO_MORE_DOCS
             if (lastReader[0] != null) {
               final LeafReader previousReader = lastReader[0];
-              IndexSearcher indexSearcher = LuceneTestCase.newSearcher(previousReader);
+              IndexSearcher indexSearcher = LuceneTestCase.newSearcher(previousReader, false);
               indexSearcher.setSimilarity(s.getSimilarity(true));
               Weight w = indexSearcher.createNormalizedWeight(q, true);
               LeafReaderContext ctx = (LeafReaderContext)indexSearcher.getTopReaderContext();
@@ -500,7 +500,7 @@ public class QueryUtils {
         // previous reader, hits NO_MORE_DOCS
         if (lastReader[0] != null) {
           final LeafReader previousReader = lastReader[0];
-          IndexSearcher indexSearcher = LuceneTestCase.newSearcher(previousReader);
+          IndexSearcher indexSearcher = LuceneTestCase.newSearcher(previousReader, false);
           indexSearcher.setSimilarity(s.getSimilarity(true));
           Weight w = indexSearcher.createNormalizedWeight(q, true);
           Scorer scorer = w.scorer((LeafReaderContext)indexSearcher.getTopReaderContext());
@@ -528,7 +528,7 @@ public class QueryUtils {
       // confirm that skipping beyond the last doc, on the
       // previous reader, hits NO_MORE_DOCS
       final LeafReader previousReader = lastReader[0];
-      IndexSearcher indexSearcher = LuceneTestCase.newSearcher(previousReader);
+      IndexSearcher indexSearcher = LuceneTestCase.newSearcher(previousReader, false);
       indexSearcher.setSimilarity(s.getSimilarity(true));
       Weight w = indexSearcher.createNormalizedWeight(q, true);
       Scorer scorer = w.scorer((LeafReaderContext)indexSearcher.getTopReaderContext());
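
For context on the new false argument in the three hunks above, a hedged reading
(assuming the second parameter of LuceneTestCase.newSearcher is the "maybe wrap
the reader" flag it was in this era): each call site goes on to cast the
searcher's top context to LeafReaderContext, which only holds while the reader
is still a bare leaf. A minimal sketch, assuming a LuceneTestCase subclass so
newDirectory()/random() resolve; names are illustrative:

    Directory dir = newDirectory();
    RandomIndexWriter riw = new RandomIndexWriter(random(), dir);
    riw.addDocument(new Document());
    riw.forceMerge(1);                          // exactly one segment
    DirectoryReader dr = riw.getReader();
    LeafReader previousReader = getOnlyLeafReader(dr);
    // false = don't randomly wrap: the cast below relies on a bare leaf
    IndexSearcher indexSearcher = newSearcher(previousReader, false);
    LeafReaderContext ctx = (LeafReaderContext) indexSearcher.getTopReaderContext();
    IOUtils.close(dr, riw, dir);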

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/test-framework/src/java/org/apache/lucene/search/spans/MultiSpansWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/spans/MultiSpansWrapper.java b/lucene/test-framework/src/java/org/apache/lucene/search/spans/MultiSpansWrapper.java
deleted file mode 100644
index 9915067..0000000
--- a/lucene/test-framework/src/java/org/apache/lucene/search/spans/MultiSpansWrapper.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.search.spans;
-
-import java.io.IOException;
-
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.LeafReader;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.SlowCompositeReaderWrapper;
-import org.apache.lucene.search.IndexSearcher;
-
-/**
- * 
- * A wrapper to perform span operations on a non-leaf reader context
- * <p>
- * NOTE: This should be used for testing purposes only
- * @lucene.internal
- */
-public class MultiSpansWrapper {
-
-  public static Spans wrap(IndexReader reader, SpanQuery spanQuery) throws IOException {
-    return wrap(reader, spanQuery, SpanWeight.Postings.POSITIONS);
-  }
-
-  public static Spans wrap(IndexReader reader, SpanQuery spanQuery, SpanWeight.Postings requiredPostings) throws IOException {
-
-    LeafReader lr = SlowCompositeReaderWrapper.wrap(reader); // slow, but ok for testing
-    LeafReaderContext lrContext = lr.getContext();
-    IndexSearcher searcher = new IndexSearcher(lr);
-    searcher.setQueryCache(null);
-
-    SpanWeight w = spanQuery.createWeight(searcher, false);
-
-    return w.getSpans(lrContext, requiredPostings);
-  }
-}
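
With MultiSpansWrapper (and the SlowCompositeReaderWrapper it leaned on) gone,
the equivalent test pattern presumably pulls the Spans straight off a single
leaf. A hedged sketch of that replacement, mirroring the deleted helper's own
calls and assuming the test forces the index down to one segment first:

    // reader: a DirectoryReader over a single-segment index
    LeafReader leaf = LuceneTestCase.getOnlyLeafReader(reader);
    IndexSearcher searcher = new IndexSearcher(leaf);
    searcher.setQueryCache(null);               // as the deleted wrapper did
    SpanWeight w = spanQuery.createWeight(searcher, false);
    Spans spans = w.getSpans(leaf.getContext(), SpanWeight.Postings.POSITIONS);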

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
index 79eb6280..e5aa7a2 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
@@ -764,15 +764,29 @@ public abstract class LuceneTestCase extends Assert {
    * Some tests expect the directory to contain a single segment, and want to 
   * do tests on that segment's reader. This is a utility method to help them.
    */
+    /*
   public static SegmentReader getOnlySegmentReader(DirectoryReader reader) {
     List<LeafReaderContext> subReaders = reader.leaves();
     if (subReaders.size() != 1) {
       throw new IllegalArgumentException(reader + " has " + subReaders.size() + " segments instead of exactly one");
     }
     final LeafReader r = subReaders.get(0).reader();
-    assertTrue(r instanceof SegmentReader);
+    assertTrue("expected a SegmentReader but got " + r, r instanceof SegmentReader);
     return (SegmentReader) r;
   }
+    */
+
+  /**
+   * Some tests expect the directory to contain a single segment, and want to 
+   * do tests on that segment's reader. This is a utility method to help them.
+   */
+  public static LeafReader getOnlyLeafReader(IndexReader reader) {
+    List<LeafReaderContext> subReaders = reader.leaves();
+    if (subReaders.size() != 1) {
+      throw new IllegalArgumentException(reader + " has " + subReaders.size() + " segments instead of exactly one");
+    }
+    return subReaders.get(0).reader();
+  }
 
   /**
    * Returns true if and only if the calling thread is the primary thread 
@@ -1625,25 +1639,11 @@ public abstract class LuceneTestCase extends Assert {
   }
 
   public static IndexReader wrapReader(IndexReader r) throws IOException {
-    return wrapReader(r, true);
-  }
-
-  public static IndexReader wrapReader(IndexReader r, boolean allowSlowCompositeReader) throws IOException {
     Random random = random();
       
-    // TODO: remove this, and fix those tests to wrap before putting slow around:
-    final boolean wasOriginallyAtomic = r instanceof LeafReader;
     for (int i = 0, c = random.nextInt(6)+1; i < c; i++) {
-      switch(random.nextInt(6)) {
+      switch(random.nextInt(5)) {
       case 0:
-        if (allowSlowCompositeReader) {
-          if (VERBOSE) {
-            System.out.println("NOTE: LuceneTestCase.wrapReader: wrapping previous reader=" + r + " with SlowCompositeReaderWrapper.wrap");
-          }
-          r = SlowCompositeReaderWrapper.wrap(r);
-        }
-        break;
-      case 1:
         // will create no FC insanity in atomic case, as ParallelLeafReader has own cache key:
         if (VERBOSE) {
           System.out.println("NOTE: LuceneTestCase.wrapReader: wrapping previous reader=" + r + " with ParallelLeaf/CompositeReader");
@@ -1652,7 +1652,7 @@ public abstract class LuceneTestCase extends Assert {
           new ParallelLeafReader((LeafReader) r) :
         new ParallelCompositeReader((CompositeReader) r);
         break;
-      case 2:
+      case 1:
         // Häckidy-Hick-Hack: a standard MultiReader will cause FC insanity, so we use
         // QueryUtils' reader with a fake cache key, so insanity checker cannot walk
         // along our reader:
@@ -1661,9 +1661,9 @@ public abstract class LuceneTestCase extends Assert {
         }
         r = new FCInvisibleMultiReader(r);
         break;
-      case 3:
-        if (allowSlowCompositeReader) {
-          final LeafReader ar = SlowCompositeReaderWrapper.wrap(r);
+      case 2:
+        if (r instanceof LeafReader) {
+          final LeafReader ar = (LeafReader) r;
           final List<String> allFields = new ArrayList<>();
           for (FieldInfo fi : ar.getFieldInfos()) {
             allFields.add(fi.name);
@@ -1673,7 +1673,7 @@ public abstract class LuceneTestCase extends Assert {
           final Set<String> fields = new HashSet<>(allFields.subList(0, end));
           // will create no FC insanity as ParallelLeafReader has own cache key:
           if (VERBOSE) {
-            System.out.println("NOTE: LuceneTestCase.wrapReader: wrapping previous reader=" + r + " with ParallelLeafReader(SlowCompositeReaderWapper)");
+            System.out.println("NOTE: LuceneTestCase.wrapReader: wrapping previous reader=" + r + " with ParallelLeafReader");
           }
           r = new ParallelLeafReader(
                                      new FieldFilterLeafReader(ar, fields, false),
@@ -1681,7 +1681,7 @@ public abstract class LuceneTestCase extends Assert {
                                      );
         }
         break;
-      case 4:
+      case 3:
         // Häckidy-Hick-Hack: a standard Reader will cause FC insanity, so we use
         // QueryUtils' reader with a fake cache key, so insanity checker cannot walk
         // along our reader:
@@ -1694,7 +1694,7 @@ public abstract class LuceneTestCase extends Assert {
           r = new AssertingDirectoryReader((DirectoryReader)r);
         }
         break;
-      case 5:
+      case 4:
         if (VERBOSE) {
           System.out.println("NOTE: LuceneTestCase.wrapReader: wrapping previous reader=" + r + " with MismatchedLeaf/DirectoryReader");
         }
@@ -1708,11 +1708,8 @@ public abstract class LuceneTestCase extends Assert {
         fail("should not get here");
       }
     }
-    if (wasOriginallyAtomic) {
-      if (allowSlowCompositeReader) {
-        r = SlowCompositeReaderWrapper.wrap(r);
-      }
-    } else if ((r instanceof CompositeReader) && !(r instanceof FCInvisibleMultiReader)) {
+
+    if ((r instanceof CompositeReader) && !(r instanceof FCInvisibleMultiReader)) {
       // prevent cache insanity caused by e.g. ParallelCompositeReader, to fix we wrap one more time:
       r = new FCInvisibleMultiReader(r);
     }
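
Two practical notes on the helper swap above, hedged since they are read off
the hunks rather than stated by the commit: getOnlyLeafReader accepts any
IndexReader with exactly one leaf, where the old helper additionally insisted
the leaf be a SegmentReader; and call sites that really needed that stronger
type (see the CompressingStoredFields/TermVectors tests later in this commit)
now cast explicitly. A minimal sketch, assuming a one-segment index in dir:

    DirectoryReader ir = DirectoryReader.open(dir);
    LeafReader leaf = getOnlyLeafReader(ir);  // IllegalArgumentException unless exactly one leaf
    // where the old SegmentReader return type was actually relied on:
    CodecReader cr = (CodecReader) getOnlyLeafReader(ir); // explicit cast at the call site
    ir.close();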

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/test-framework/src/test/org/apache/lucene/analysis/TestMockAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/test/org/apache/lucene/analysis/TestMockAnalyzer.java b/lucene/test-framework/src/test/org/apache/lucene/analysis/TestMockAnalyzer.java
index 22cd467..5295349 100644
--- a/lucene/test-framework/src/test/org/apache/lucene/analysis/TestMockAnalyzer.java
+++ b/lucene/test-framework/src/test/org/apache/lucene/analysis/TestMockAnalyzer.java
@@ -315,7 +315,7 @@ public class TestMockAnalyzer extends BaseTokenStreamTestCase {
     doc.add(new Field("f", "a", ft));
     doc.add(new Field("f", "a", ft));
     writer.addDocument(doc);
-    final LeafReader reader = getOnlySegmentReader(writer.getReader());
+    final LeafReader reader = getOnlyLeafReader(writer.getReader());
     final Fields fields = reader.getTermVectors(0);
     final Terms terms = fields.terms("f");
     final TermsEnum te = terms.iterator();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java b/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java
index d84bed9..c6396ae 100644
--- a/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java
+++ b/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java
@@ -21,17 +21,18 @@ import java.util.Random;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.codecs.Codec;
-import org.apache.lucene.document.IntPoint;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.IntPoint;
+import org.apache.lucene.document.IntPoint;
 import org.apache.lucene.document.StoredField;
 import org.apache.lucene.index.BaseStoredFieldsFormatTestCase;
 import org.apache.lucene.index.CodecReader;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.NoMergePolicy;
 import org.apache.lucene.store.ByteArrayDataInput;
@@ -306,7 +307,7 @@ public class TestCompressingStoredFieldsFormat extends BaseStoredFieldsFormatTes
     assertNotNull(ir2);
     ir.close();
     ir = ir2;
-    CodecReader sr = getOnlySegmentReader(ir);
+    CodecReader sr = (CodecReader) getOnlyLeafReader(ir);
     CompressingStoredFieldsReader reader = (CompressingStoredFieldsReader)sr.getFieldsReader();
     // we could get lucky, and have zero, but typically one.
     assertTrue(reader.getNumDirtyChunks() <= 1);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingTermVectorsFormat.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingTermVectorsFormat.java b/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingTermVectorsFormat.java
index 4fa0278..f4858d1 100644
--- a/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingTermVectorsFormat.java
+++ b/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingTermVectorsFormat.java
@@ -25,18 +25,18 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.StoredField;
 import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.BaseTermVectorsFormatTestCase;
 import org.apache.lucene.index.CodecReader;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.LeafReader;
-import org.apache.lucene.index.BaseTermVectorsFormatTestCase;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.NoMergePolicy;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.index.TermsEnum.SeekStatus;
+import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
 
@@ -56,7 +56,7 @@ public class TestCompressingTermVectorsFormat extends BaseTermVectorsFormatTestC
     ft.setStoreTermVectors(true);
     doc.add(new Field("foo", "this is a test", ft));
     iw.addDocument(doc);
-    LeafReader ir = getOnlySegmentReader(iw.getReader());
+    LeafReader ir = getOnlyLeafReader(iw.getReader());
     Terms terms = ir.getTermVector(0, "foo");
     assertNotNull(terms);
     TermsEnum termsEnum = terms.iterator();
@@ -118,7 +118,7 @@ public class TestCompressingTermVectorsFormat extends BaseTermVectorsFormatTestC
     assertNotNull(ir2);
     ir.close();
     ir = ir2;
-    CodecReader sr = getOnlySegmentReader(ir);
+    CodecReader sr = (CodecReader) getOnlyLeafReader(ir);
     CompressingTermVectorsReader reader = (CompressingTermVectorsReader)sr.getTermVectorsReader();
     // we could get lucky, and have zero, but typically one.
     assertTrue(reader.getNumDirtyChunks() <= 1);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/test-framework/src/test/org/apache/lucene/index/TestAssertingLeafReader.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/test/org/apache/lucene/index/TestAssertingLeafReader.java b/lucene/test-framework/src/test/org/apache/lucene/index/TestAssertingLeafReader.java
index b572289..5276301 100644
--- a/lucene/test-framework/src/test/org/apache/lucene/index/TestAssertingLeafReader.java
+++ b/lucene/test-framework/src/test/org/apache/lucene/index/TestAssertingLeafReader.java
@@ -54,9 +54,8 @@ public class TestAssertingLeafReader extends LuceneTestCase {
     assertEquals(1, r.numDocs());
 
     r = new AssertingDirectoryReader((DirectoryReader) r);
+    final IndexReader r2 = r;
 
-    final IndexReader r2 = SlowCompositeReaderWrapper.wrap(r);
-   
     Thread thread = new Thread() {
       @Override
       public void run() {
@@ -68,6 +67,6 @@ public class TestAssertingLeafReader extends LuceneTestCase {
     thread.start();
     thread.join();
 
-    IOUtils.close(r2, dir);
+    IOUtils.close(r, dir);
   }
 }


[02/50] [abbrv] lucene-solr git commit: SOLR-8799: Update CHANGES.txt

Posted by ho...@apache.org.
SOLR-8799: Update CHANGES.txt


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/02523d5b
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/02523d5b
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/02523d5b

Branch: refs/heads/jira/SOLR-445
Commit: 02523d5b6d7a4416265c4a36289b3dcb497ed6a6
Parents: 56ad6e5
Author: jbernste <jb...@apache.org>
Authored: Tue Mar 8 15:25:52 2016 -0500
Committer: jbernste <jb...@apache.org>
Committed: Tue Mar 8 15:25:52 2016 -0500

----------------------------------------------------------------------
 solr/CHANGES.txt | 2 ++
 1 file changed, 2 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/02523d5b/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index d094b58..dc7c45f 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -408,6 +408,8 @@ Other Changes
 * SOLR-8766: Remove deprecated <admin> tag in solrconfig.xml and support for admin/gettableFiles
   (noble, Jason Gerlowski, Varun Thacker)
 
+* SOLR-8799: Improve error message when tuple can't be read by SolrJ JDBC (Kevin Risden, Joel Bernstein)
+
 ==================  5.5.1 ==================
 
 Bug Fixes


[04/50] [abbrv] lucene-solr git commit: remove troublesome float tests since facets only actually expose doubles

Posted by ho...@apache.org.
remove troublesome float tests since facets only actually expose doubles


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/12f7ad66
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/12f7ad66
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/12f7ad66

Branch: refs/heads/jira/SOLR-445
Commit: 12f7ad66963a5ae784f2bd0bf8b5dbc4b3c1630e
Parents: a6c8ccb
Author: Mike McCandless <mi...@apache.org>
Authored: Tue Mar 8 17:30:30 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Tue Mar 8 17:30:30 2016 -0500

----------------------------------------------------------------------
 .../facet/range/TestRangeFacetCounts.java       | 203 -------------------
 1 file changed, 203 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/12f7ad66/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java
----------------------------------------------------------------------
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java b/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java
index 9f8b109..626d772 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java
@@ -23,11 +23,9 @@ import java.util.Set;
 import java.util.concurrent.atomic.AtomicBoolean;
 
 import org.apache.lucene.document.DoublePoint;
-import org.apache.lucene.document.FloatPoint;
 import org.apache.lucene.document.LongPoint;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.DoubleDocValuesField;
-import org.apache.lucene.document.FloatDocValuesField;
 import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.facet.DrillDownQuery;
 import org.apache.lucene.facet.DrillSideways;
@@ -52,7 +50,6 @@ import org.apache.lucene.queries.function.FunctionValues;
 import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
 import org.apache.lucene.queries.function.valuesource.DoubleFieldSource;
-import org.apache.lucene.queries.function.valuesource.FloatFieldSource;
 import org.apache.lucene.queries.function.valuesource.LongFieldSource;
 import org.apache.lucene.search.Explanation;
 import org.apache.lucene.search.IndexSearcher;
@@ -321,37 +318,6 @@ public class TestRangeFacetCounts extends FacetTestCase {
     IOUtils.close(r, d);
   }
 
-  public void testBasicFloat() throws Exception {
-    Directory d = newDirectory();
-    RandomIndexWriter w = new RandomIndexWriter(random(), d);
-    Document doc = new Document();
-    FloatDocValuesField field = new FloatDocValuesField("field", 0.0f);
-    doc.add(field);
-    for(long l=0;l<100;l++) {
-      field.setFloatValue(l);
-      w.addDocument(doc);
-    }
-
-    IndexReader r = w.getReader();
-
-    FacetsCollector fc = new FacetsCollector();
-
-    IndexSearcher s = newSearcher(r);
-    s.search(new MatchAllDocsQuery(), fc);
-
-    Facets facets = new DoubleRangeFacetCounts("field", new FloatFieldSource("field"), fc,
-        new DoubleRange("less than 10", 0.0f, true, 10.0f, false),
-        new DoubleRange("less than or equal to 10", 0.0f, true, 10.0f, true),
-        new DoubleRange("over 90", 90.0f, false, 100.0f, false),
-        new DoubleRange("90 or above", 90.0f, true, 100.0f, false),
-        new DoubleRange("over 1000", 1000.0f, false, Double.POSITIVE_INFINITY, false));
-    
-    assertEquals("dim=field path=[] value=21 childCount=5\n  less than 10 (10)\n  less than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (0)\n",
-                 facets.getTopChildren(10, "field").toString());
-    w.close();
-    IOUtils.close(r, d);
-  }
-
   public void testRandomLongs() throws Exception {
     Directory dir = newDirectory();
     RandomIndexWriter w = new RandomIndexWriter(random(), dir);
@@ -499,175 +465,6 @@ public class TestRangeFacetCounts extends FacetTestCase {
     IOUtils.close(r, dir);
   }
 
-  public void testRandomFloats() throws Exception {
-    Directory dir = newDirectory();
-    RandomIndexWriter w = new RandomIndexWriter(random(), dir);
-
-    int numDocs = atLeast(1000);
-    float[] values = new float[numDocs];
-    float minValue = Float.POSITIVE_INFINITY;
-    float maxValue = Float.NEGATIVE_INFINITY;
-    for(int i=0;i<numDocs;i++) {
-      Document doc = new Document();
-      float v = random().nextFloat();
-      values[i] = v;
-      doc.add(new FloatDocValuesField("field", v));
-      doc.add(new FloatPoint("field", v));
-      w.addDocument(doc);
-      minValue = Math.min(minValue, v);
-      maxValue = Math.max(maxValue, v);
-    }
-    IndexReader r = w.getReader();
-
-    IndexSearcher s = newSearcher(r, false);
-    FacetsConfig config = new FacetsConfig();
-    
-    int numIters = atLeast(10);
-    for(int iter=0;iter<numIters;iter++) {
-      if (VERBOSE) {
-        System.out.println("TEST: iter=" + iter);
-      }
-      int numRange = TestUtil.nextInt(random(), 1, 5);
-      DoubleRange[] ranges = new DoubleRange[numRange];
-      int[] expectedCounts = new int[numRange];
-      float minAcceptedValue = Float.POSITIVE_INFINITY;
-      float maxAcceptedValue = Float.NEGATIVE_INFINITY;
-      boolean[] rangeMinIncl = new boolean[numRange];
-      boolean[] rangeMaxIncl = new boolean[numRange];
-      if (VERBOSE) {
-        System.out.println("TEST: " + numRange + " ranges");
-      }
-      for(int rangeID=0;rangeID<numRange;rangeID++) {
-        double min;
-        if (rangeID > 0 && random().nextInt(10) == 7) {
-          // Use an existing boundary:
-          DoubleRange prevRange = ranges[random().nextInt(rangeID)];
-          if (random().nextBoolean()) {
-            min = prevRange.min;
-          } else {
-            min = prevRange.max;
-          }
-        } else {
-          min = random().nextDouble();
-        }
-        double max;
-        if (rangeID > 0 && random().nextInt(10) == 7) {
-          // Use an existing boundary:
-          DoubleRange prevRange = ranges[random().nextInt(rangeID)];
-          if (random().nextBoolean()) {
-            max = prevRange.min;
-          } else {
-            max = prevRange.max;
-          }
-        } else {
-          max = random().nextDouble();
-        }
-
-        if (min > max) {
-          double x = min;
-          min = max;
-          max = x;
-        }
-
-        // Must truncate to float precision so that the
-        // drill-down counts (which use NRQ.newFloatRange)
-        // are correct:
-        min = (float) min;
-        max = (float) max;
-
-        boolean minIncl;
-        boolean maxIncl;
-        if (min == max) {
-          minIncl = true;
-          maxIncl = true;
-        } else {
-          minIncl = random().nextBoolean();
-          maxIncl = random().nextBoolean();
-        }
-        rangeMinIncl[rangeID] = minIncl;
-        rangeMaxIncl[rangeID] = maxIncl;
-        ranges[rangeID] = new DoubleRange("r" + rangeID, min, minIncl, max, maxIncl);
-
-        if (VERBOSE) {
-          System.out.println("TEST:   range " + rangeID + ": " + ranges[rangeID]);
-        }
-
-        // Do "slow but hopefully correct" computation of
-        // expected count:
-        for(int i=0;i<numDocs;i++) {
-          boolean accept = true;
-          if (minIncl) {
-            accept &= values[i] >= min;
-          } else {
-            accept &= values[i] > min;
-          }
-          if (maxIncl) {
-            accept &= values[i] <= max;
-          } else {
-            accept &= values[i] < max;
-          }
-          if (VERBOSE) {
-            System.out.println("TEST:   check doc=" + i + " val=" + values[i] + " accept=" + accept);
-          }
-          if (accept) {
-            expectedCounts[rangeID]++;
-            minAcceptedValue = Math.min(minAcceptedValue, values[i]);
-            maxAcceptedValue = Math.max(maxAcceptedValue, values[i]);
-          }
-        }
-      }
-
-      FacetsCollector sfc = new FacetsCollector();
-      s.search(new MatchAllDocsQuery(), sfc);
-      Query fastMatchQuery;
-      if (random().nextBoolean()) {
-        if (random().nextBoolean()) {
-          fastMatchQuery = FloatPoint.newRangeQuery("field", minValue, maxValue);
-        } else {
-          fastMatchQuery = FloatPoint.newRangeQuery("field", minAcceptedValue, maxAcceptedValue);
-        }
-      } else {
-        fastMatchQuery = null;
-      }
-      ValueSource vs = new FloatFieldSource("field");
-      Facets facets = new DoubleRangeFacetCounts("field", vs, sfc, fastMatchQuery, ranges);
-      FacetResult result = facets.getTopChildren(10, "field");
-      assertEquals(numRange, result.labelValues.length);
-      for(int rangeID=0;rangeID<numRange;rangeID++) {
-        if (VERBOSE) {
-          System.out.println("TEST: verify range " + rangeID + " expectedCount=" + expectedCounts[rangeID]);
-        }
-        LabelAndValue subNode = result.labelValues[rangeID];
-        assertEquals("r" + rangeID, subNode.label);
-        assertEquals(expectedCounts[rangeID], subNode.value.intValue());
-
-        DoubleRange range = ranges[rangeID];
-
-        // Test drill-down:
-        DrillDownQuery ddq = new DrillDownQuery(config);
-        if (random().nextBoolean()) {
-          // We must do the nextUp/down in float space, here, because the nextUp that DoubleRange did in double space, when cast back to float,
-          // in fact does nothing!
-          float minFloat = (float) range.min;
-          if (rangeMinIncl[rangeID] == false) {
-            minFloat = Math.nextUp(minFloat);
-          }
-          float maxFloat = (float) range.max;
-          if (rangeMaxIncl[rangeID] == false) {
-            maxFloat = Math.nextAfter(maxFloat, Float.NEGATIVE_INFINITY);
-          }
-          ddq.add("field", FloatPoint.newRangeQuery("field", minFloat, maxFloat));
-        } else {
-          ddq.add("field", range.getQuery(fastMatchQuery, vs));
-        }
-        assertEquals(expectedCounts[rangeID], s.search(ddq, 10).totalHits);
-      }
-    }
-
-    w.close();
-    IOUtils.close(r, dir);
-  }
-
   public void testRandomDoubles() throws Exception {
     Directory dir = newDirectory();
     RandomIndexWriter w = new RandomIndexWriter(random(), dir);
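
The comment in the removed testRandomFloats pins down why these tests were
troublesome: DoubleRangeFacetCounts widens every value to double, so an
exclusive float bound nudged with double-space nextUp/nextAfter does not move
at all once it is cast back to float. A self-contained illustration of that
pitfall (ours, not from the commit):

    float f = 1.1f;
    double upInDoubleSpace = Math.nextUp((double) f);   // one double ulp above f
    System.out.println((float) upInDoubleSpace == f);   // true: rounds back to f, bound unchanged
    System.out.println(Math.nextUp(f) == f);            // false: float-space step actually moves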


[05/50] [abbrv] lucene-solr git commit: SOLR-445: adding really basic CloudSolrClient support

Posted by ho...@apache.org.
SOLR-445: adding really basic CloudSolrClient support


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/50697eee
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/50697eee
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/50697eee

Branch: refs/heads/jira/SOLR-445
Commit: 50697eee9585d99df7eaccf643e3e3088a4172a3
Parents: 0691d47
Author: Chris Hostetter <ho...@apache.org>
Authored: Tue Mar 8 18:51:24 2016 -0700
Committer: Chris Hostetter <ho...@apache.org>
Committed: Tue Mar 8 18:51:24 2016 -0700

----------------------------------------------------------------------
 .../processor/TolerantUpdateProcessor.java      |  1 +
 .../cloud/TestTolerantUpdateProcessorCloud.java | 29 +++++++++---------
 .../solr/client/solrj/impl/CloudSolrClient.java | 31 +++++++++++++++++++-
 3 files changed, 45 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/50697eee/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
index a325420..cbfa1e2 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
@@ -270,6 +270,7 @@ public class TolerantUpdateProcessor extends UpdateRequestProcessor {
     } else {
       header.add("numErrors", 0); // nocommit: eliminate from response, client can count
     }
+    header.add("maxErrors", maxErrors);
 
     // annotate any error that might be thrown (or was already thrown)
     firstErrTracker.annotate(knownErrors);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/50697eee/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java
index 410b142..aef0385 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java
@@ -51,7 +51,6 @@ import org.apache.solr.util.RevertDefaultThreadHandlerRule;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -234,6 +233,18 @@ public class TestTolerantUpdateProcessorCloud extends SolrCloudTestCase {
       assertQueryDocIds(c, true, S_ONE_PRE + "1",  S_TWO_PRE + "2");
       assertQueryDocIds(c, false, "id_not_exists");
 
+      // verify adding 2 broken docs causes a client exception
+      try {
+        UpdateResponse rsp = update(params(),
+                                    doc(f("id", S_ONE_PRE + "X"), f("foo_i", "bogus_val_X")),
+                                    doc(f("id", S_TWO_PRE + "Y"), f("foo_i", "bogus_val_Y"))
+                                    ).process(c);
+        fail("did not get a top level exception when more then 10 docs failed: " + rsp.toString());
+      } catch (SolrException e) {
+        assertEquals("not the type of error we were expecting ("+e.code()+"): " + e.toString(),
+                     400, e.code());
+      }
+        
       // verify malformed deleteByQuerys fail
       try {
         UpdateResponse rsp = update(params()).deleteByQuery("foo_i:not_a_num").process(c);
@@ -260,7 +271,6 @@ public class TestTolerantUpdateProcessorCloud extends SolrCloudTestCase {
   }
 
   //
-  @Ignore("nocommit: need to implement tolerante response merging in cloud client")
   public void testVariousDeletesViaCloudClient() throws Exception {
     testVariousDeletes(CLOUD_CLIENT);
   }
@@ -372,7 +382,6 @@ public class TestTolerantUpdateProcessorCloud extends SolrCloudTestCase {
 
   
   //
-  @Ignore("nocommit: need to implement tolerante response merging in cloud client")
   public void testVariousAddsViaCloudClient() throws Exception {
     testVariousAdds(CLOUD_CLIENT);
   }
@@ -509,9 +518,6 @@ public class TestTolerantUpdateProcessorCloud extends SolrCloudTestCase {
                    doc(f("id", S_ONE_PRE + "10")), // may be skipped, more then 10 fails
                    doc(f("id", S_TWO_PRE + "20"))  // may be skipped, more then 10 fails
                    ).process(client);
-
-      // nocommit: should this really be a top level exception?
-      // nocommit: or should it be an HTTP:200 with the details of what faild in the body?
       
       fail("did not get a top level exception when more then 10 docs failed: " + rsp.toString());
     } catch (SolrException e) {
@@ -563,9 +569,6 @@ public class TestTolerantUpdateProcessorCloud extends SolrCloudTestCase {
                           "commit", "true"),
                    docs.toArray(new SolrInputDocument[docs.size()])).process(client);
       
-      // nocommit: should this really be a top level exception?
-      // nocommit: or should it be an HTTP:200 with the details of what faild in the body?
-      
       fail("did not get a top level exception when more then 10 docs failed: " + rsp.toString());
     } catch (SolrException e) {
       // we can't make any reliable assertions about the error message, because
@@ -599,7 +602,6 @@ public class TestTolerantUpdateProcessorCloud extends SolrCloudTestCase {
   }
 
   //
-  @Ignore("nocommit: need to implement tolerante response merging in cloud client")
   public void testAddsMixedWithDeletesViaCloudClient() throws Exception {
     testAddsMixedWithDeletes(CLOUD_CLIENT);
   }
@@ -623,6 +625,8 @@ public class TestTolerantUpdateProcessorCloud extends SolrCloudTestCase {
     assertNotNull("client not initialized", client);
 
     // nocommit: test adds & deletes mixed in a single UpdateRequest, w/ tolerated failures of both types
+
+    // nocommit: be sure to include DBQ mixed with other things.
   }
 
   /** Asserts that the UpdateResponse contains the specified expectedErrs and no others */
@@ -657,11 +661,6 @@ public class TestTolerantUpdateProcessorCloud extends SolrCloudTestCase {
       assertTrue(assertErrPre + " ... unexpected err, not found in: " + response.toString(), found);
 
     }
-    
-    // nocommit: retire numErrors, we've already checked errors.size()
-    assertEquals(assertionMsgPrefix + ": numErrors: " + response.toString(),
-                 expectedErrs.length, response.getResponseHeader().get("numErrors"));
- 
   }
   
   /** convenience method when the only type of errors you expect are 'add' errors */

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/50697eee/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
index 59b37c5..0248a7d 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
@@ -72,6 +72,7 @@ import org.apache.solr.common.params.UpdateParams;
 import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.Hash;
 import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.common.util.SolrjNamedThreadFactory;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.zookeeper.KeeperException;
@@ -726,6 +727,11 @@ public class CloudSolrClient extends SolrClient {
     int status = 0;
     Integer rf = null;
     Integer minRf = null;
+    
+    // TolerantUpdateProcessor
+    List<SimpleOrderedMap<String>> toleratedErrors = null; 
+    int maxToleratedErrors = Integer.MAX_VALUE;
+      
     for(int i=0; i<response.size(); i++) {
       NamedList shardResponse = (NamedList)response.getVal(i);
       NamedList header = (NamedList)shardResponse.get("responseHeader");      
@@ -741,6 +747,23 @@ public class CloudSolrClient extends SolrClient {
           rf = routeRf;
       }
       minRf = (Integer)header.get(UpdateRequest.MIN_REPFACT);
+
+      List<SimpleOrderedMap<String>> shardTolerantErrors = 
+        (List<SimpleOrderedMap<String>>) header.get("errors");
+      if (null != shardTolerantErrors) {
+        Number shardMaxToleratedErrors = (Number) header.get("maxErrors");
+        assert null != shardMaxToleratedErrors : "TolerantUpdateProcessor reported errors but not maxErrors";
+        // if we get into some weird state where the nodes disagree about the effective maxErrors,
+        // assume the min value seen to decide if we should fail.
+        maxToleratedErrors = Math.min(maxToleratedErrors, shardMaxToleratedErrors.intValue());
+        
+        if (null == toleratedErrors) {
+          toleratedErrors = new ArrayList<SimpleOrderedMap<String>>(shardTolerantErrors.size());
+        }
+        for (SimpleOrderedMap<String> err : shardTolerantErrors) {
+          toleratedErrors.add(err);
+        }
+      }
     }
 
     NamedList cheader = new NamedList();
@@ -750,7 +773,13 @@ public class CloudSolrClient extends SolrClient {
       cheader.add(UpdateRequest.REPFACT, rf);
     if (minRf != null)
       cheader.add(UpdateRequest.MIN_REPFACT, minRf);
-    
+    if (null != toleratedErrors) {
+      cheader.add("errors", toleratedErrors);
+      if (maxToleratedErrors < toleratedErrors.size()) {
+        // nocommit: populate metadata based on the toleratedErrors
+        throw new SolrException(ErrorCode.BAD_REQUEST, "nocommit: need better msg");
+      }
+    }
     condensed.add("responseHeader", cheader);
     return condensed;
   }
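
From the client's point of view, the condensed header now carries the merged
"errors" list plus the effective "maxErrors". A hedged sketch of reading it
back, assuming each entry exposes the "type" and "id" keys the tests on this
branch look for; req and cloudClient are illustrative names:

    UpdateResponse rsp = req.process(cloudClient);
    NamedList<?> header = rsp.getResponseHeader();
    @SuppressWarnings("unchecked")
    List<SimpleOrderedMap<String>> errs = (List<SimpleOrderedMap<String>>) header.get("errors");
    if (null != errs) {
      for (SimpleOrderedMap<String> err : errs) {
        System.err.println("tolerated " + err.get("type") + " failure for: " + err.get("id"));
      }
    }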


[46/50] [abbrv] lucene-solr git commit: SOLR-445: reflection test future-proofing to ensure we always implement any (future) update processor lifecycle methods where exception tracking/annotating is important

Posted by ho...@apache.org.
SOLR-445: reflection test future-proofing to ensure we always implement any (future) update processor lifecycle methods where exception tracking/annotating is important


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/7d6ed177
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/7d6ed177
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/7d6ed177

Branch: refs/heads/jira/SOLR-445
Commit: 7d6ed177f54d1a943a72e6a108a7044b5f5470e5
Parents: 0ccee15
Author: Chris Hostetter <ho...@apache.org>
Authored: Fri Mar 11 11:10:15 2016 -0700
Committer: Chris Hostetter <ho...@apache.org>
Committed: Fri Mar 11 11:10:15 2016 -0700

----------------------------------------------------------------------
 .../processor/TolerantUpdateProcessor.java      | 41 ++++++++++++++++++--
 .../processor/TolerantUpdateProcessorTest.java  | 17 +++++++-
 2 files changed, 54 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7d6ed177/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
index 40e9b23..678d4af 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
@@ -41,7 +41,10 @@ import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.schema.SchemaField;
 import org.apache.solr.update.AddUpdateCommand;
+import org.apache.solr.update.CommitUpdateCommand;
 import org.apache.solr.update.DeleteUpdateCommand;
+import org.apache.solr.update.MergeIndexesCommand;
+import org.apache.solr.update.RollbackUpdateCommand;
 import org.apache.solr.update.SolrCmdDistributor.Error;
 import org.apache.solr.update.processor.DistributedUpdateProcessor.DistribPhase;
 
@@ -200,9 +203,41 @@ public class TolerantUpdateProcessor extends UpdateRequestProcessor {
     }
   }
 
-  
-  // nocommit: override processCommit and other UpdateProcessor methods
-  // nocommit: ...use firstErrTracker to catch & rethrow so finish can annotate
+  @Override
+  public void processMergeIndexes(MergeIndexesCommand cmd) throws IOException {
+    try {
+      super.processMergeIndexes(cmd);
+    } catch (Throwable t) { // nocommit: OOM trap
+      // we're not tolerante of errors from this type of command, but we
+      // do need to track it so we can annotate it with any other errors we were allready tolerant of
+      firstErrTracker.caught(t);
+      throw t;
+    }
+  }
+
+  @Override
+  public void processCommit(CommitUpdateCommand cmd) throws IOException {
+    try {
+      super.processCommit(cmd);
+    } catch (Throwable t) { // nocommit: OOM trap
+      // we're not tolerante of errors from this type of command, but we
+      // do need to track it so we can annotate it with any other errors we were allready tolerant of
+      firstErrTracker.caught(t);
+      throw t;
+    }
+  }
+
+  @Override
+  public void processRollback(RollbackUpdateCommand cmd) throws IOException {
+    try {
+      super.processRollback(cmd);
+    } catch (Throwable t) { // nocommit: OOM trap
+      // we're not tolerante of errors from this type of command, but we
+      // do need to track it so we can annotate it with any other errors we were allready tolerant of
+      firstErrTracker.caught(t);
+      throw t;
+    }
+  }
 
   @Override
   public void finish() throws IOException {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7d6ed177/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java
index 7f77ad5..0cb3ba89 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java
@@ -18,6 +18,7 @@ package org.apache.solr.update.processor;
 
 import java.io.IOException;
 import java.io.StringWriter;
+import java.lang.reflect.Method;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -100,7 +101,21 @@ public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
     super.tearDown();
   }
 
-  // nocommit: add reflection based test to ensure processor overrides all methods & uses firstErrTracker
+  /**
+   * future-proof TolerantUpdateProcessor against new default method impls being added to UpdateProcessor
+   * to ensure that every method involved in a processor chain life cycle is overridden with 
+   * exception catching/tracking.
+   */
+  public void testReflection() {
+    for (Method method : TolerantUpdateProcessor.class.getMethods()) {
+      if (method.getDeclaringClass().equals(Object.class)) {
+        continue;
+      }
+      assertEquals("base class(es) has changed, TolerantUpdateProcessor needs updated to ensure it " +
+                   "overrides all solr update lifcycle methods with exception tracking: " + method.toString(),
+                   TolerantUpdateProcessor.class, method.getDeclaringClass());
+    }
+  }
  
   
   @Test
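
One caveat worth noting about this guard (our observation, not part of the
commit): Class#getMethods() only surfaces public methods, so a hypothetical
protected lifecycle hook added to the base class later would escape the
assertion. Walking getDeclaredMethods() up the hierarchy would close that gap;
this sketch needs java.lang.reflect.Modifier in addition to the Method import
the diff adds:

    for (Class<?> c = UpdateRequestProcessor.class; c != Object.class; c = c.getSuperclass()) {
      for (Method m : c.getDeclaredMethods()) {
        if (Modifier.isProtected(m.getModifiers())) {
          // invisible to getMethods(); assert TolerantUpdateProcessor overrides it too
          System.out.println("unchecked hook: " + m);
        }
      }
    }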


[35/50] [abbrv] lucene-solr git commit: SOLR-445: cleaned up some low hanging nocommits, and added a few more based on a review of the current code

Posted by ho...@apache.org.
SOLR-445: cleaned up some low hanging nocommits, and added a few more based on a review of the current code

In particular, updated a few test comments to note that improvements are blocked by SOLR-8830


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/b24fb027
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/b24fb027
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/b24fb027

Branch: refs/heads/jira/SOLR-445
Commit: b24fb0274f5736d1a733f227458ed6fa54865fcc
Parents: c37d5a8
Author: Chris Hostetter <ho...@apache.org>
Authored: Thu Mar 10 15:50:00 2016 -0700
Committer: Chris Hostetter <ho...@apache.org>
Committed: Thu Mar 10 15:50:00 2016 -0700

----------------------------------------------------------------------
 .../processor/DistributedUpdateProcessor.java   | 27 ++++++++++++++++----
 .../processor/TolerantUpdateProcessor.java      | 13 +++-------
 .../TolerantUpdateProcessorFactory.java         |  4 +--
 .../DistribTolerantUpdateProcessorTest.java     | 27 ++++++++------------
 .../cloud/TestTolerantUpdateProcessorCloud.java |  6 ++---
 .../processor/TolerantUpdateProcessorTest.java  | 23 ++++++++---------
 .../solr/client/solrj/impl/CloudSolrClient.java |  5 ++--
 7 files changed, 53 insertions(+), 52 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b24fb027/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
index 6b650fb..105d3ff 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
@@ -923,10 +923,6 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
 
     
     if (0 < errorsForClient.size()) {
-      // nocommit: slight intentional change here: throwing instead of using setException directly
-      // nocommit: sanity check that doesn't break any other assumptions?
-      //
-      // nocommit: if 1==errorsForClient.size() should we throw it directly? ... would mean changes for catching logic in TolerantUP.finish()
       throw new DistributedUpdatesAsyncException(errorsForClient);
     }
   }
@@ -1706,7 +1702,26 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
       super(buildCode(errors), buildMsg(errors), null);
       this.errors = errors;
 
-      // nocommit: can/should we try to merge the ((SolrException)Error.e).getMetadata() into this.getMetadata() ?
+      // nocommit: the code below is useful for preserving things like "root-error-class"
+      // nocommit: but wreaks havoc on TolerantUpdateProcessor's exception annotating.
+      //
+      // nocommit: before enabling the code below, we need to make TolerantUpdateProcessor
+      // nocommit: smart enough to remove metadata it cares about before adding it (and others) back
+      //
+      // // create a merged copy of the metadata from all wrapped exceptions
+      // NamedList<String> metadata = new NamedList<String>();
+      // for (Error error : errors) {
+      //   if (error.e instanceof SolrException) {
+      //     SolrException e = (SolrException) error.e;
+      //     NamedList<String> eMeta = e.getMetadata();
+      //     if (null != eMeta) {
+      //       metadata.addAll(eMeta);
+      //     }
+      //   }
+      // }
+      // if (0 < metadata.size()) {
+      //   this.setMetadata(metadata);
+      // }
     }
 
     /** Helper method for constructor */
@@ -1721,6 +1736,8 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
         if (result != error.statusCode ) {
           // ...otherwise use sensible default
           return ErrorCode.SERVER_ERROR.code;
+          // nocommit: don't short-circuit - check them all...
+          // nocommit: ...even if not all same, use 400 if all 4xx, else use 500
         }
       }
       return result;
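
The new nocommit sketches how buildCode should eventually merge status codes.
One possible shape, following the comment's rule (unanimous code wins;
otherwise 400 when every error is a 4xx, else 500), offered as a hedged sketch
rather than the committed implementation:

    private static int buildCode(List<Error> errors) {
      assert null != errors && 0 < errors.size();
      boolean allSame = true;
      boolean all4xx = true;
      final int first = errors.get(0).statusCode;
      for (Error error : errors) {
        allSame &= (first == error.statusCode);
        all4xx  &= (400 <= error.statusCode && error.statusCode < 500);
      }
      if (allSame) {
        return first;
      }
      return all4xx ? ErrorCode.BAD_REQUEST.code : ErrorCode.SERVER_ERROR.code;
    }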

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b24fb027/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
index 30e5f80..a858e92 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
@@ -89,7 +89,6 @@ public class TolerantUpdateProcessor extends UpdateRequestProcessor {
   private final int maxErrors;
   
   private final SolrQueryRequest req;
-  private final SolrQueryResponse rsp; // nocommit: needed?
   private ZkController zkController;
 
   /**
@@ -116,7 +115,6 @@ public class TolerantUpdateProcessor extends UpdateRequestProcessor {
   //
   // So as a kludge, we keep track of them for deduping against identical remote failures
   //
-  // :nocommit: probably need to use this for "commit" as well?
   private Set<ToleratedUpdateError> knownDBQErrors = new HashSet<>();
         
   private final FirstErrTracker firstErrTracker = new FirstErrTracker();
@@ -126,7 +124,6 @@ public class TolerantUpdateProcessor extends UpdateRequestProcessor {
     super(next);
     assert maxErrors >= 0;
       
-    this.rsp = rsp; // nocommit: needed?
     header = rsp.getResponseHeader();
     this.maxErrors = maxErrors;
     this.req = req;
@@ -177,16 +174,12 @@ public class TolerantUpdateProcessor extends UpdateRequestProcessor {
   @Override
   public void processDelete(DeleteUpdateCommand cmd) throws IOException {
     
-    // nocommit: do we need special delById => isLeader(id) vs delByQ => isAnyLeader logic?
-      
     try {
       
       super.processDelete(cmd);
       
     } catch (Throwable t) { // nocommit: OOM trap
       firstErrTracker.caught(t);
-
-      // nocommit: do we need isLeader type logic like processAdd ? does processAdd even need it?
       
       ToleratedUpdateError err = new ToleratedUpdateError(cmd.isDeleteById() ? CmdType.DELID : CmdType.DELQ,
                                                           cmd.isDeleteById() ? cmd.id : cmd.query,
@@ -208,8 +201,8 @@ public class TolerantUpdateProcessor extends UpdateRequestProcessor {
   }
 
   
-  // nocommit: what about processCommit and other UpdateProcessor methods?
-  // nocommit: ...at a minimum use firstErrTracker to catch & rethrow so finish can annotate
+  // nocommit: override processCommit and other UpdateProcessor methods
+  // nocommit: ...use firstErrTracker to catch & rethrow so finish can annotate
 
   @Override
   public void finish() throws IOException {
@@ -299,7 +292,7 @@ public class TolerantUpdateProcessor extends UpdateRequestProcessor {
     return field.getType().indexedToReadable(ref, new CharsRefBuilder()).toString();
   }
 
-  // nocommit: javadocs ... also: sanity check this method is even accurate
+  // nocommit: 1) is this method even needed? 2) is this method correct? 3) javadocs
   private boolean isLeader(AddUpdateCommand cmd) {
     if(!cmd.getReq().getCore().getCoreDescriptor().getCoreContainer().isZooKeeperAware())
       return true;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b24fb027/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessorFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessorFactory.java
index 45f5fcb..1338908 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessorFactory.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessorFactory.java
@@ -112,9 +112,9 @@ public class TolerantUpdateProcessorFactory extends UpdateRequestProcessorFactor
       maxErrors = this.defaultMaxErrors;
     }
 
-    // nocommit: support maxErrors < 0 to mean the same as Integer.MAX_VALUE
+    // nocommit: support maxErrors < 0 to mean the same as Integer.MAX_VALUE (add test)
     
-    // nocommit: if (effective) maxErrors==0, then bypass this processor completley?
+    // NOTE: even if 0==maxErrors, we still inject the processor into the chain so the response has expected header info
     return new TolerantUpdateProcessor(req, rsp, next, maxErrors, distribPhase);
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b24fb027/solr/core/src/test/org/apache/solr/cloud/DistribTolerantUpdateProcessorTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/DistribTolerantUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/cloud/DistribTolerantUpdateProcessorTest.java
index bebe642..5555e31 100644
--- a/solr/core/src/test/org/apache/solr/cloud/DistribTolerantUpdateProcessorTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/DistribTolerantUpdateProcessorTest.java
@@ -35,6 +35,7 @@ import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.junit.Test;
 
+// nocommit: should we just remove this class? does it give us anything not covered by TestTolerantUpdateProcessorCloud?
 public class DistribTolerantUpdateProcessorTest extends AbstractFullDistribZkTestBase {
   
 
@@ -90,15 +91,14 @@ public class DistribTolerantUpdateProcessorTest extends AbstractFullDistribZkTes
       fail("Expecting exception");
     } catch (SolrException e) {
       // we can't make any reliable assertions about the error message, because
-      // it varies based on how the request was routed
-      // nocommit: can we tighten this any more?
+      // it varies based on how the request was routed -- see SOLR-8830
       assertTrue("not the type of error we were expecting: " + e.toString(),
                  400 <= e.code() && e.code() < 500);
     }
-    assertUSucceedsWithErrors("tolerant-chain-max-errors-10",
-                              new SolrInputDocument[]{ invalidDoc,
-                                                      sdoc("id", 4, "text", "the brown fox") },
-                              null, 1, "1");
+    assertAddsSucceedWithErrors("tolerant-chain-max-errors-10",
+                                new SolrInputDocument[]{ invalidDoc,
+                                                         sdoc("id", 4, "text", "the brown fox") },
+                                null, "1");
     commit();
 
     ModifiableSolrParams query = new ModifiableSolrParams();
@@ -109,15 +109,9 @@ public class DistribTolerantUpdateProcessorTest extends AbstractFullDistribZkTes
 
   }
 
-  // nocommit: redesign so that we can assert errors of diff types besides "add" (ie: deletes) 
-  private void assertUSucceedsWithErrors(String chain, SolrInputDocument[] docs,
-                                         SolrParams requestParams,
-                                         int numErrors,
-                                         String... idsShouldFail) throws Exception {
-    
-    // nocommit: retire numErrors from this method sig ... trappy
-    assertEquals("bad test, idsShouldFail.length doesn't match numErrors",
-                 numErrors, idsShouldFail.length);
+  private void assertAddsSucceedWithErrors(String chain, SolrInputDocument[] docs,
+                                            SolrParams requestParams,
+                                            String... idsShouldFail) throws Exception {
     
     ModifiableSolrParams newParams = new ModifiableSolrParams(requestParams);
     newParams.set("update.chain", chain);
@@ -132,8 +126,7 @@ public class DistribTolerantUpdateProcessorTest extends AbstractFullDistribZkTes
     Set<String> addErrorIdsExpected = new HashSet<String>(Arrays.asList(idsShouldFail));
     
     for (SimpleOrderedMap<String> err : errors) {
-      // nocommit: support other types
-      assertEquals("nocommit: error type not handled yet by this method",
+      assertEquals("error type not handled yet by this method",
                    "ADD", err.get("type"));
       
       String id = err.get("id");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b24fb027/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java
index 93200d8..7d8d769 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java
@@ -525,8 +525,7 @@ public class TestTolerantUpdateProcessorCloud extends SolrCloudTestCase {
       fail("did not get a top level exception when more then 10 docs failed: " + rsp.toString());
     } catch (SolrException e) {
       // we can't make any reliable assertions about the error message, because
-      // it varies based on how the request was routed
-      // nocommit: verify that we can't do an e.getMessage() substring check
+      // it varies based on how the request was routed -- see SOLR-8830
       assertEquals("not the type of error we were expecting ("+e.code()+"): " + e.toString(),
                    // NOTE: we always expect a 400 because we know that's what we would get from these types of errors
                    // on a single node setup -- a 5xx type error isn't something we should have triggered
@@ -600,8 +599,7 @@ public class TestTolerantUpdateProcessorCloud extends SolrCloudTestCase {
       fail("did not get a top level exception when more then 10 docs failed: " + rsp.toString());
     } catch (SolrException e) {
       // we can't make any reliable assertions about the error message, because
-      // it varies based on how the request was routed
-      // nocommit: verify that we can't do an e.getMessage() substring check
+      // it varies based on how the request was routed -- see SOLR-8830
       assertEquals("not the type of error we were expecting ("+e.code()+"): " + e.toString(),
                    // NOTE: we always expect a 400 because we know that's what we would get from these types of errors
                    // on a single node setup -- a 5xx type error isn't something we should have triggered

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b24fb027/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java
index 7470ea0..7f77ad5 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java
@@ -99,6 +99,9 @@ public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
         ,"//result[@numFound='0']");
     super.tearDown();
   }
+
+  // nocommit: add reflection based test to ensure processor overrides all methods & uses firstErrTracker
+ 
   
   @Test
   public void testValidAdds() throws IOException {
@@ -128,7 +131,7 @@ public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
       //expected
       assertTrue(e.getMessage().contains("Document is missing mandatory uniqueKey field"));
     }
-    assertAddsSucceedWithErrors("tolerant-chain-max-errors-10", Arrays.asList(new SolrInputDocument[]{invalidDoc}), null, 1, "(unknown)");
+    assertAddsSucceedWithErrors("tolerant-chain-max-errors-10", Arrays.asList(new SolrInputDocument[]{invalidDoc}), null, "(unknown)");
     
     //a valid doc
     SolrInputDocument validDoc = doc(field("id", 1f, "1"), field("text", 1f, "the quick brown fox"));
@@ -147,7 +150,7 @@ public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
         ,"//result[@numFound='0']");
     
     
-    assertAddsSucceedWithErrors("tolerant-chain-max-errors-10", Arrays.asList(new SolrInputDocument[]{invalidDoc, validDoc}), null, 1, "(unknown)");
+    assertAddsSucceedWithErrors("tolerant-chain-max-errors-10", Arrays.asList(new SolrInputDocument[]{invalidDoc, validDoc}), null, "(unknown)");
     assertU(commit());
     
     // verify that the good document made it in. 
@@ -170,7 +173,7 @@ public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
     assertQ(req("q","id:3")
         ,"//result[@numFound='0']");
     
-    assertAddsSucceedWithErrors("tolerant-chain-max-errors-10", Arrays.asList(new SolrInputDocument[]{invalidDoc, validDoc}), null, 1, "2");
+    assertAddsSucceedWithErrors("tolerant-chain-max-errors-10", Arrays.asList(new SolrInputDocument[]{invalidDoc, validDoc}), null, "2");
     assertU(commit());
     
     // The valid document was indexed
@@ -187,7 +190,7 @@ public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
   public void testMaxErrorsDefault() throws IOException {
     try {
       // by default the TolerantUpdateProcessor accepts all errors, so this batch should succeed with 10 errors.
-      assertAddsSucceedWithErrors("tolerant-chain-max-errors-not-set", docs, null, 10, badIds);
+      assertAddsSucceedWithErrors("tolerant-chain-max-errors-not-set", docs, null, badIds);
     } catch(Exception e) {
       fail("Shouldn't get an exception for this batch: " + e.getMessage());
     }
@@ -200,7 +203,7 @@ public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
     ModifiableSolrParams requestParams = new ModifiableSolrParams();
     requestParams.add("maxErrors", "10");
     // still OK
-    assertAddsSucceedWithErrors("tolerant-chain-max-errors-not-set", docs, requestParams, 10, badIds);
+    assertAddsSucceedWithErrors("tolerant-chain-max-errors-not-set", docs, requestParams, badIds);
     assertU(commit());
     assertQ(req("q","*:*")
         ,"//result[@numFound='10']");
@@ -212,7 +215,7 @@ public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
     requestParams.add("maxErrors", "5");
     try {
       // should fail
-      assertAddsSucceedWithErrors("tolerant-chain-max-errors-not-set", docs, requestParams, 10, badIds);
+      assertAddsSucceedWithErrors("tolerant-chain-max-errors-not-set", docs, requestParams, badIds);
       fail("Expecting exception");
     } catch (SolrException e) {
       assertTrue(e.getMessage(),
@@ -234,7 +237,7 @@ public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
     requestParams.add("maxErrors", "0");
     try {
       // should fail
-      assertAddsSucceedWithErrors("tolerant-chain-max-errors-10", smallBatch, requestParams, 1, "1");
+      assertAddsSucceedWithErrors("tolerant-chain-max-errors-10", smallBatch, requestParams, "1");
       fail("Expecting exception");
     } catch (SolrException e) {
       assertTrue(e.getMessage().contains("ERROR: [doc=1] Error adding field 'weight'='b' msg=For input string: \"b\""));
@@ -351,13 +354,9 @@ public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
   
   private void assertAddsSucceedWithErrors(String chain,
                                            final Collection<SolrInputDocument> docs,
-                                           SolrParams requestParams, int numErrors,
+                                           SolrParams requestParams, 
                                            String... idsShouldFail) throws IOException {
 
-    // nocommit: retire numErrors from this method sig ... trappy
-    assertEquals("bad test, idsShouldFail.length doesn't match numErrors",
-                 numErrors, idsShouldFail.length);
-    
     SolrQueryResponse response = add(chain, requestParams, docs);
     
     @SuppressWarnings("unchecked")

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b24fb027/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
index febb56f..9a34976 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
@@ -778,9 +778,10 @@ public class CloudSolrClient extends SolrClient {
       cheader.add("errors", toleratedErrors);
       if (maxToleratedErrors < toleratedErrors.size()) {
         // cumulative errors are too high, we need to throw a client exception w/correct metadata
-        
+
+        // nocommit: refactor & reuse DistributedUpdatesAsyncException
         NamedList metadata = new NamedList<String>();
-        SolrException toThrow = new SolrException(ErrorCode.BAD_REQUEST, "nocommit: need better msg");
+        SolrException toThrow = new SolrException(ErrorCode.BAD_REQUEST, "nocommit: better msg from DUAE");
         toThrow.setMetadata(metadata);
         for (SimpleOrderedMap<String> err : toleratedErrors) {
           ToleratedUpdateError te = ToleratedUpdateError.parseMap(err);
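
[Editorial note: since the client now gets the same "errors" header a single node would produce, tolerated errors can be inspected client-side. A hedged sketch, with the entry keys ("type", "id") taken from the test code above -- the exact header shape is still in flux on this branch:

    NamedList<?> header = rsp.getResponseHeader();
    @SuppressWarnings("unchecked")
    List<SimpleOrderedMap<String>> errors = (List<SimpleOrderedMap<String>>) header.get("errors");
    for (SimpleOrderedMap<String> err : errors) {
      // each entry carries at least a "type" (e.g. ADD) and an "id"
      System.out.println(err.get("type") + " failed for id " + err.get("id"));
    }
]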


[18/50] [abbrv] lucene-solr git commit: SOLR-8813: Add test for MultiValued fields being returned in the correct order

Posted by ho...@apache.org.
SOLR-8813: Add test for MultiValued fields being returned in the correct order


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/540e8010
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/540e8010
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/540e8010

Branch: refs/heads/jira/SOLR-445
Commit: 540e8010c32c600a4ec6b29f6236ba43b9d1afd6
Parents: dee8b5e
Author: Erick Erickson <er...@apache.org>
Authored: Wed Mar 9 11:40:44 2016 -0800
Committer: Erick Erickson <er...@apache.org>
Committed: Wed Mar 9 11:40:44 2016 -0800

----------------------------------------------------------------------
 .../conf/schema-non-stored-docvalues.xml        | 131 ++++++++++---------
 .../solr/schema/TestUseDocValuesAsStored.java   |  60 ++++++++-
 2 files changed, 126 insertions(+), 65 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/540e8010/solr/core/src/test-files/solr/collection1/conf/schema-non-stored-docvalues.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema-non-stored-docvalues.xml b/solr/core/src/test-files/solr/collection1/conf/schema-non-stored-docvalues.xml
index aab4da4..f700e60 100644
--- a/solr/core/src/test-files/solr/collection1/conf/schema-non-stored-docvalues.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/schema-non-stored-docvalues.xml
@@ -35,70 +35,73 @@
        1.6: useDocValuesAsStored defaults to true.
      -->
 
-  <types>
-
-    <!-- field type definitions... note that the "name" attribute is
-         just a label to be used by field definitions.  The "class"
-         attribute and any other attributes determine the real type and
-         behavior of the fieldType.
-      -->
-
-    <fieldType name="int" class="solr.TrieIntField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
-    <fieldType name="float" class="solr.TrieFloatField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
-    <fieldType name="long" class="solr.TrieLongField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
-    <fieldType name="double" class="solr.TrieDoubleField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
-
-
-    <fieldType name="tint" class="solr.TrieIntField" precisionStep="8" omitNorms="true" positionIncrementGap="0"/>
-    <fieldType name="tfloat" class="solr.TrieFloatField" precisionStep="8" omitNorms="true" positionIncrementGap="0"/>
-    <fieldType name="tlong" class="solr.TrieLongField" precisionStep="8" omitNorms="true" positionIncrementGap="0"/>
-    <fieldType name="tdouble" class="solr.TrieDoubleField" precisionStep="8" omitNorms="true" positionIncrementGap="0"/>
-
-
-    <fieldType name="boolean" class="solr.BoolField" sortMissingLast="true"/>
-    <fieldType name="string" class="solr.StrField" sortMissingLast="true"/>
-
-    <!-- format for date is 1995-12-31T23:59:59.999Z and only the fractional
-         seconds part (.999) is optional.
-      -->
-    <fieldType name="date" class="solr.TrieDateField" precisionStep="0"/>
-    <fieldType name="tdate" class="solr.TrieDateField" precisionStep="6"/>
-    <fieldType name="tdatedv" class="solr.TrieDateField" precisionStep="6" docValues="true"/>
-    <fieldType name="enumField" class="solr.EnumField" enumsConfig="enumsConfig.xml" enumName="severity"/>
-  </types>
-
-  <fields>
-    <field name="id" type="string" indexed="true" stored="true" multiValued="false" required="false"/>
-
-    <field name="_version_" type="long" indexed="true" stored="true" docValues="true" multiValued="false"/>
-
-    <field name="nonstored_dv_str" type="string" indexed="true" stored="false"
-           docValues="true" useDocValuesAsStored="false" multiValued="false"/>
-
-    <field name="test_nonstored_dv_str" type="string" indexed="true" stored="false" docValues="true" multiValued="false"/>
-
-    <!--  single valued fields -->
-    <field name="enum_dvo" type="enumField" indexed="true" stored="false" multiValued="false" docValues="true"/>
-    <dynamicField name="*_s_dvo2" multiValued="false" type="string" docValues="true" indexed="false" stored="false"/>
-    <dynamicField name="*_s_dv" multiValued="false" type="string" docValues="true" indexed="true" stored="true"/>
-    <dynamicField name="*_s_dvo" multiValued="false" type="string" docValues="true" indexed="true" stored="false"/>
-    <dynamicField name="*_i_dvo" multiValued="false" type="int" docValues="true" indexed="true" stored="false"/>
-    <dynamicField name="*_f_dvo" multiValued="false" type="float" docValues="true" indexed="true" stored="false"/>
-    <dynamicField name="*_d_dvo" multiValued="false" type="double" docValues="true" indexed="true" stored="false"/>
-    <dynamicField name="*_l_dvo" multiValued="false" type="long" docValues="true" indexed="true" stored="false"/>
-    <dynamicField name="*_dt_dvo" multiValued="false" type="date" docValues="true" indexed="true" stored="false"/>
-
-    <!--  multi valued fields -->
-    <field name="enums_dvo" type="enumField" indexed="true" stored="false" multiValued="true" docValues="true"/>
-    <dynamicField name="*_ss_dvo2" multiValued="true" type="string" docValues="true" indexed="false" stored="false"/>
-    <dynamicField name="*_ss_dv" multiValued="true" type="string" docValues="true" indexed="true" stored="true"/>
-    <dynamicField name="*_ss_dvo" multiValued="true" type="string" docValues="true" indexed="true" stored="false"/>
-    <dynamicField name="*_is_dvo" multiValued="true" type="int" docValues="true" indexed="true" stored="false"/>
-    <dynamicField name="*_fs_dvo" multiValued="true" type="float" docValues="true" indexed="true" stored="false"/>
-    <dynamicField name="*_ds_dvo" multiValued="true" type="double" docValues="true" indexed="true" stored="false"/>
-    <dynamicField name="*_ls_dvo" multiValued="true" type="long" docValues="true" indexed="true" stored="false"/>
-    <dynamicField name="*_dts_dvo" multiValued="true" type="date" docValues="true" indexed="true" stored="false"/>
-  </fields>
+
+  <!-- field type definitions... note that the "name" attribute is
+       just a label to be used by field definitions.  The "class"
+       attribute and any other attributes determine the real type and
+       behavior of the fieldType.
+    -->
+
+  <fieldType name="int" class="solr.TrieIntField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
+  <fieldType name="float" class="solr.TrieFloatField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
+  <fieldType name="long" class="solr.TrieLongField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
+  <fieldType name="double" class="solr.TrieDoubleField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
+
+
+  <fieldType name="tint" class="solr.TrieIntField" precisionStep="8" omitNorms="true" positionIncrementGap="0"/>
+  <fieldType name="tfloat" class="solr.TrieFloatField" precisionStep="8" omitNorms="true" positionIncrementGap="0"/>
+  <fieldType name="tlong" class="solr.TrieLongField" precisionStep="8" omitNorms="true" positionIncrementGap="0"/>
+  <fieldType name="tdouble" class="solr.TrieDoubleField" precisionStep="8" omitNorms="true" positionIncrementGap="0"/>
+
+
+  <fieldType name="boolean" class="solr.BoolField" sortMissingLast="true"/>
+  <fieldType name="string" class="solr.StrField" sortMissingLast="true"/>
+
+  <!-- format for date is 1995-12-31T23:59:59.999Z and only the fractional
+       seconds part (.999) is optional.
+    -->
+  <fieldType name="date" class="solr.TrieDateField" precisionStep="0"/>
+  <fieldType name="tdate" class="solr.TrieDateField" precisionStep="6"/>
+  <fieldType name="tdatedv" class="solr.TrieDateField" precisionStep="6" docValues="true"/>
+  <fieldType name="enumField" class="solr.EnumField" enumsConfig="enumsConfig.xml" enumName="severity"/>
+
+
+  <field name="id" type="string" indexed="true" stored="true" multiValued="false" required="false"/>
+
+  <field name="_version_" type="long" indexed="true" stored="true" docValues="true" multiValued="false"/>
+
+  <field name="nonstored_dv_str" type="string" indexed="true" stored="false"
+         docValues="true" useDocValuesAsStored="false" multiValued="false"/>
+
+  <field name="test_nonstored_dv_str" type="string" indexed="true" stored="false" docValues="true" multiValued="false"/>
+
+  <field name="test_mvt_dvt_st_str" type="string" indexed="true" multiValued="true" docValues="true" stored="true"/>
+  <field name="test_mvt_dvt_sf_str" type="string" indexed="true" multiValued="true" docValues="true" stored="false"/>
+  <field name="test_mvt_dvf_st_str" type="string" indexed="true" multiValued="true" docValues="false" stored="true"/>
+  <field name="test_mvt_dvu_st_str" type="string" indexed="true" multiValued="true" stored="true"/>
+
+  <!--  single valued fields -->
+  <field name="enum_dvo" type="enumField" indexed="true" stored="false" multiValued="false" docValues="true"/>
+  <dynamicField name="*_s_dvo2" multiValued="false" type="string" docValues="true" indexed="false" stored="false"/>
+  <dynamicField name="*_s_dv" multiValued="false" type="string" docValues="true" indexed="true" stored="true"/>
+  <dynamicField name="*_s_dvo" multiValued="false" type="string" docValues="true" indexed="true" stored="false"/>
+  <dynamicField name="*_i_dvo" multiValued="false" type="int" docValues="true" indexed="true" stored="false"/>
+  <dynamicField name="*_f_dvo" multiValued="false" type="float" docValues="true" indexed="true" stored="false"/>
+  <dynamicField name="*_d_dvo" multiValued="false" type="double" docValues="true" indexed="true" stored="false"/>
+  <dynamicField name="*_l_dvo" multiValued="false" type="long" docValues="true" indexed="true" stored="false"/>
+  <dynamicField name="*_dt_dvo" multiValued="false" type="date" docValues="true" indexed="true" stored="false"/>
+
+  <!--  multi valued fields -->
+  <field name="enums_dvo" type="enumField" indexed="true" stored="false" multiValued="true" docValues="true"/>
+  <dynamicField name="*_ss_dvo2" multiValued="true" type="string" docValues="true" indexed="false" stored="false"/>
+  <dynamicField name="*_ss_dv" multiValued="true" type="string" docValues="true" indexed="true" stored="true"/>
+  <dynamicField name="*_ss_dvo" multiValued="true" type="string" docValues="true" indexed="true" stored="false"/>
+  <dynamicField name="*_is_dvo" multiValued="true" type="int" docValues="true" indexed="true" stored="false"/>
+  <dynamicField name="*_fs_dvo" multiValued="true" type="float" docValues="true" indexed="true" stored="false"/>
+  <dynamicField name="*_ds_dvo" multiValued="true" type="double" docValues="true" indexed="true" stored="false"/>
+  <dynamicField name="*_ls_dvo" multiValued="true" type="long" docValues="true" indexed="true" stored="false"/>
+  <dynamicField name="*_dts_dvo" multiValued="true" type="date" docValues="true" indexed="true" stored="false"/>
+
 
   <uniqueKey>id</uniqueKey>
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/540e8010/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored.java b/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored.java
index 46f76ab..839121a 100644
--- a/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored.java
+++ b/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored.java
@@ -76,7 +76,7 @@ public class TestUseDocValuesAsStored extends AbstractBadConfigTestBase {
 
   @Test
   public void testOnEmptyIndex() throws Exception {
-    assertU(delQ("*:*"));
+    clearIndex();
     assertU(commit());
     assertJQ(req("q", "*:*"), "/response/numFound==0");
     assertJQ(req("q", "*:*", "fl", "*"), "/response/numFound==0");
@@ -238,4 +238,62 @@ public class TestUseDocValuesAsStored extends AbstractBadConfigTestBase {
     assertQ(desc + ": " + fl, req("q", "*:*", "fl", fl), xpaths);
 
   }
+  
+  // See SOLR-8740 for a discussion. This test is here to make sure any change to the behavior of multiValued
+  // fields is a conscious one, given that we can now return docValues fields. The behavior we've guaranteed in
+  // the past is that if multiValued fields are stored, they're returned in the document in the order they were added.
+  // There are four new fields added:
+  // <field name="test_mvt_dvt_st_str" type="string" indexed="true" multiValued="true" docValues="true"  stored="true"/>
+  // <field name="test_mvt_dvt_sf_str" type="string" indexed="true" multiValued="true" docValues="true"  stored="false"/>
+  // <field name="test_mvt_dvf_st_str" type="string" indexed="true" multiValued="true" docValues="false" stored="true"/>
+  // <field name="test_mvt_dvu_st_str" type="string" indexed="true" multiValued="true"                   stored="true"/>
+  //
+  // If any of these tests break as a result of returning DocValues rather than stored values, make sure we reach some
+  // consensus that any breaks on back-compat are A Good Thing and that that behavior is carefully documented!
+
+  @Test
+  public void testMultivaluedOrdering() throws Exception {
+    clearIndex();
+    
+    // multiValued=true, docValues=true, stored=true. Should return in original order
+    assertU(adoc("id", "1", "test_mvt_dvt_st_str", "cccc", "test_mvt_dvt_st_str", "aaaa", "test_mvt_dvt_st_str", "bbbb"));
+    
+    // multiValued=true, docValues=true, stored=false. Should return in sorted order
+    assertU(adoc("id", "2", "test_mvt_dvt_sf_str", "cccc", "test_mvt_dvt_sf_str", "aaaa", "test_mvt_dvt_sf_str", "bbbb"));
+    
+    // multiValued=true, docValues=false, stored=true. Should return in original order
+    assertU(adoc("id", "3", "test_mvt_dvf_st_str", "cccc", "test_mvt_dvf_st_str", "aaaa", "test_mvt_dvf_st_str", "bbbb"));
+    
+    // multiValued=true, docValues=not specified, stored=true. Should return in original order
+    assertU(adoc("id", "4", "test_mvt_dvu_st_str", "cccc", "test_mvt_dvu_st_str", "aaaa", "test_mvt_dvu_st_str", "bbbb"));
+    
+    assertU(commit());
+    
+    assertJQ(req("q", "id:1", "fl", "test_mvt_dvt_st_str"), 
+        "/response/docs/[0]/test_mvt_dvt_st_str/[0]==cccc",
+        "/response/docs/[0]/test_mvt_dvt_st_str/[1]==aaaa",
+        "/response/docs/[0]/test_mvt_dvt_st_str/[2]==bbbb");
+
+    // Currently, this request throws an exception since stored=false. When SOLR-8740 is committed, it should
+    // succeed, returning the field in sorted order.
+    try {
+      assertJQ(req("q", "id:2", "fl", "test_mvt_dvt_sf_str"),
+          "/response/docs/[0]/test_mvt_dvt_sf_str/[0]==aaaa",
+          "/response/docs/[0]/test_mvt_dvt_sf_str/[1]==bbbb",
+          "/response/docs/[0]/test_mvt_dvt_sf_str/[2]==cccc");
+    } catch (Exception e) {
+      // do nothing until SOLR-8740 is committed. At that point this should not throw an exception. 
+      // NOTE: I think the assertions are already correct for post-SOLR-8740 behavior, so just remove the try/catch then
+    }
+    assertJQ(req("q", "id:3", "fl", "test_mvt_dvf_st_str"),
+        "/response/docs/[0]/test_mvt_dvf_st_str/[0]==cccc",
+        "/response/docs/[0]/test_mvt_dvf_st_str/[1]==aaaa",
+        "/response/docs/[0]/test_mvt_dvf_st_str/[2]==bbbb");
+
+    assertJQ(req("q", "id:4", "fl", "test_mvt_dvu_st_str"),
+        "/response/docs/[0]/test_mvt_dvu_st_str/[0]==cccc",
+        "/response/docs/[0]/test_mvt_dvu_st_str/[1]==aaaa",
+        "/response/docs/[0]/test_mvt_dvu_st_str/[2]==bbbb");
+
+  }
 }
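
[Editorial note: the practical upshot of the test above, as a hedged SolrJ sketch -- hypothetical client and collection name, field names from the schema above, and the second query's ordering only holds once SOLR-8740 lands:

    SolrQuery q = new SolrQuery("id:1");
    q.setFields("test_mvt_dvt_st_str");  // stored=true: values return in insertion order (cccc, aaaa, bbbb)
    QueryResponse storedOrder = client.query("collection1", q);

    q = new SolrQuery("id:2");
    q.setFields("test_mvt_dvt_sf_str");  // stored=false, docValues=true: sorted order (aaaa, bbbb, cccc) post SOLR-8740
    QueryResponse dvOrder = client.query("collection1", q);
]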


[28/50] [abbrv] lucene-solr git commit: LUCENE-7090, LUCENE-7075: deprecate single-valued LegacyNumerics fieldcaching, provide Points-based replacement.

Posted by ho...@apache.org.
LUCENE-7090, LUCENE-7075: deprecate single-valued LegacyNumerics fieldcaching, provide Points-based replacement.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d35d5694
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d35d5694
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d35d5694

Branch: refs/heads/jira/SOLR-445
Commit: d35d5694d8ce04f062891e01566514d5496e646a
Parents: 0b8b16f
Author: Robert Muir <rm...@apache.org>
Authored: Thu Mar 10 08:28:51 2016 -0500
Committer: Robert Muir <rm...@apache.org>
Committed: Thu Mar 10 08:29:56 2016 -0500

----------------------------------------------------------------------
 .../apache/lucene/uninverting/FieldCache.java   | 113 ++-
 .../lucene/uninverting/FieldCacheImpl.java      | 164 ++++-
 .../lucene/uninverting/UninvertingReader.java   |  97 ++-
 .../lucene/uninverting/TestDocTermOrds.java     |   2 +-
 .../lucene/uninverting/TestFieldCache.java      | 141 ++--
 .../uninverting/TestFieldCacheReopen.java       |   9 +-
 .../TestFieldCacheSanityChecker.java            |  10 +-
 .../lucene/uninverting/TestFieldCacheSort.java  | 684 +++++++++++++++++--
 .../uninverting/TestFieldCacheSortRandom.java   |   8 +-
 .../uninverting/TestFieldCacheVsDocValues.java  |   4 +-
 .../uninverting/TestFieldCacheWithThreads.java  |  11 +-
 .../uninverting/TestLegacyFieldCache.java       | 498 ++++++++++++++
 .../lucene/uninverting/TestNumericTerms32.java  |   6 +-
 .../lucene/uninverting/TestNumericTerms64.java  |   8 +-
 .../uninverting/TestUninvertingReader.java      |   6 +-
 .../apache/lucene/spatial/SpatialTestCase.java  |   4 +-
 .../java/org/apache/solr/schema/EnumField.java  |   2 +-
 .../java/org/apache/solr/schema/TrieField.java  |   8 +-
 18 files changed, 1575 insertions(+), 200 deletions(-)
----------------------------------------------------------------------
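
[Editorial note: migration in a nutshell, as a minimal sketch with a hypothetical field name "price" -- code that used to map a LegacyIntField to the type formerly named INTEGER (now LEGACY_INTEGER) should index with IntPoint and map to INTEGER_POINT instead:

    Map<String, UninvertingReader.Type> mapping = new HashMap<>();
    mapping.put("price", UninvertingReader.Type.INTEGER_POINT); // was Type.INTEGER / LEGACY_INTEGER
    DirectoryReader wrapped = UninvertingReader.wrap(DirectoryReader.open(dir), mapping);
    // "price" now acts as if it had been indexed with NumericDocValuesField
]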


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/misc/src/java/org/apache/lucene/uninverting/FieldCache.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/java/org/apache/lucene/uninverting/FieldCache.java b/lucene/misc/src/java/org/apache/lucene/uninverting/FieldCache.java
index 314d6aa..27d68e0 100644
--- a/lucene/misc/src/java/org/apache/lucene/uninverting/FieldCache.java
+++ b/lucene/misc/src/java/org/apache/lucene/uninverting/FieldCache.java
@@ -32,6 +32,7 @@ import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LegacyNumericUtils;
+import org.apache.lucene.util.NumericUtils;
 import org.apache.lucene.util.RamUsageEstimator;
 
 /**
@@ -71,21 +72,101 @@ interface FieldCache {
      * @param terms the {@link Terms} instance to create the {@link TermsEnum} from.
      * @return a possibly filtered {@link TermsEnum} instance, this method must not return <code>null</code>.
      * @throws IOException if an {@link IOException} occurs
+     * @deprecated index with Points instead
      */
+    @Deprecated
     public TermsEnum termsEnum(Terms terms) throws IOException;
     
     /** Parse's this field's value */
     public long parseValue(BytesRef term);
   }
+  
+  /**
+   * Base class for points parsers. These parsers do not use the inverted index, but instead
+   * uninvert point data.
+   * 
+   * This abstraction can be cleaned up when Parser.termsEnum is removed.
+   */
+  public abstract class PointParser implements Parser {
+    public final TermsEnum termsEnum(Terms terms) throws IOException {
+      throw new UnsupportedOperationException("makes no sense for parsing points");
+    }
+  }
 
   /** Expert: The cache used internally by sorting and range query classes. */
   public static FieldCache DEFAULT = new FieldCacheImpl();
 
   /**
+   * A parser instance for int values encoded by {@link org.apache.lucene.util.NumericUtils}, e.g. when indexed
+   * via {@link org.apache.lucene.document.IntPoint}.
+   */
+  public static final Parser INT_POINT_PARSER = new PointParser() {
+    @Override
+    public long parseValue(BytesRef point) {
+      return NumericUtils.sortableBytesToInt(point.bytes, point.offset);
+    }
+    
+    @Override
+    public String toString() { 
+      return FieldCache.class.getName()+".INT_POINT_PARSER"; 
+    }
+  };
+  
+  /**
+   * A parser instance for long values encoded by {@link org.apache.lucene.util.NumericUtils}, e.g. when indexed
+   * via {@link org.apache.lucene.document.LongPoint}.
+   */
+  public static final Parser LONG_POINT_PARSER = new PointParser() {
+    @Override
+    public long parseValue(BytesRef point) {
+      return NumericUtils.sortableBytesToLong(point.bytes, point.offset);
+    }
+    
+    @Override
+    public String toString() { 
+      return FieldCache.class.getName()+".LONG_POINT_PARSER"; 
+    }
+  };
+  
+  /**
+   * A parser instance for float values encoded by {@link org.apache.lucene.util.NumericUtils}, e.g. when indexed
+   * via {@link org.apache.lucene.document.FloatPoint}.
+   */
+  public static final Parser FLOAT_POINT_PARSER = new PointParser() {
+    @Override
+    public long parseValue(BytesRef point) {
+      return NumericUtils.sortableFloatBits(NumericUtils.sortableBytesToInt(point.bytes, point.offset));
+    }
+    
+    @Override
+    public String toString() { 
+      return FieldCache.class.getName()+".FLOAT_POINT_PARSER"; 
+    }
+  };
+  
+  /**
+   * A parser instance for double values encoded by {@link org.apache.lucene.util.NumericUtils}, e.g. when indexed
+   * via {@link org.apache.lucene.document.DoublePoint}.
+   */
+  public static final Parser DOUBLE_POINT_PARSER = new PointParser() {
+    @Override
+    public long parseValue(BytesRef point) {
+      return NumericUtils.sortableDoubleBits(NumericUtils.sortableBytesToLong(point.bytes, point.offset));
+    }
+    
+    @Override
+    public String toString() { 
+      return FieldCache.class.getName()+".DOUBLE_POINT_PARSER"; 
+    }
+  };
+  
+  /**
    * A parser instance for int values encoded by {@link org.apache.lucene.util.LegacyNumericUtils}, e.g. when indexed
    * via {@link org.apache.lucene.document.LegacyIntField}/{@link org.apache.lucene.analysis.LegacyNumericTokenStream}.
+   * @deprecated Index with points and use {@link #INT_POINT_PARSER} instead.
    */
-  public static final Parser NUMERIC_UTILS_INT_PARSER = new Parser() {
+  @Deprecated
+  public static final Parser LEGACY_INT_PARSER = new Parser() {
     @Override
     public long parseValue(BytesRef term) {
       return LegacyNumericUtils.prefixCodedToInt(term);
@@ -98,15 +179,17 @@ interface FieldCache {
     
     @Override
     public String toString() { 
-      return FieldCache.class.getName()+".NUMERIC_UTILS_INT_PARSER"; 
+      return FieldCache.class.getName()+".LEGACY_INT_PARSER"; 
     }
   };
 
   /**
    * A parser instance for float values encoded with {@link org.apache.lucene.util.LegacyNumericUtils}, e.g. when indexed
    * via {@link org.apache.lucene.document.LegacyFloatField}/{@link org.apache.lucene.analysis.LegacyNumericTokenStream}.
+   * @deprecated Index with points and use {@link #FLOAT_POINT_PARSER} instead.
    */
-  public static final Parser NUMERIC_UTILS_FLOAT_PARSER = new Parser() {
+  @Deprecated
+  public static final Parser LEGACY_FLOAT_PARSER = new Parser() {
     @Override
     public long parseValue(BytesRef term) {
       int val = LegacyNumericUtils.prefixCodedToInt(term);
@@ -116,7 +199,7 @@ interface FieldCache {
     
     @Override
     public String toString() { 
-      return FieldCache.class.getName()+".NUMERIC_UTILS_FLOAT_PARSER"; 
+      return FieldCache.class.getName()+".LEGACY_FLOAT_PARSER"; 
     }
     
     @Override
@@ -128,15 +211,17 @@ interface FieldCache {
   /**
    * A parser instance for long values encoded by {@link org.apache.lucene.util.LegacyNumericUtils}, e.g. when indexed
    * via {@link org.apache.lucene.document.LegacyLongField}/{@link org.apache.lucene.analysis.LegacyNumericTokenStream}.
+   * @deprecated Index with points and use {@link #LONG_POINT_PARSER} instead.
    */
-  public static final Parser NUMERIC_UTILS_LONG_PARSER = new Parser() {
+  @Deprecated
+  public static final Parser LEGACY_LONG_PARSER = new Parser() {
     @Override
     public long parseValue(BytesRef term) {
       return LegacyNumericUtils.prefixCodedToLong(term);
     }
     @Override
     public String toString() { 
-      return FieldCache.class.getName()+".NUMERIC_UTILS_LONG_PARSER"; 
+      return FieldCache.class.getName()+".LEGACY_LONG_PARSER"; 
     }
     
     @Override
@@ -148,8 +233,10 @@ interface FieldCache {
   /**
    * A parser instance for double values encoded with {@link org.apache.lucene.util.LegacyNumericUtils}, e.g. when indexed
    * via {@link org.apache.lucene.document.LegacyDoubleField}/{@link org.apache.lucene.analysis.LegacyNumericTokenStream}.
+   * @deprecated Index with points and use {@link #DOUBLE_POINT_PARSER} instead.
    */
-  public static final Parser NUMERIC_UTILS_DOUBLE_PARSER = new Parser() {
+  @Deprecated
+  public static final Parser LEGACY_DOUBLE_PARSER = new Parser() {
     @Override
     public long parseValue(BytesRef term) {
       long val = LegacyNumericUtils.prefixCodedToLong(term);
@@ -158,7 +245,7 @@ interface FieldCache {
     }
     @Override
     public String toString() { 
-      return FieldCache.class.getName()+".NUMERIC_UTILS_DOUBLE_PARSER"; 
+      return FieldCache.class.getName()+".LEGACY_DOUBLE_PARSER"; 
     }
     
     @Override
@@ -168,18 +255,20 @@ interface FieldCache {
   };
   
   /** Checks the internal cache for an appropriate entry, and if none is found,
-   *  reads the terms in <code>field</code> and returns a bit set at the size of
+   *  reads the terms/points in <code>field</code> and returns a bit set at the size of
    *  <code>reader.maxDoc()</code>, with turned on bits for each docid that 
    *  does have a value for this field.
+   *  @param parser May be {@code null} if coming from the inverted index, otherwise
+   *                can be a {@link PointParser} to compute from point values.
    */
-  public Bits getDocsWithField(LeafReader reader, String field) throws IOException;
+  public Bits getDocsWithField(LeafReader reader, String field, Parser parser) throws IOException;
 
   /**
    * Returns a {@link NumericDocValues} over the values found in documents in the given
    * field. If the field was indexed as {@link NumericDocValuesField}, it simply
    * uses {@link org.apache.lucene.index.LeafReader#getNumericDocValues(String)} to read the values.
    * Otherwise, it checks the internal cache for an appropriate entry, and if
-   * none is found, reads the terms in <code>field</code> as longs and returns
+   * none is found, reads the terms/points in <code>field</code> as longs and returns
    * an array of size <code>reader.maxDoc()</code> of the value each document
    * has in the given field.
    * 
@@ -199,7 +288,7 @@ interface FieldCache {
    *           If any error occurs.
    */
   public NumericDocValues getNumerics(LeafReader reader, String field, Parser parser, boolean setDocsWithField) throws IOException;
-
+  
   /** Checks the internal cache for an appropriate entry, and if none
    * is found, reads the term values in <code>field</code>
    * and returns a {@link BinaryDocValues} instance, providing a

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/misc/src/java/org/apache/lucene/uninverting/FieldCacheImpl.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/java/org/apache/lucene/uninverting/FieldCacheImpl.java b/lucene/misc/src/java/org/apache/lucene/uninverting/FieldCacheImpl.java
index c6bd943..589da13 100644
--- a/lucene/misc/src/java/org/apache/lucene/uninverting/FieldCacheImpl.java
+++ b/lucene/misc/src/java/org/apache/lucene/uninverting/FieldCacheImpl.java
@@ -34,6 +34,9 @@ import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.index.PointValues;
+import org.apache.lucene.index.PointValues.IntersectVisitor;
+import org.apache.lucene.index.PointValues.Relation;
 import org.apache.lucene.index.SegmentReader;
 import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.index.SortedSetDocValues;
@@ -262,8 +265,68 @@ class FieldCacheImpl implements FieldCache {
   private static abstract class Uninvert {
 
     public Bits docsWithField;
+    final boolean points;
+    
+    // pass true to pull from points, otherwise postings.
+    Uninvert(boolean points) {
+      this.points = points;
+    }
+
+    final void uninvert(LeafReader reader, String field, boolean setDocsWithField) throws IOException {
+      if (points) {
+        uninvertPoints(reader, field, setDocsWithField);
+      } else {
+        uninvertPostings(reader, field, setDocsWithField);
+      }
+    }
+    
+    final void uninvertPoints(LeafReader reader, String field, boolean setDocsWithField) throws IOException {
+      final int maxDoc = reader.maxDoc();
+      PointValues values = reader.getPointValues();
+      assert values != null;
+      assert values.size(field) > 0;
+      
+      if (setDocsWithField) {
+        final int docCount = values.getDocCount(field);
+        assert docCount <= maxDoc;
+        if (docCount == maxDoc) {
+          // Fast case: all docs have this field:
+          this.docsWithField = new Bits.MatchAllBits(maxDoc);
+          setDocsWithField = false;
+        }
+      }
+
+      final boolean doDocsWithField = setDocsWithField;
+      BytesRef scratch = new BytesRef();
+      values.intersect(field, new IntersectVisitor() {
+        @Override
+        public void visit(int docID) throws IOException { 
+          throw new AssertionError(); 
+        }
+
+        @Override
+        public void visit(int docID, byte[] packedValue) throws IOException {
+          scratch.bytes = packedValue;
+          scratch.length = packedValue.length;
+          visitTerm(scratch);
+          visitDoc(docID);
+          if (doDocsWithField) {
+            if (docsWithField == null) {
+              // Lazy init
+              docsWithField = new FixedBitSet(maxDoc);
+            }
+            ((FixedBitSet)docsWithField).set(docID);
+          }
+        }
 
-    public void uninvert(LeafReader reader, String field, boolean setDocsWithField) throws IOException {
+        @Override
+        public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
+          return Relation.CELL_CROSSES_QUERY; // inspect all byte-docid pairs
+        }
+      });
+    }
+    
+    final void uninvertPostings(LeafReader reader, String field, boolean setDocsWithField) throws IOException {
       final int maxDoc = reader.maxDoc();
       Terms terms = reader.terms(field);
       if (terms != null) {
@@ -306,13 +369,15 @@ class FieldCacheImpl implements FieldCache {
       }
     }
 
+    /** @deprecated remove this when legacy numerics are removed */
+    @Deprecated
     protected abstract TermsEnum termsEnum(Terms terms) throws IOException;
     protected abstract void visitTerm(BytesRef term);
     protected abstract void visitDoc(int docID);
   }
 
   // null Bits means no docs matched
-  void setDocsWithField(LeafReader reader, String field, Bits docsWithField) {
+  void setDocsWithField(LeafReader reader, String field, Bits docsWithField, Parser parser) {
     final int maxDoc = reader.maxDoc();
     final Bits bits;
     if (docsWithField == null) {
@@ -329,7 +394,7 @@ class FieldCacheImpl implements FieldCache {
     } else {
       bits = docsWithField;
     }
-    caches.get(DocsWithFieldCache.class).put(reader, new CacheKey(field, null), new BitsEntry(bits));
+    caches.get(DocsWithFieldCache.class).put(reader, new CacheKey(field, parser), new BitsEntry(bits));
   }
 
   private static class HoldsOneThing<T> {
@@ -353,17 +418,25 @@ class FieldCacheImpl implements FieldCache {
     public long minValue;
   }
 
-  public Bits getDocsWithField(LeafReader reader, String field) throws IOException {
+  public Bits getDocsWithField(LeafReader reader, String field, Parser parser) throws IOException {
     final FieldInfo fieldInfo = reader.getFieldInfos().fieldInfo(field);
     if (fieldInfo == null) {
       // field does not exist or has no value
       return new Bits.MatchNoBits(reader.maxDoc());
     } else if (fieldInfo.getDocValuesType() != DocValuesType.NONE) {
       return reader.getDocsWithField(field);
-    } else if (fieldInfo.getIndexOptions() == IndexOptions.NONE) {
-      return new Bits.MatchNoBits(reader.maxDoc());
+    } 
+    
+    if (parser instanceof PointParser) {
+      // points case
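+      // points have no postings, so the IndexOptions check below doesn't apply; fall through to the cache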
+      
+    } else {
+      // postings case
+      if (fieldInfo.getIndexOptions() == IndexOptions.NONE) {
+        return new Bits.MatchNoBits(reader.maxDoc());
+      }
     }
-    BitsEntry bitsEntry = (BitsEntry) caches.get(DocsWithFieldCache.class).get(reader, new CacheKey(field, null), false);
+    BitsEntry bitsEntry = (BitsEntry) caches.get(DocsWithFieldCache.class).get(reader, new CacheKey(field, parser), false);
     return bitsEntry.bits;
   }
   
@@ -391,9 +464,48 @@ class FieldCacheImpl implements FieldCache {
     }
     
     @Override
-    protected BitsEntry createValue(LeafReader reader, CacheKey key, boolean setDocsWithField /* ignored */)
-    throws IOException {
+    protected BitsEntry createValue(LeafReader reader, CacheKey key, boolean setDocsWithField /* ignored */) throws IOException {
       final String field = key.field;
+      final Parser parser = (Parser) key.custom;
+      if (parser instanceof PointParser) {
+        return createValuePoints(reader, field);
+      } else {
+        return createValuePostings(reader, field);
+      }
+    }
+  
+    private BitsEntry createValuePoints(LeafReader reader, String field) throws IOException {
+      final int maxDoc = reader.maxDoc();
+      PointValues values = reader.getPointValues();
+      assert values != null;
+      assert values.size(field) > 0;
+      
+      final int docCount = values.getDocCount(field);
+      assert docCount <= maxDoc;
+      if (docCount == maxDoc) {
+        // Fast case: all docs have this field:
+        return new BitsEntry(new Bits.MatchAllBits(maxDoc));
+      }
+      
+      // otherwise a no-op uninvert!
+      Uninvert u = new Uninvert(true) {
+        @Override
+        protected TermsEnum termsEnum(Terms terms) throws IOException {
+          throw new AssertionError();
+        }
+
+        @Override
+        protected void visitTerm(BytesRef term) {}
+
+        @Override
+        protected void visitDoc(int docID) {}
+      };
+      u.uninvert(reader, field, true);
+      return new BitsEntry(u.docsWithField);
+    }
+    
+    // TODO: it is dumb that uninverting code is duplicated here in this method!!
+    private BitsEntry createValuePostings(LeafReader reader, String field) throws IOException {
       final int maxDoc = reader.maxDoc();
 
       // Visit all docs that have terms for this field
@@ -458,8 +570,32 @@ class FieldCacheImpl implements FieldCache {
         return DocValues.emptyNumeric();
       } else if (info.getDocValuesType() != DocValuesType.NONE) {
         throw new IllegalStateException("Type mismatch: " + field + " was indexed as " + info.getDocValuesType());
-      } else if (info.getIndexOptions() == IndexOptions.NONE) {
-        return DocValues.emptyNumeric();
+      }
+      
+      if (parser instanceof PointParser) {
+        // points case
+        // no points in this segment
+        if (info.getPointDimensionCount() == 0) {
+          return DocValues.emptyNumeric();
+        }
+        if (info.getPointDimensionCount() != 1) {
+          throw new IllegalStateException("Type mismatch: " + field + " was indexed with dimensions=" + info.getPointDimensionCount());
+        }
+        PointValues values = reader.getPointValues();
+        // no actual points for this field (e.g. all points deleted)
+        if (values == null || values.size(field) == 0) {
+          return DocValues.emptyNumeric();
+        }
+        // not single-valued
+        if (values.size(field) != values.getDocCount(field)) {
+          throw new IllegalStateException("Type mismatch: " + field + " was indexed with multiple values, numValues=" + values.size(field) + ",numDocs=" + values.getDocCount(field));
+        }
+      } else {
+        // postings case 
+        // not indexed
+        if (info.getIndexOptions() == IndexOptions.NONE) {
+          return DocValues.emptyNumeric();
+        }
       }
       return (NumericDocValues) caches.get(Long.TYPE).get(reader, new CacheKey(field, parser), setDocsWithField);
     }
@@ -498,7 +634,7 @@ class FieldCacheImpl implements FieldCache {
 
       final HoldsOneThing<GrowableWriterAndMinValue> valuesRef = new HoldsOneThing<>();
 
-      Uninvert u = new Uninvert() {
+      Uninvert u = new Uninvert(parser instanceof PointParser) {
           private long minValue;
           private long currentValue;
           private GrowableWriter values;
@@ -542,7 +678,7 @@ class FieldCacheImpl implements FieldCache {
       u.uninvert(reader, key.field, setDocsWithField);
 
       if (setDocsWithField) {
-        wrapper.setDocsWithField(reader, key.field, u.docsWithField);
+        wrapper.setDocsWithField(reader, key.field, u.docsWithField, parser);
       }
       GrowableWriterAndMinValue values = valuesRef.get();
       if (values == null) {
@@ -872,7 +1008,7 @@ class FieldCacheImpl implements FieldCache {
           public int length() {
             return maxDoc;
           }
-        });
+        }, null);
       }
       // maybe an int-only impl?
       return new BinaryDocValuesImpl(bytes.freeze(true), offsetReader);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/misc/src/java/org/apache/lucene/uninverting/UninvertingReader.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/java/org/apache/lucene/uninverting/UninvertingReader.java b/lucene/misc/src/java/org/apache/lucene/uninverting/UninvertingReader.java
index 9f96b4f..10d1a5b 100644
--- a/lucene/misc/src/java/org/apache/lucene/uninverting/UninvertingReader.java
+++ b/lucene/misc/src/java/org/apache/lucene/uninverting/UninvertingReader.java
@@ -58,33 +58,69 @@ public class UninvertingReader extends FilterLeafReader {
    */
   public static enum Type {
     /** 
+     * Single-valued Integer, (e.g. indexed with {@link org.apache.lucene.document.IntPoint})
+     * <p>
+     * Fields with this type act as if they were indexed with
+     * {@link NumericDocValuesField}.
+     */
+    INTEGER_POINT,
+    /** 
+     * Single-valued Long, (e.g. indexed with {@link org.apache.lucene.document.LongPoint})
+     * <p>
+     * Fields with this type act as if they were indexed with
+     * {@link NumericDocValuesField}.
+     */
+    LONG_POINT,
+    /** 
+     * Single-valued Float, (e.g. indexed with {@link org.apache.lucene.document.FloatPoint})
+     * <p>
+     * Fields with this type act as if they were indexed with
+     * {@link NumericDocValuesField}.
+     */
+    FLOAT_POINT,
+    /** 
+     * Single-valued Double, (e.g. indexed with {@link org.apache.lucene.document.DoublePoint})
+     * <p>
+     * Fields with this type act as if they were indexed with
+     * {@link NumericDocValuesField}.
+     */
+    DOUBLE_POINT,
+    /** 
      * Single-valued Integer, (e.g. indexed with {@link org.apache.lucene.document.LegacyIntField})
      * <p>
      * Fields with this type act as if they were indexed with
      * {@link NumericDocValuesField}.
+     * @deprecated Index with points and use {@link #INTEGER_POINT} instead.
      */
-    INTEGER,
+    @Deprecated
+    LEGACY_INTEGER,
     /** 
      * Single-valued Long, (e.g. indexed with {@link org.apache.lucene.document.LegacyLongField})
      * <p>
      * Fields with this type act as if they were indexed with
      * {@link NumericDocValuesField}.
+     * @deprecated Index with points and use {@link #LONG_POINT} instead.
      */
-    LONG,
+    @Deprecated
+    LEGACY_LONG,
     /** 
      * Single-valued Float, (e.g. indexed with {@link org.apache.lucene.document.LegacyFloatField})
      * <p>
      * Fields with this type act as if they were indexed with
      * {@link NumericDocValuesField}.
+     * @deprecated Index with points and use {@link #FLOAT_POINT} instead.
      */
-    FLOAT,
+    @Deprecated
+    LEGACY_FLOAT,
     /** 
      * Single-valued Double, (e.g. indexed with {@link org.apache.lucene.document.LegacyDoubleField})
      * <p>
      * Fields with this type act as if they were indexed with
      * {@link NumericDocValuesField}.
+     * @deprecated Index with points and use {@link #DOUBLE_POINT} instead.
      */
-    DOUBLE,
+    @Deprecated
+    LEGACY_DOUBLE,
     /** 
      * Single-valued Binary, (e.g. indexed with {@link StringField}) 
      * <p>
@@ -181,14 +217,29 @@ public class UninvertingReader extends FilterLeafReader {
     ArrayList<FieldInfo> filteredInfos = new ArrayList<>();
     for (FieldInfo fi : in.getFieldInfos()) {
       DocValuesType type = fi.getDocValuesType();
-      if (fi.getIndexOptions() != IndexOptions.NONE && fi.getDocValuesType() == DocValuesType.NONE) {
+      if (type == DocValuesType.NONE) {        
         Type t = mapping.get(fi.name);
         if (t != null) {
+          if (t == Type.INTEGER_POINT || t == Type.LONG_POINT || t == Type.FLOAT_POINT || t == Type.DOUBLE_POINT) {
+            // type uses points
+            if (fi.getPointDimensionCount() == 0) {
+              continue;
+            }
+          } else {
+            // type uses inverted index
+            if (fi.getIndexOptions() == IndexOptions.NONE) {
+              continue;
+            }
+          }
           switch(t) {
-            case INTEGER:
-            case LONG:
-            case FLOAT:
-            case DOUBLE:
+            case INTEGER_POINT:
+            case LONG_POINT:
+            case FLOAT_POINT:
+            case DOUBLE_POINT:
+            case LEGACY_INTEGER:
+            case LEGACY_LONG:
+            case LEGACY_FLOAT:
+            case LEGACY_DOUBLE:
               type = DocValuesType.NUMERIC;
               break;
             case BINARY:
@@ -226,10 +277,14 @@ public class UninvertingReader extends FilterLeafReader {
     Type v = getType(field);
     if (v != null) {
       switch (v) {
-        case INTEGER: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.NUMERIC_UTILS_INT_PARSER, true);
-        case FLOAT: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.NUMERIC_UTILS_FLOAT_PARSER, true);
-        case LONG: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.NUMERIC_UTILS_LONG_PARSER, true);
-        case DOUBLE: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, true);
+        case INTEGER_POINT: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.INT_POINT_PARSER, true);
+        case FLOAT_POINT: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.FLOAT_POINT_PARSER, true);
+        case LONG_POINT: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.LONG_POINT_PARSER, true);
+        case DOUBLE_POINT: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.DOUBLE_POINT_PARSER, true);
+        case LEGACY_INTEGER: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.LEGACY_INT_PARSER, true);
+        case LEGACY_FLOAT: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.LEGACY_FLOAT_PARSER, true);
+        case LEGACY_LONG: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.LEGACY_LONG_PARSER, true);
+        case LEGACY_DOUBLE: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.LEGACY_DOUBLE_PARSER, true);
       }
     }
     return super.getNumericDocValues(field);
@@ -275,8 +330,20 @@ public class UninvertingReader extends FilterLeafReader {
 
   @Override
   public Bits getDocsWithField(String field) throws IOException {
-    if (getType(field) != null) {
-      return FieldCache.DEFAULT.getDocsWithField(in, field);
+    Type v = getType(field);
+    if (v != null) {
+      switch (v) {
+        case INTEGER_POINT:  return FieldCache.DEFAULT.getDocsWithField(in, field, FieldCache.INT_POINT_PARSER);
+        case FLOAT_POINT:    return FieldCache.DEFAULT.getDocsWithField(in, field, FieldCache.FLOAT_POINT_PARSER);
+        case LONG_POINT:     return FieldCache.DEFAULT.getDocsWithField(in, field, FieldCache.LONG_POINT_PARSER);
+        case DOUBLE_POINT:   return FieldCache.DEFAULT.getDocsWithField(in, field, FieldCache.DOUBLE_POINT_PARSER);
+        case LEGACY_INTEGER: return FieldCache.DEFAULT.getDocsWithField(in, field, FieldCache.LEGACY_INT_PARSER);
+        case LEGACY_FLOAT:   return FieldCache.DEFAULT.getDocsWithField(in, field, FieldCache.LEGACY_FLOAT_PARSER);
+        case LEGACY_LONG:    return FieldCache.DEFAULT.getDocsWithField(in, field, FieldCache.LEGACY_LONG_PARSER);
+        case LEGACY_DOUBLE:  return FieldCache.DEFAULT.getDocsWithField(in, field, FieldCache.LEGACY_DOUBLE_PARSER);
+        default:
+          return FieldCache.DEFAULT.getDocsWithField(in, field, null);
+      }
     } else {
       return in.getDocsWithField(field);
     }

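The hunks above split UninvertingReader's numeric mappings into point-based types (INTEGER_POINT, LONG_POINT, FLOAT_POINT, DOUBLE_POINT) and deprecated LEGACY_* variants, and pick the synthetic doc-values source based on whether the field was indexed with points or with the inverted index. A minimal usage sketch, assuming an existing index where a hypothetical "price" field was indexed with LongPoint and a hypothetical "oldCount" field with the deprecated LegacyIntField:

import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.uninverting.UninvertingReader;
import org.apache.lucene.uninverting.UninvertingReader.Type;

public class UninvertPointsSketch {
  public static void main(String[] args) throws Exception {
    Directory dir = FSDirectory.open(Paths.get("/path/to/index")); // hypothetical location
    Map<String, Type> mapping = new HashMap<>();
    mapping.put("price", Type.LONG_POINT);        // field indexed with LongPoint
    mapping.put("oldCount", Type.LEGACY_INTEGER); // field indexed with LegacyIntField
    // wrap() exposes synthetic numeric doc values for the mapped fields
    try (DirectoryReader reader = UninvertingReader.wrap(DirectoryReader.open(dir), mapping)) {
      // reader.leaves().get(0).reader().getNumericDocValues("price") is now available
    }
  }
}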
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java
index 8c1fae7..4861cd3 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java
@@ -353,7 +353,7 @@ public class TestDocTermOrds extends LuceneTestCase {
                                             TestUtil.nextInt(random(), 2, 10));
                                             
 
-    final NumericDocValues docIDToID = FieldCache.DEFAULT.getNumerics(r, "id", FieldCache.NUMERIC_UTILS_INT_PARSER, false);
+    final NumericDocValues docIDToID = FieldCache.DEFAULT.getNumerics(r, "id", FieldCache.LEGACY_INT_PARSER, false);
     /*
       for(int docID=0;docID<subR.maxDoc();docID++) {
       System.out.println("  docID=" + docID + " id=" + docIDToID[docID]);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCache.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCache.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCache.java
index fb05875..1b322d9 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCache.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCache.java
@@ -30,17 +30,22 @@ import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.BinaryDocValuesField;
 import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoublePoint;
 import org.apache.lucene.document.LegacyDoubleField;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.FloatPoint;
+import org.apache.lucene.document.IntPoint;
 import org.apache.lucene.document.LegacyFloatField;
 import org.apache.lucene.document.LegacyIntField;
 import org.apache.lucene.document.LegacyLongField;
+import org.apache.lucene.document.LongPoint;
 import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.document.SortedDocValuesField;
 import org.apache.lucene.document.SortedSetDocValuesField;
 import org.apache.lucene.document.StoredField;
 import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LogDocMergePolicy;
 import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
@@ -76,7 +81,7 @@ public class TestFieldCache extends LuceneTestCase {
     NUM_DOCS = atLeast(500);
     NUM_ORDS = atLeast(2);
     directory = newDirectory();
-    RandomIndexWriter writer= new RandomIndexWriter(random(), directory, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
+    IndexWriter writer= new IndexWriter(directory, new IndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(new LogDocMergePolicy()));
     long theLong = Long.MAX_VALUE;
     double theDouble = Double.MAX_VALUE;
     int theInt = Integer.MAX_VALUE;
@@ -88,16 +93,16 @@ public class TestFieldCache extends LuceneTestCase {
     }
     for (int i = 0; i < NUM_DOCS; i++){
       Document doc = new Document();
-      doc.add(new LegacyLongField("theLong", theLong--, Field.Store.NO));
-      doc.add(new LegacyDoubleField("theDouble", theDouble--, Field.Store.NO));
-      doc.add(new LegacyIntField("theInt", theInt--, Field.Store.NO));
-      doc.add(new LegacyFloatField("theFloat", theFloat--, Field.Store.NO));
+      doc.add(new LongPoint("theLong", theLong--));
+      doc.add(new DoublePoint("theDouble", theDouble--));
+      doc.add(new IntPoint("theInt", theInt--));
+      doc.add(new FloatPoint("theFloat", theFloat--));
       if (i%2 == 0) {
-        doc.add(new LegacyIntField("sparse", i, Field.Store.NO));
+        doc.add(new IntPoint("sparse", i));
       }
 
       if (i%2 == 0) {
-        doc.add(new LegacyIntField("numInt", i, Field.Store.NO));
+        doc.add(new IntPoint("numInt", i));
       }
 
       // sometimes skip the field:
@@ -117,8 +122,10 @@ public class TestFieldCache extends LuceneTestCase {
       }
       writer.addDocument(doc);
     }
-    IndexReader r = writer.getReader();
-    reader = SlowCompositeReaderWrapper.wrap(r);
+    writer.forceMerge(1); // this test relies on one segment and docid order
+    IndexReader r = DirectoryReader.open(writer);
+    assertEquals(1, r.leaves().size());
+    reader = r.leaves().get(0).reader();
     TestUtil.checkReader(reader);
     writer.close();
   }
@@ -133,67 +140,42 @@ public class TestFieldCache extends LuceneTestCase {
     multiValued = null;
   }
   
-  public void testInfoStream() throws Exception {
-    try {
-      FieldCache cache = FieldCache.DEFAULT;
-      ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
-      cache.setInfoStream(new PrintStream(bos, false, IOUtils.UTF_8));
-      cache.getNumerics(reader, "theDouble", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, false);
-      cache.getNumerics(reader, "theDouble", new FieldCache.Parser() {
-        @Override
-        public TermsEnum termsEnum(Terms terms) throws IOException {
-          return LegacyNumericUtils.filterPrefixCodedLongs(terms.iterator());
-        }
-        @Override
-        public long parseValue(BytesRef term) {
-          int val = (int) LegacyNumericUtils.prefixCodedToLong(term);
-          if (val<0) val ^= 0x7fffffff;
-          return val;
-        }
-      }, false);
-      assertTrue(bos.toString(IOUtils.UTF_8).indexOf("WARNING") != -1);
-    } finally {
-      FieldCache.DEFAULT.setInfoStream(null);
-      FieldCache.DEFAULT.purgeAllCaches();
-    }
-  }
-
   public void test() throws IOException {
     FieldCache cache = FieldCache.DEFAULT;
-    NumericDocValues doubles = cache.getNumerics(reader, "theDouble", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, random().nextBoolean());
-    assertSame("Second request to cache return same array", doubles, cache.getNumerics(reader, "theDouble", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, random().nextBoolean()));
+    NumericDocValues doubles = cache.getNumerics(reader, "theDouble", FieldCache.DOUBLE_POINT_PARSER, random().nextBoolean());
+    assertSame("Second request to cache return same array", doubles, cache.getNumerics(reader, "theDouble", FieldCache.DOUBLE_POINT_PARSER, random().nextBoolean()));
     for (int i = 0; i < NUM_DOCS; i++) {
       assertEquals(Double.doubleToLongBits(Double.MAX_VALUE - i), doubles.get(i));
     }
     
-    NumericDocValues longs = cache.getNumerics(reader, "theLong", FieldCache.NUMERIC_UTILS_LONG_PARSER, random().nextBoolean());
-    assertSame("Second request to cache return same array", longs, cache.getNumerics(reader, "theLong", FieldCache.NUMERIC_UTILS_LONG_PARSER, random().nextBoolean()));
+    NumericDocValues longs = cache.getNumerics(reader, "theLong", FieldCache.LONG_POINT_PARSER, random().nextBoolean());
+    assertSame("Second request to cache return same array", longs, cache.getNumerics(reader, "theLong", FieldCache.LONG_POINT_PARSER, random().nextBoolean()));
     for (int i = 0; i < NUM_DOCS; i++) {
       assertEquals(Long.MAX_VALUE - i, longs.get(i));
     }
 
-    NumericDocValues ints = cache.getNumerics(reader, "theInt", FieldCache.NUMERIC_UTILS_INT_PARSER, random().nextBoolean());
-    assertSame("Second request to cache return same array", ints, cache.getNumerics(reader, "theInt", FieldCache.NUMERIC_UTILS_INT_PARSER, random().nextBoolean()));
+    NumericDocValues ints = cache.getNumerics(reader, "theInt", FieldCache.INT_POINT_PARSER, random().nextBoolean());
+    assertSame("Second request to cache return same array", ints, cache.getNumerics(reader, "theInt", FieldCache.INT_POINT_PARSER, random().nextBoolean()));
     for (int i = 0; i < NUM_DOCS; i++) {
       assertEquals(Integer.MAX_VALUE - i, ints.get(i));
     }
     
-    NumericDocValues floats = cache.getNumerics(reader, "theFloat", FieldCache.NUMERIC_UTILS_FLOAT_PARSER, random().nextBoolean());
-    assertSame("Second request to cache return same array", floats, cache.getNumerics(reader, "theFloat", FieldCache.NUMERIC_UTILS_FLOAT_PARSER, random().nextBoolean()));
+    NumericDocValues floats = cache.getNumerics(reader, "theFloat", FieldCache.FLOAT_POINT_PARSER, random().nextBoolean());
+    assertSame("Second request to cache return same array", floats, cache.getNumerics(reader, "theFloat", FieldCache.FLOAT_POINT_PARSER, random().nextBoolean()));
     for (int i = 0; i < NUM_DOCS; i++) {
       assertEquals(Float.floatToIntBits(Float.MAX_VALUE - i), floats.get(i));
     }
 
-    Bits docsWithField = cache.getDocsWithField(reader, "theLong");
-    assertSame("Second request to cache return same array", docsWithField, cache.getDocsWithField(reader, "theLong"));
+    Bits docsWithField = cache.getDocsWithField(reader, "theLong", FieldCache.LONG_POINT_PARSER);
+    assertSame("Second request to cache return same array", docsWithField, cache.getDocsWithField(reader, "theLong", FieldCache.LONG_POINT_PARSER));
     assertTrue("docsWithField(theLong) must be class Bits.MatchAllBits", docsWithField instanceof Bits.MatchAllBits);
     assertTrue("docsWithField(theLong) Size: " + docsWithField.length() + " is not: " + NUM_DOCS, docsWithField.length() == NUM_DOCS);
     for (int i = 0; i < docsWithField.length(); i++) {
       assertTrue(docsWithField.get(i));
     }
     
-    docsWithField = cache.getDocsWithField(reader, "sparse");
-    assertSame("Second request to cache return same array", docsWithField, cache.getDocsWithField(reader, "sparse"));
+    docsWithField = cache.getDocsWithField(reader, "sparse", FieldCache.INT_POINT_PARSER);
+    assertSame("Second request to cache return same array", docsWithField, cache.getDocsWithField(reader, "sparse", FieldCache.INT_POINT_PARSER));
     assertFalse("docsWithField(sparse) must not be class Bits.MatchAllBits", docsWithField instanceof Bits.MatchAllBits);
     assertTrue("docsWithField(sparse) Size: " + docsWithField.length() + " is not: " + NUM_DOCS, docsWithField.length() == NUM_DOCS);
     for (int i = 0; i < docsWithField.length(); i++) {
@@ -243,7 +225,7 @@ public class TestFieldCache extends LuceneTestCase {
 
     // getTerms
     BinaryDocValues terms = cache.getTerms(reader, "theRandomUnicodeString", true);
-    Bits bits = cache.getDocsWithField(reader, "theRandomUnicodeString");
+    Bits bits = cache.getDocsWithField(reader, "theRandomUnicodeString", null);
     for (int i = 0; i < NUM_DOCS; i++) {
       final String s;
       if (!bits.get(i)) {
@@ -322,20 +304,20 @@ public class TestFieldCache extends LuceneTestCase {
     FieldCache cache = FieldCache.DEFAULT;
     cache.purgeAllCaches();
     assertEquals(0, cache.getCacheEntries().length);
-    cache.getNumerics(reader, "theDouble", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, true);
+    cache.getNumerics(reader, "theDouble", FieldCache.DOUBLE_POINT_PARSER, true);
 
     // The double[] takes one slot, and docsWithField should also
     // have been populated:
     assertEquals(2, cache.getCacheEntries().length);
-    Bits bits = cache.getDocsWithField(reader, "theDouble");
+    Bits bits = cache.getDocsWithField(reader, "theDouble", FieldCache.DOUBLE_POINT_PARSER);
 
     // No new entries should appear:
     assertEquals(2, cache.getCacheEntries().length);
     assertTrue(bits instanceof Bits.MatchAllBits);
 
-    NumericDocValues ints = cache.getNumerics(reader, "sparse", FieldCache.NUMERIC_UTILS_INT_PARSER, true);
+    NumericDocValues ints = cache.getNumerics(reader, "sparse", FieldCache.INT_POINT_PARSER, true);
     assertEquals(4, cache.getCacheEntries().length);
-    Bits docsWithField = cache.getDocsWithField(reader, "sparse");
+    Bits docsWithField = cache.getDocsWithField(reader, "sparse", FieldCache.INT_POINT_PARSER);
     assertEquals(4, cache.getCacheEntries().length);
     for (int i = 0; i < docsWithField.length(); i++) {
       if (i%2 == 0) {
@@ -346,8 +328,8 @@ public class TestFieldCache extends LuceneTestCase {
       }
     }
 
-    NumericDocValues numInts = cache.getNumerics(reader, "numInt", FieldCache.NUMERIC_UTILS_INT_PARSER, random().nextBoolean());
-    docsWithField = cache.getDocsWithField(reader, "numInt");
+    NumericDocValues numInts = cache.getNumerics(reader, "numInt", FieldCache.INT_POINT_PARSER, random().nextBoolean());
+    docsWithField = cache.getDocsWithField(reader, "numInt", FieldCache.INT_POINT_PARSER);
     for (int i = 0; i < docsWithField.length(); i++) {
       if (i%2 == 0) {
         assertTrue(docsWithField.get(i));
@@ -391,13 +373,13 @@ public class TestFieldCache extends LuceneTestCase {
                     break;
                   }
                 } else if (op == 1) {
-                  Bits docsWithField = cache.getDocsWithField(reader, "sparse");
+                  Bits docsWithField = cache.getDocsWithField(reader, "sparse", FieldCache.INT_POINT_PARSER);
                   for (int i = 0; i < docsWithField.length(); i++) {
                     assertEquals(i%2 == 0, docsWithField.get(i));
                   }
                 } else {
-                  NumericDocValues ints = cache.getNumerics(reader, "sparse", FieldCache.NUMERIC_UTILS_INT_PARSER, true);
-                  Bits docsWithField = cache.getDocsWithField(reader, "sparse");
+                  NumericDocValues ints = cache.getNumerics(reader, "sparse", FieldCache.INT_POINT_PARSER, true);
+                  Bits docsWithField = cache.getDocsWithField(reader, "sparse", FieldCache.INT_POINT_PARSER);
                   for (int i = 0; i < docsWithField.length(); i++) {
                     if (i%2 == 0) {
                       assertTrue(docsWithField.get(i));
@@ -441,7 +423,7 @@ public class TestFieldCache extends LuceneTestCase {
     
     // Binary type: can be retrieved via getTerms()
     expectThrows(IllegalStateException.class, () -> {
-      FieldCache.DEFAULT.getNumerics(ar, "binary", FieldCache.NUMERIC_UTILS_INT_PARSER, false);
+      FieldCache.DEFAULT.getNumerics(ar, "binary", FieldCache.INT_POINT_PARSER, false);
     });
     
     BinaryDocValues binary = FieldCache.DEFAULT.getTerms(ar, "binary", true);
@@ -460,12 +442,12 @@ public class TestFieldCache extends LuceneTestCase {
       new DocTermOrds(ar, null, "binary");
     });
     
-    Bits bits = FieldCache.DEFAULT.getDocsWithField(ar, "binary");
+    Bits bits = FieldCache.DEFAULT.getDocsWithField(ar, "binary", null);
     assertTrue(bits.get(0));
     
     // Sorted type: can be retrieved via getTerms(), getTermsIndex(), getDocTermOrds()
     expectThrows(IllegalStateException.class, () -> {
-      FieldCache.DEFAULT.getNumerics(ar, "sorted", FieldCache.NUMERIC_UTILS_INT_PARSER, false);
+      FieldCache.DEFAULT.getNumerics(ar, "sorted", FieldCache.INT_POINT_PARSER, false);
     });
     
     expectThrows(IllegalStateException.class, () -> {
@@ -488,11 +470,11 @@ public class TestFieldCache extends LuceneTestCase {
     assertEquals(SortedSetDocValues.NO_MORE_ORDS, sortedSet.nextOrd());
     assertEquals(1, sortedSet.getValueCount());
     
-    bits = FieldCache.DEFAULT.getDocsWithField(ar, "sorted");
+    bits = FieldCache.DEFAULT.getDocsWithField(ar, "sorted", null);
     assertTrue(bits.get(0));
     
     // Numeric type: can be retrieved via getInts() and so on
-    NumericDocValues numeric = FieldCache.DEFAULT.getNumerics(ar, "numeric", FieldCache.NUMERIC_UTILS_INT_PARSER, false);
+    NumericDocValues numeric = FieldCache.DEFAULT.getNumerics(ar, "numeric", FieldCache.INT_POINT_PARSER, false);
     assertEquals(42, numeric.get(0));
     
     expectThrows(IllegalStateException.class, () -> {
@@ -511,12 +493,12 @@ public class TestFieldCache extends LuceneTestCase {
       new DocTermOrds(ar, null, "numeric");
     });
     
-    bits = FieldCache.DEFAULT.getDocsWithField(ar, "numeric");
+    bits = FieldCache.DEFAULT.getDocsWithField(ar, "numeric", null);
     assertTrue(bits.get(0));
     
     // SortedSet type: can be retrieved via getDocTermOrds() 
     expectThrows(IllegalStateException.class, () -> {
-      FieldCache.DEFAULT.getNumerics(ar, "sortedset", FieldCache.NUMERIC_UTILS_INT_PARSER, false);
+      FieldCache.DEFAULT.getNumerics(ar, "sortedset", FieldCache.INT_POINT_PARSER, false);
     });
     
     expectThrows(IllegalStateException.class, () -> {
@@ -538,7 +520,7 @@ public class TestFieldCache extends LuceneTestCase {
     assertEquals(SortedSetDocValues.NO_MORE_ORDS, sortedSet.nextOrd());
     assertEquals(2, sortedSet.getValueCount());
     
-    bits = FieldCache.DEFAULT.getDocsWithField(ar, "sortedset");
+    bits = FieldCache.DEFAULT.getDocsWithField(ar, "sortedset", null);
     assertTrue(bits.get(0));
     
     ir.close();
@@ -559,16 +541,16 @@ public class TestFieldCache extends LuceneTestCase {
     cache.purgeAllCaches();
     assertEquals(0, cache.getCacheEntries().length);
     
-    NumericDocValues ints = cache.getNumerics(ar, "bogusints", FieldCache.NUMERIC_UTILS_INT_PARSER, true);
+    NumericDocValues ints = cache.getNumerics(ar, "bogusints", FieldCache.INT_POINT_PARSER, true);
     assertEquals(0, ints.get(0));
     
-    NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.NUMERIC_UTILS_LONG_PARSER, true);
+    NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.LONG_POINT_PARSER, true);
     assertEquals(0, longs.get(0));
     
-    NumericDocValues floats = cache.getNumerics(ar, "bogusfloats", FieldCache.NUMERIC_UTILS_FLOAT_PARSER, true);
+    NumericDocValues floats = cache.getNumerics(ar, "bogusfloats", FieldCache.FLOAT_POINT_PARSER, true);
     assertEquals(0, floats.get(0));
     
-    NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, true);
+    NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.DOUBLE_POINT_PARSER, true);
     assertEquals(0, doubles.get(0));
     
     BinaryDocValues binaries = cache.getTerms(ar, "bogusterms", true);
@@ -584,7 +566,7 @@ public class TestFieldCache extends LuceneTestCase {
     sortedSet.setDocument(0);
     assertEquals(SortedSetDocValues.NO_MORE_ORDS, sortedSet.nextOrd());
     
-    Bits bits = cache.getDocsWithField(ar, "bogusbits");
+    Bits bits = cache.getDocsWithField(ar, "bogusbits", null);
     assertFalse(bits.get(0));
     
     // check that we cached nothing
@@ -617,16 +599,16 @@ public class TestFieldCache extends LuceneTestCase {
     cache.purgeAllCaches();
     assertEquals(0, cache.getCacheEntries().length);
     
-    NumericDocValues ints = cache.getNumerics(ar, "bogusints", FieldCache.NUMERIC_UTILS_INT_PARSER, true);
+    NumericDocValues ints = cache.getNumerics(ar, "bogusints", FieldCache.INT_POINT_PARSER, true);
     assertEquals(0, ints.get(0));
     
-    NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.NUMERIC_UTILS_LONG_PARSER, true);
+    NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.LONG_POINT_PARSER, true);
     assertEquals(0, longs.get(0));
     
-    NumericDocValues floats = cache.getNumerics(ar, "bogusfloats", FieldCache.NUMERIC_UTILS_FLOAT_PARSER, true);
+    NumericDocValues floats = cache.getNumerics(ar, "bogusfloats", FieldCache.FLOAT_POINT_PARSER, true);
     assertEquals(0, floats.get(0));
     
-    NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, true);
+    NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.DOUBLE_POINT_PARSER, true);
     assertEquals(0, doubles.get(0));
     
     BinaryDocValues binaries = cache.getTerms(ar, "bogusterms", true);
@@ -642,7 +624,7 @@ public class TestFieldCache extends LuceneTestCase {
     sortedSet.setDocument(0);
     assertEquals(SortedSetDocValues.NO_MORE_ORDS, sortedSet.nextOrd());
     
-    Bits bits = cache.getDocsWithField(ar, "bogusbits");
+    Bits bits = cache.getDocsWithField(ar, "bogusbits", null);
     assertFalse(bits.get(0));
     
     // check that we cached nothing
@@ -658,8 +640,10 @@ public class TestFieldCache extends LuceneTestCase {
     cfg.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir, cfg);
     Document doc = new Document();
-    LegacyLongField field = new LegacyLongField("f", 0L, Store.YES);
+    LongPoint field = new LongPoint("f", 0L);
+    StoredField field2 = new StoredField("f", 0L);
     doc.add(field);
+    doc.add(field2);
     final long[] values = new long[TestUtil.nextInt(random(), 1, 10)];
     for (int i = 0; i < values.length; ++i) {
       final long v;
@@ -683,12 +667,13 @@ public class TestFieldCache extends LuceneTestCase {
         iw.addDocument(new Document());
       } else {
         field.setLongValue(v);
+        field2.setLongValue(v);
         iw.addDocument(doc);
       }
     }
     iw.forceMerge(1);
     final DirectoryReader reader = iw.getReader();
-    final NumericDocValues longs = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.NUMERIC_UTILS_LONG_PARSER, false);
+    final NumericDocValues longs = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.LONG_POINT_PARSER, false);
     for (int i = 0; i < values.length; ++i) {
       assertEquals(values[i], longs.get(i));
     }
@@ -704,7 +689,7 @@ public class TestFieldCache extends LuceneTestCase {
     cfg.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir, cfg);
     Document doc = new Document();
-    LegacyIntField field = new LegacyIntField("f", 0, Store.YES);
+    IntPoint field = new IntPoint("f", 0);
     doc.add(field);
     final int[] values = new int[TestUtil.nextInt(random(), 1, 10)];
     for (int i = 0; i < values.length; ++i) {
@@ -734,7 +719,7 @@ public class TestFieldCache extends LuceneTestCase {
     }
     iw.forceMerge(1);
     final DirectoryReader reader = iw.getReader();
-    final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.NUMERIC_UTILS_INT_PARSER, false);
+    final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.INT_POINT_PARSER, false);
     for (int i = 0; i < values.length; ++i) {
       assertEquals(values[i], ints.get(i));
     }

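The test updates above exercise the new FieldCache point parsers (INT_POINT_PARSER, LONG_POINT_PARSER, FLOAT_POINT_PARSER, DOUBLE_POINT_PARSER) and the extra parser argument that getDocsWithField now takes. A minimal sketch of the updated call pattern, assuming a LeafReader named leafReader over a single-segment index whose "theLong" field was indexed with LongPoint:

NumericDocValues longs =
    FieldCache.DEFAULT.getNumerics(leafReader, "theLong", FieldCache.LONG_POINT_PARSER, false);
Bits docsWithField =
    FieldCache.DEFAULT.getDocsWithField(leafReader, "theLong", FieldCache.LONG_POINT_PARSER);
for (int doc = 0; doc < leafReader.maxDoc(); doc++) {
  if (docsWithField.get(doc)) {
    long value = longs.get(doc); // only documents that actually have the field
  }
}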
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java
index a85731f..0d5584e 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java
@@ -18,8 +18,7 @@ package org.apache.lucene.uninverting;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.LegacyIntField;
+import org.apache.lucene.document.IntPoint;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriter;
@@ -42,14 +41,14 @@ public class TestFieldCacheReopen extends LuceneTestCase {
             setMergePolicy(newLogMergePolicy(10))
     );
     Document doc = new Document();
-    doc.add(new LegacyIntField("number", 17, Field.Store.NO));
+    doc.add(new IntPoint("number", 17));
     writer.addDocument(doc);
     writer.commit();
   
     // Open reader1
     DirectoryReader r = DirectoryReader.open(dir);
     LeafReader r1 = getOnlySegmentReader(r);
-    final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(r1, "number", FieldCache.NUMERIC_UTILS_INT_PARSER, false);
+    final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(r1, "number", FieldCache.INT_POINT_PARSER, false);
     assertEquals(17, ints.get(0));
   
     // Add new segment
@@ -61,7 +60,7 @@ public class TestFieldCacheReopen extends LuceneTestCase {
     assertNotNull(r2);
     r.close();
     LeafReader sub0 = r2.leaves().get(0).reader();
-    final NumericDocValues ints2 = FieldCache.DEFAULT.getNumerics(sub0, "number", FieldCache.NUMERIC_UTILS_INT_PARSER, false);
+    final NumericDocValues ints2 = FieldCache.DEFAULT.getNumerics(sub0, "number", FieldCache.INT_POINT_PARSER, false);
     r2.close();
     assertTrue(ints == ints2);
   

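As in the setUp changes above, the migration away from the deprecated Legacy*Field classes indexes numerics as points, adding a separate StoredField only when the value must also be retrievable. A minimal sketch, assuming an open IndexWriter named writer:

Document doc = new Document();
doc.add(new IntPoint("number", 17));    // indexed for exact/range queries and uninverting
doc.add(new StoredField("number", 17)); // optional: only if the value must be stored
writer.addDocument(doc);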
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d35d5694/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSanityChecker.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSanityChecker.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSanityChecker.java
index f7dc048..f5c62e2 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSanityChecker.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSanityChecker.java
@@ -94,11 +94,11 @@ public class TestFieldCacheSanityChecker extends LuceneTestCase {
     FieldCache cache = FieldCache.DEFAULT;
     cache.purgeAllCaches();
 
-    cache.getNumerics(readerA, "theDouble", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, false);
-    cache.getNumerics(readerAclone, "theDouble", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, false);
-    cache.getNumerics(readerB, "theDouble", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, false);
+    cache.getNumerics(readerA, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, false);
+    cache.getNumerics(readerAclone, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, false);
+    cache.getNumerics(readerB, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, false);
 
-    cache.getNumerics(readerX, "theInt", FieldCache.NUMERIC_UTILS_INT_PARSER, false);
+    cache.getNumerics(readerX, "theInt", FieldCache.LEGACY_INT_PARSER, false);
 
     // // // 
 
@@ -117,7 +117,7 @@ public class TestFieldCacheSanityChecker extends LuceneTestCase {
     FieldCache cache = FieldCache.DEFAULT;
     cache.purgeAllCaches();
 
-    cache.getNumerics(readerX, "theInt", FieldCache.NUMERIC_UTILS_INT_PARSER, false);
+    cache.getNumerics(readerX, "theInt", FieldCache.LEGACY_INT_PARSER, false);
     cache.getTerms(readerX, "theInt", false);
 
     // // // 


[48/50] [abbrv] lucene-solr git commit: SOLR-8832: Faulty DaemonStream shutdown procedures

Posted by ho...@apache.org.
SOLR-8832: Faulty DaemonStream shutdown procedures


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/007d41c9
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/007d41c9
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/007d41c9

Branch: refs/heads/jira/SOLR-445
Commit: 007d41c9f5073ee796dc35168d397e7a5b501997
Parents: 50c413e
Author: jbernste <jb...@apache.org>
Authored: Fri Mar 11 15:34:31 2016 -0500
Committer: jbernste <jb...@apache.org>
Committed: Fri Mar 11 15:35:51 2016 -0500

----------------------------------------------------------------------
 .../org/apache/solr/client/solrj/io/stream/DaemonStream.java | 6 +++++-
 .../solr/client/solrj/io/stream/StreamExpressionTest.java    | 8 ++++++--
 2 files changed, 11 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/007d41c9/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java
index 2f65394..edafd7e 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java
@@ -171,11 +171,16 @@ public class DaemonStream extends TupleStream implements Expressible {
     this.tupleStream.setStreamContext(streamContext);
   }
 
+  public void shutdown() {
+    streamRunner.setShutdown(true);
+  }
+
   public void close() {
     if(closed) {
       return;
     }
     streamRunner.setShutdown(true);
+    this.closed = true;
   }
 
   public List<TupleStream> children() {
@@ -226,7 +231,6 @@ public class DaemonStream extends TupleStream implements Expressible {
 
     public synchronized void setShutdown(boolean shutdown) {
       this.shutdown = shutdown;
-      interrupt(); //We could be blocked on the queue or sleeping
     }
 
     public synchronized boolean getShutdown() {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/007d41c9/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
index 465369b..e7f57c1 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
@@ -623,7 +623,7 @@ public class StreamExpressionTest extends AbstractFullDistribZkTestBase {
     DaemonStream daemonStream;
 
     expression = StreamExpressionParser.parse("daemon(rollup("
-        + "search(collection1, q=*:*, fl=\"a_i,a_s\", sort=\"a_s asc\"),"
+        + "search(collection1, q=\"*:*\", fl=\"a_i,a_s\", sort=\"a_s asc\"),"
         + "over=\"a_s\","
         + "sum(a_i)"
         + "), id=\"test\", runInterval=\"1000\", queueSize=\"9\")");
@@ -2366,13 +2366,17 @@ public class StreamExpressionTest extends AbstractFullDistribZkTestBase {
         assertEquals(14, (long) tuple.getLong(id));
         tuple = dstream.read(); // This should trigger a checkpoint as it's the 4th read from the stream.
         assertEquals(15, (long) tuple.getLong(id));
+
+        dstream.shutdown();
+        tuple = dstream.read();
+        assertTrue(tuple.EOF);
       } finally {
         dstream.close();
       }
     } finally {
-      cache.close();
       del("*:*");
       commit();
+      cache.close();
     }
   }
 

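The change above separates shutdown() (which only signals the internal runner) from close() (which now also marks the stream closed), and the test verifies that reading after shutdown yields the EOF tuple. A usage sketch of that shutdown sequence, assuming an opened DaemonStream named daemonStream:

daemonStream.shutdown();           // signal the internal runner to stop
Tuple tuple = daemonStream.read();
while (!tuple.EOF) {               // drain any queued tuples until the EOF marker
  tuple = daemonStream.read();
}
daemonStream.close();              // safe to call afterwards; close() is a no-op once closed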

[34/50] [abbrv] lucene-solr git commit: fix rare-ish test bug

Posted by ho...@apache.org.
fix rare-ish test bug


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/859ad950
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/859ad950
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/859ad950

Branch: refs/heads/jira/SOLR-445
Commit: 859ad950cce53bb5bc11c032f020639c8d7ea4d5
Parents: 7523ca1
Author: Mike McCandless <mi...@apache.org>
Authored: Thu Mar 10 16:34:04 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Thu Mar 10 16:34:04 2016 -0500

----------------------------------------------------------------------
 .../org/apache/lucene/index/TestAllFilesCheckIndexHeader.java | 7 +++++++
 1 file changed, 7 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/859ad950/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java
index f6c1486..c430281 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java
@@ -28,6 +28,7 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.LineFileDocs;
 import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems;
 import org.apache.lucene.util.LuceneTestCase;
@@ -86,6 +87,12 @@ public class TestAllFilesCheckIndexHeader extends LuceneTestCase {
   private void checkOneFile(Directory dir, String victim) throws IOException {
     try (BaseDirectoryWrapper dirCopy = newDirectory()) {
       dirCopy.setCheckIndexOnClose(false);
+
+      if (dirCopy instanceof MockDirectoryWrapper) {
+        // The while(true) loop below, under rarish circumstances, can sometimes double write:
+        ((MockDirectoryWrapper) dirCopy).setPreventDoubleWrite(false);
+      }
+
       long victimLength = dir.fileLength(victim);
       int wrongBytes = TestUtil.nextInt(random(), 1, (int) Math.min(100, victimLength));
       assert victimLength > 0;

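The instanceof guard above is needed because newDirectory() only sometimes hands back a MockDirectoryWrapper, so the test checks the runtime type before relaxing the double-write check. The same pattern as a standalone sketch, inside any LuceneTestCase subclass:

BaseDirectoryWrapper dirCopy = newDirectory();
dirCopy.setCheckIndexOnClose(false);
if (dirCopy instanceof MockDirectoryWrapper) {
  // disable the double-write assertion only when the mock wrapper is in play
  ((MockDirectoryWrapper) dirCopy).setPreventDoubleWrite(false);
}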

[38/50] [abbrv] lucene-solr git commit: SOLR-8790: Add the node name to core responses in calls from the Overseer

Posted by ho...@apache.org.
SOLR-8790: Add the node name to core responses in calls from the Overseer


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/209f5c29
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/209f5c29
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/209f5c29

Branch: refs/heads/jira/SOLR-445
Commit: 209f5c2946c8816a7ab175dc5e886ea661d154a5
Parents: 937a414
Author: anshum <an...@apache.org>
Authored: Thu Mar 10 12:22:23 2016 -0800
Committer: anshum <an...@apache.org>
Committed: Thu Mar 10 16:20:44 2016 -0800

----------------------------------------------------------------------
 solr/CHANGES.txt                                                  | 3 +++
 .../org/apache/solr/cloud/OverseerCollectionMessageHandler.java   | 1 +
 .../src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java   | 2 +-
 3 files changed, 5 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/209f5c29/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 2d4ddff..4e969bf 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -286,6 +286,9 @@ Bug Fixes
   other than count, resulted in incorrect results.  This has been fixed, and facet.prefix
   support for facet.method=uif has been enabled. (Mikhail Khludnev, yonik)
 
+* SOLR-8790: Collections API responses contain node name in the core-level responses that are
+  returned. (Anshum Gupta)
+
 Optimizations
 ----------------------
 * SOLR-7876: Speed up queries and operations that use many terms when timeAllowed has not been

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/209f5c29/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java b/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java
index d7d894b..78db473 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java
@@ -1743,6 +1743,7 @@ public class OverseerCollectionMessageHandler implements OverseerMessageHandler
     String replica = zkStateReader.getBaseUrlForNodeName(nodeName);
     sreq.shards = new String[]{replica};
     sreq.actualShards = sreq.shards;
+    sreq.nodeName = nodeName;
     sreq.params = params;
 
     shardHandler.submit(sreq, replica, sreq.params);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/209f5c29/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java
index 9f41c0d..6b0db0f 100644
--- a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java
@@ -54,7 +54,6 @@ import static org.apache.solr.cloud.ReplicaPropertiesBase.verifyUniqueAcrossColl
 public class CollectionsAPISolrJTest extends AbstractFullDistribZkTestBase {
 
   @Test
-  @Ignore
   public void test() throws Exception {
     testCreateAndDeleteCollection();
     testCreateAndDeleteShard();
@@ -80,6 +79,7 @@ public class CollectionsAPISolrJTest extends AbstractFullDistribZkTestBase {
             .setConfigName("conf1")
             .setRouterField("myOwnField")
             .setStateFormat(1);
+
     CollectionAdminResponse response = createCollectionRequest.process(cloudClient);
 
     assertEquals(0, response.getStatus());

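With the node name attached to each core-level request above, the per-core entries in a Collections API response can be correlated with the node that served them. A hedged sketch of inspecting the raw response; the "success" key and the entry layout below are assumptions for illustration, not taken from this patch:

CollectionAdminResponse response = createCollectionRequest.process(cloudClient);
NamedList<Object> success = (NamedList<Object>) response.getResponse().get("success");
if (success != null) {
  for (Map.Entry<String, Object> perCore : success) {
    System.out.println("node: " + perCore.getKey() + " -> " + perCore.getValue());
  }
}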

[32/50] [abbrv] lucene-solr git commit: LUCENE-7086: move SlowCompositeReaderWrapper to misc module, and throw clear exc if you try to use in with points

Posted by ho...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java b/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java
index 3fa2131..eca293e 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java
@@ -85,7 +85,7 @@ public class TestPayloads extends LuceneTestCase {
     // flush
     writer.close();
 
-    SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(ram));
+    LeafReader reader = getOnlyLeafReader(DirectoryReader.open(ram));
     FieldInfos fi = reader.getFieldInfos();
     assertFalse("Payload field bit should not be set.", fi.fieldInfo("f1").hasPayloads());
     assertTrue("Payload field bit should be set.", fi.fieldInfo("f2").hasPayloads());
@@ -112,7 +112,7 @@ public class TestPayloads extends LuceneTestCase {
     // flush
     writer.close();
 
-    reader = getOnlySegmentReader(DirectoryReader.open(ram));
+    reader = getOnlyLeafReader(DirectoryReader.open(ram));
     fi = reader.getFieldInfos();
     assertFalse("Payload field bit should not be set.", fi.fieldInfo("f1").hasPayloads());
     assertTrue("Payload field bit should be set.", fi.fieldInfo("f2").hasPayloads());
@@ -603,8 +603,9 @@ public class TestPayloads extends LuceneTestCase {
     field.setTokenStream(ts);
     writer.addDocument(doc);
     DirectoryReader reader = writer.getReader();
-    LeafReader sr = SlowCompositeReaderWrapper.wrap(reader);
-    PostingsEnum de = sr.postings(new Term("field", "withPayload"), PostingsEnum.PAYLOADS);
+    TermsEnum te = MultiFields.getFields(reader).terms("field").iterator();
+    assertTrue(te.seekExact(new BytesRef("withPayload")));
+    PostingsEnum de = te.postings(null, PostingsEnum.PAYLOADS);
     de.nextDoc();
     de.nextPosition();
     assertEquals(new BytesRef("test"), de.getPayload());
@@ -637,7 +638,7 @@ public class TestPayloads extends LuceneTestCase {
     doc.add(field3);
     writer.addDocument(doc);
     DirectoryReader reader = writer.getReader();
-    SegmentReader sr = getOnlySegmentReader(reader);
+    LeafReader sr = getOnlyLeafReader(reader);
     PostingsEnum de = sr.postings(new Term("field", "withPayload"), PostingsEnum.PAYLOADS);
     de.nextDoc();
     de.nextPosition();

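The swap from getOnlySegmentReader to getOnlyLeafReader recurs throughout this commit: tests that need a single LeafReader now force-merge down to one segment instead of wrapping the composite reader with SlowCompositeReaderWrapper. The recurring pattern, assuming a RandomIndexWriter named writer inside a LuceneTestCase:

writer.forceMerge(1);                   // guarantee a single segment
DirectoryReader r = writer.getReader();
LeafReader leaf = getOnlyLeafReader(r); // LuceneTestCase helper; fails if there is more than one leaf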
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java
index 9b18f02..0946234 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java
@@ -182,7 +182,7 @@ public class TestPointValues extends LuceneTestCase {
     w2.addDocument(doc);
     DirectoryReader r = DirectoryReader.open(dir);
     IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> {
-      w2.addIndexes(new CodecReader[] {getOnlySegmentReader(r)});
+        w2.addIndexes(new CodecReader[] {(CodecReader) getOnlyLeafReader(r)});
     });
     assertEquals("cannot change point dimension count from 2 to 1 for field=\"dim\"", expected.getMessage());
 
@@ -331,7 +331,7 @@ public class TestPointValues extends LuceneTestCase {
     w2.addDocument(doc);
     DirectoryReader r = DirectoryReader.open(dir);
     IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> {
-      w2.addIndexes(new CodecReader[] {getOnlySegmentReader(r)});
+        w2.addIndexes(new CodecReader[] {(CodecReader) getOnlyLeafReader(r)});
     });
     assertEquals("cannot change point numBytes from 6 to 4 for field=\"dim\"", expected.getMessage());
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestPostingsOffsets.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPostingsOffsets.java b/lucene/core/src/test/org/apache/lucene/index/TestPostingsOffsets.java
index aff0bd9..b21cb23 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestPostingsOffsets.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestPostingsOffsets.java
@@ -375,10 +375,9 @@ public class TestPostingsOffsets extends LuceneTestCase {
       riw.addDocument(doc);
     }
     CompositeReader ir = riw.getReader();
-    LeafReader slow = SlowCompositeReaderWrapper.wrap(ir);
-    FieldInfos fis = slow.getFieldInfos();
+    FieldInfos fis = MultiFields.getMergedFieldInfos(ir);
     assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS, fis.fieldInfo("foo").getIndexOptions());
-    slow.close();
+    ir.close();
     ir.close();
     riw.close();
     dir.close();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestReaderClosed.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestReaderClosed.java b/lucene/core/src/test/org/apache/lucene/index/TestReaderClosed.java
index 6a1ab3a..401254b 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestReaderClosed.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestReaderClosed.java
@@ -32,7 +32,7 @@ import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
 
 public class TestReaderClosed extends LuceneTestCase {
-  private IndexReader reader;
+  private DirectoryReader reader;
   private Directory dir;
 
   @Override
@@ -54,6 +54,7 @@ public class TestReaderClosed extends LuceneTestCase {
       field.setStringValue(TestUtil.randomUnicodeString(random(), 10));
       writer.addDocument(doc);
     }
+    writer.forceMerge(1);
     reader = writer.getReader();
     writer.close();
   }
@@ -77,8 +78,7 @@ public class TestReaderClosed extends LuceneTestCase {
   // LUCENE-3800
   public void testReaderChaining() throws Exception {
     assertTrue(reader.getRefCount() > 0);
-    IndexReader wrappedReader = SlowCompositeReaderWrapper.wrap(reader);
-    wrappedReader = new ParallelLeafReader((LeafReader) wrappedReader);
+    LeafReader wrappedReader = new ParallelLeafReader(getOnlyLeafReader(reader));
 
     IndexSearcher searcher = newSearcher(wrappedReader);
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestReaderWrapperDVTypeCheck.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestReaderWrapperDVTypeCheck.java b/lucene/core/src/test/org/apache/lucene/index/TestReaderWrapperDVTypeCheck.java
index 308c48e..88b177d 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestReaderWrapperDVTypeCheck.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestReaderWrapperDVTypeCheck.java
@@ -45,7 +45,7 @@ public class TestReaderWrapperDVTypeCheck extends LuceneTestCase {
     {
       final Random indexRandom = new Random(seed);
       final int docs;
-      docs = indexRandom.nextInt(4);
+      docs = TestUtil.nextInt(indexRandom, 1, 4);
      // System.out.println("docs:"+docs);
       
       for(int i=0; i< docs; i++){
@@ -68,12 +68,13 @@ public class TestReaderWrapperDVTypeCheck extends LuceneTestCase {
         iw.commit();
       }
     }
+    iw.forceMerge(1);
     final DirectoryReader reader = iw.getReader();
     
    // System.out.println("sdv:"+ sdvExist+ " ssdv:"+ssdvExist+", segs: "+reader.leaves().size() +", "+reader.leaves());
     
     iw.close();
-    final LeafReader wrapper = SlowCompositeReaderWrapper.wrap(reader);
+    final LeafReader wrapper = getOnlyLeafReader(reader);
     
     {
       //final Random indexRandom = new Random(seed);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermEnum.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermEnum.java b/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermEnum.java
index fc708c2..1e85e14 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermEnum.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermEnum.java
@@ -79,7 +79,7 @@ public class TestSegmentTermEnum extends LuceneTestCase {
                                 .setCodec(TestUtil.alwaysPostingsFormat(TestUtil.getDefaultPostingsFormat())));
     addDoc(writer, "aaa bbb");
     writer.close();
-    SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(dir));
+    LeafReader reader = getOnlyLeafReader(DirectoryReader.open(dir));
     TermsEnum terms = reader.fields().terms("content").iterator();
     assertNotNull(terms.next());
     assertEquals("aaa", terms.term().utf8ToString());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestStressAdvance.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestStressAdvance.java b/lucene/core/src/test/org/apache/lucene/index/TestStressAdvance.java
index d423616..606a11a 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestStressAdvance.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestStressAdvance.java
@@ -74,7 +74,7 @@ public class TestStressAdvance extends LuceneTestCase {
           bDocIDs.add(docID);
         }
       }
-      final TermsEnum te = getOnlySegmentReader(r).fields().terms("field").iterator();
+      final TermsEnum te = getOnlyLeafReader(r).fields().terms("field").iterator();
       
       PostingsEnum de = null;
       for(int iter2=0;iter2<10;iter2++) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestTermsEnum.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestTermsEnum.java b/lucene/core/src/test/org/apache/lucene/index/TestTermsEnum.java
index aa2ca24..b074f81 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestTermsEnum.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestTermsEnum.java
@@ -732,7 +732,7 @@ public class TestTermsEnum extends LuceneTestCase {
     w.forceMerge(1);
     DirectoryReader r = w.getReader();
     w.close();
-    LeafReader sub = getOnlySegmentReader(r);
+    LeafReader sub = getOnlyLeafReader(r);
     Terms terms = sub.fields().terms("field");
     Automaton automaton = new RegExp(".*", RegExp.NONE).toAutomaton();
     CompiledAutomaton ca = new CompiledAutomaton(automaton, false, false);    
@@ -786,7 +786,7 @@ public class TestTermsEnum extends LuceneTestCase {
     w.forceMerge(1);
     DirectoryReader r = w.getReader();
     w.close();
-    LeafReader sub = getOnlySegmentReader(r);
+    LeafReader sub = getOnlyLeafReader(r);
     Terms terms = sub.fields().terms("field");
 
     Automaton automaton = new RegExp(".*d", RegExp.NONE).toAutomaton();
@@ -840,7 +840,7 @@ public class TestTermsEnum extends LuceneTestCase {
     w.forceMerge(1);
     DirectoryReader r = w.getReader();
     w.close();
-    LeafReader sub = getOnlySegmentReader(r);
+    LeafReader sub = getOnlyLeafReader(r);
     Terms terms = sub.fields().terms("field");
 
     Automaton automaton = new RegExp(".*", RegExp.NONE).toAutomaton();  // accept ALL

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java
index 7a9bb4e..79c32d3 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java
@@ -36,7 +36,6 @@ import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.SlowCompositeReaderWrapper;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.similarities.ClassicSimilarity;
 import org.apache.lucene.search.similarities.Similarity;
@@ -157,9 +156,10 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
       writer.addDocument(d4);
     }
     
-    r = SlowCompositeReaderWrapper.wrap(writer.getReader());
+    writer.forceMerge(1);
+    r = getOnlyLeafReader(writer.getReader());
     writer.close();
-    s = newSearcher(r);
+    s = new IndexSearcher(r);
     s.setSimilarity(sim);
   }
   

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java b/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java
index f289975..cc6606f 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java
@@ -87,7 +87,7 @@ public class TestMinShouldMatch2 extends LuceneTestCase {
     iw.forceMerge(1);
     iw.close();
     r = DirectoryReader.open(dir);
-    reader = getOnlySegmentReader(r);
+    reader = getOnlyLeafReader(r);
     searcher = new IndexSearcher(reader);
     searcher.setSimilarity(new ClassicSimilarity() {
       @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseEnum.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseEnum.java b/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseEnum.java
index aa0c86d..5d0096d 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseEnum.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseEnum.java
@@ -48,8 +48,8 @@ public class TestMultiPhraseEnum extends LuceneTestCase {
     DirectoryReader ir = DirectoryReader.open(writer);
     writer.close();
 
-    PostingsEnum p1 = getOnlySegmentReader(ir).postings(new Term("field", "foo"), PostingsEnum.POSITIONS);
-    PostingsEnum p2 = getOnlySegmentReader(ir).postings(new Term("field", "bar"), PostingsEnum.POSITIONS);
+    PostingsEnum p1 = getOnlyLeafReader(ir).postings(new Term("field", "foo"), PostingsEnum.POSITIONS);
+    PostingsEnum p2 = getOnlyLeafReader(ir).postings(new Term("field", "bar"), PostingsEnum.POSITIONS);
     PostingsEnum union = new MultiPhraseQuery.UnionPostingsEnum(Arrays.asList(p1, p2));
     
     assertEquals(-1, union.docID());
@@ -90,8 +90,8 @@ public class TestMultiPhraseEnum extends LuceneTestCase {
     DirectoryReader ir = DirectoryReader.open(writer);
     writer.close();
 
-    PostingsEnum p1 = getOnlySegmentReader(ir).postings(new Term("field", "foo"), PostingsEnum.POSITIONS);
-    PostingsEnum p2 = getOnlySegmentReader(ir).postings(new Term("field", "bar"), PostingsEnum.POSITIONS);
+    PostingsEnum p1 = getOnlyLeafReader(ir).postings(new Term("field", "foo"), PostingsEnum.POSITIONS);
+    PostingsEnum p2 = getOnlyLeafReader(ir).postings(new Term("field", "bar"), PostingsEnum.POSITIONS);
     PostingsEnum union = new MultiPhraseQuery.UnionPostingsEnum(Arrays.asList(p1, p2));
     
     assertEquals(-1, union.docID());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/search/TestPhraseQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestPhraseQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestPhraseQuery.java
index cf77903..0d6cb36 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestPhraseQuery.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestPhraseQuery.java
@@ -95,7 +95,7 @@ public class TestPhraseQuery extends LuceneTestCase {
     reader = writer.getReader();
     writer.close();
 
-    searcher = newSearcher(reader);
+    searcher = new IndexSearcher(reader);
   }
   
   @Override
@@ -123,7 +123,7 @@ public class TestPhraseQuery extends LuceneTestCase {
     query = new PhraseQuery(3, "field", "one", "five");
     ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
     assertEquals(1, hits.length);
-    QueryUtils.check(random(), query,searcher);
+    QueryUtils.check(random(), query, searcher);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/search/TestPositionIncrement.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestPositionIncrement.java b/lucene/core/src/test/org/apache/lucene/search/TestPositionIncrement.java
index b7ae42a..227b15d 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestPositionIncrement.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestPositionIncrement.java
@@ -36,9 +36,7 @@ import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.MultiFields;
 import org.apache.lucene.index.PostingsEnum;
 import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.SlowCompositeReaderWrapper;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.search.spans.MultiSpansWrapper;
 import org.apache.lucene.search.spans.SpanCollector;
 import org.apache.lucene.search.spans.SpanNearQuery;
 import org.apache.lucene.search.spans.SpanQuery;
@@ -225,7 +223,7 @@ public class TestPositionIncrement extends LuceneTestCase {
     writer.addDocument(doc);
 
     final IndexReader readerFromWriter = writer.getReader();
-    LeafReader r = SlowCompositeReaderWrapper.wrap(readerFromWriter);
+    LeafReader r = getOnlyLeafReader(readerFromWriter);
 
     PostingsEnum tp = r.postings(new Term("content", "a"), PostingsEnum.ALL);
     
@@ -241,7 +239,7 @@ public class TestPositionIncrement extends LuceneTestCase {
     // only one doc has "a"
     assertEquals(DocIdSetIterator.NO_MORE_DOCS, tp.nextDoc());
 
-    IndexSearcher is = newSearcher(readerFromWriter);
+    IndexSearcher is = newSearcher(getOnlyLeafReader(readerFromWriter));
   
     SpanTermQuery stq1 = new SpanTermQuery(new Term("content", "a"));
     SpanTermQuery stq2 = new SpanTermQuery(new Term("content", "k"));
@@ -254,7 +252,7 @@ public class TestPositionIncrement extends LuceneTestCase {
       System.out.println("\ngetPayloadSpans test");
     }
     PayloadSpanCollector collector = new PayloadSpanCollector();
-    Spans pspans = MultiSpansWrapper.wrap(is.getIndexReader(), snq, SpanWeight.Postings.PAYLOADS);
+    Spans pspans = snq.createWeight(is, false).getSpans(is.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
     while (pspans.nextDoc() != Spans.NO_MORE_DOCS) {
       while (pspans.nextStartPosition() != Spans.NO_MORE_POSITIONS) {
         if (VERBOSE) {
@@ -276,7 +274,7 @@ public class TestPositionIncrement extends LuceneTestCase {
     assertEquals(8, count);
 
     // System.out.println("\ngetSpans test");
-    Spans spans = MultiSpansWrapper.wrap(is.getIndexReader(), snq);
+    Spans spans = snq.createWeight(is, false).getSpans(is.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     count = 0;
     sawZero = false;
     while (spans.nextDoc() != Spans.NO_MORE_DOCS) {
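
Note: with MultiSpansWrapper gone, every call site below pulls Spans directly
from the query's SpanWeight on the one and only leaf. A sketch of that
recurring pattern (spansOf is a hypothetical helper; it assumes a
single-segment reader, which most of these tests now guarantee via
forceMerge(1)):

    import java.io.IOException;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.spans.SpanQuery;
    import org.apache.lucene.search.spans.SpanWeight;
    import org.apache.lucene.search.spans.Spans;

    final class SpanTestUtil {
      // needsScores=false: positions are enough to iterate spans.
      static Spans spansOf(IndexSearcher searcher, SpanQuery query) throws IOException {
        SpanWeight weight = query.createWeight(searcher, false);
        // getSpans may return null when the segment has no matches.
        return weight.getSpans(searcher.getIndexReader().leaves().get(0),
                               SpanWeight.Postings.POSITIONS);
      }
    }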

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/search/TestSimilarityProvider.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSimilarityProvider.java b/lucene/core/src/test/org/apache/lucene/search/TestSimilarityProvider.java
index 9278934..c332c10 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestSimilarityProvider.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestSimilarityProvider.java
@@ -20,13 +20,13 @@ package org.apache.lucene.search;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.FieldInvertState;
 import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.MultiDocValues;
 import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.SlowCompositeReaderWrapper;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper;
 import org.apache.lucene.search.similarities.Similarity;
@@ -75,10 +75,9 @@ public class TestSimilarityProvider extends LuceneTestCase {
   public void testBasics() throws Exception {
     // sanity check of norms writer
     // TODO: generalize
-    LeafReader slow = SlowCompositeReaderWrapper.wrap(reader);
-    NumericDocValues fooNorms = slow.getNormValues("foo");
-    NumericDocValues barNorms = slow.getNormValues("bar");
-    for (int i = 0; i < slow.maxDoc(); i++) {
+    NumericDocValues fooNorms = MultiDocValues.getNormValues(reader, "foo");
+    NumericDocValues barNorms = MultiDocValues.getNormValues(reader, "bar");
+    for (int i = 0; i < reader.maxDoc(); i++) {
       assertFalse(fooNorms.get(i) == barNorms.get(i));
     }
     

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/search/TestTermScorer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTermScorer.java b/lucene/core/src/test/org/apache/lucene/search/TestTermScorer.java
index 30061b5..2cdcba4 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestTermScorer.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestTermScorer.java
@@ -29,7 +29,6 @@ import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.SlowCompositeReaderWrapper;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.similarities.ClassicSimilarity;
 import org.apache.lucene.store.Directory;
@@ -55,11 +54,11 @@ public class TestTermScorer extends LuceneTestCase {
         .setSimilarity(new ClassicSimilarity()));
     for (int i = 0; i < values.length; i++) {
       Document doc = new Document();
-      doc
-          .add(newTextField(FIELD, values[i], Field.Store.YES));
+      doc.add(newTextField(FIELD, values[i], Field.Store.YES));
       writer.addDocument(doc);
     }
-    indexReader = SlowCompositeReaderWrapper.wrap(writer.getReader());
+    writer.forceMerge(1);
+    indexReader = getOnlyLeafReader(writer.getReader());
     writer.close();
     indexSearcher = newSearcher(indexReader);
     indexSearcher.setSimilarity(new ClassicSimilarity());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/search/TestUsageTrackingFilterCachingPolicy.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestUsageTrackingFilterCachingPolicy.java b/lucene/core/src/test/org/apache/lucene/search/TestUsageTrackingFilterCachingPolicy.java
index 9c7ada8..c656b85 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestUsageTrackingFilterCachingPolicy.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestUsageTrackingFilterCachingPolicy.java
@@ -16,10 +16,15 @@
  */
 package org.apache.lucene.search;
 
+import org.apache.lucene.document.Document;
 import org.apache.lucene.document.IntPoint;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.MultiReader;
-import org.apache.lucene.index.SlowCompositeReaderWrapper;
 import org.apache.lucene.index.Term;
+import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.LuceneTestCase;
 
 public class TestUsageTrackingFilterCachingPolicy extends LuceneTestCase {
@@ -36,7 +41,15 @@ public class TestUsageTrackingFilterCachingPolicy extends LuceneTestCase {
     for (int i = 0; i < 1000; ++i) {
       policy.onUse(q);
     }
-    assertFalse(policy.shouldCache(q, SlowCompositeReaderWrapper.wrap(new MultiReader()).getContext()));
+    Directory dir = newDirectory();
+    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));
+    w.addDocument(new Document());
+    IndexReader r = DirectoryReader.open(w);
+    assertFalse(policy.shouldCache(q, getOnlyLeafReader(r).getContext()));
+    
+    r.close();
+    w.close();
+    dir.close();
   }
 
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/search/spans/TestFieldMaskingSpanQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/spans/TestFieldMaskingSpanQuery.java b/lucene/core/src/test/org/apache/lucene/search/spans/TestFieldMaskingSpanQuery.java
index 052457b..b4435e7 100644
--- a/lucene/core/src/test/org/apache/lucene/search/spans/TestFieldMaskingSpanQuery.java
+++ b/lucene/core/src/test/org/apache/lucene/search/spans/TestFieldMaskingSpanQuery.java
@@ -116,8 +116,9 @@ public class TestFieldMaskingSpanQuery extends LuceneTestCase {
                                          field("first",  "bubba"),
                                          field("last",   "jones")     }));
+    writer.forceMerge(1);
     reader = writer.getReader();
     writer.close();
-    searcher = newSearcher(reader);
+    searcher = new IndexSearcher(getOnlyLeafReader(reader));
   }
 
   @AfterClass
@@ -251,7 +252,7 @@ public class TestFieldMaskingSpanQuery extends LuceneTestCase {
     SpanQuery q  = new SpanOrQuery(q1, new FieldMaskingSpanQuery(q2, "gender"));
     check(q, new int[] { 0, 1, 2, 3, 4 });
 
-    Spans span = MultiSpansWrapper.wrap(searcher.getIndexReader(), q);
+    Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(span, 0,0,1);
     assertNext(span, 1,0,1);
     assertNext(span, 1,1,2);
@@ -273,8 +274,8 @@ public class TestFieldMaskingSpanQuery extends LuceneTestCase {
     check(qA, new int[] { 0, 1, 2, 4 });
     check(qB, new int[] { 0, 1, 2, 4 });
   
-    Spans spanA = MultiSpansWrapper.wrap(searcher.getIndexReader(), qA);
-    Spans spanB = MultiSpansWrapper.wrap(searcher.getIndexReader(), qB);
+    Spans spanA = qA.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spanB = qB.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     
     while (spanA.nextDoc() != Spans.NO_MORE_DOCS) {
       assertNotSame("spanB not still going", Spans.NO_MORE_DOCS, spanB.nextDoc());
@@ -299,7 +300,7 @@ public class TestFieldMaskingSpanQuery extends LuceneTestCase {
         new FieldMaskingSpanQuery(qB, "id") }, -1, false );
     check(q, new int[] { 0, 1, 2, 3 });
 
-    Spans span = MultiSpansWrapper.wrap(searcher.getIndexReader(), q);
+    Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(span, 0,0,1);
     assertNext(span, 1,1,2);
     assertNext(span, 2,0,1);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/search/spans/TestNearSpansOrdered.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/spans/TestNearSpansOrdered.java b/lucene/core/src/test/org/apache/lucene/search/spans/TestNearSpansOrdered.java
index b101c61..f297f33 100644
--- a/lucene/core/src/test/org/apache/lucene/search/spans/TestNearSpansOrdered.java
+++ b/lucene/core/src/test/org/apache/lucene/search/spans/TestNearSpansOrdered.java
@@ -60,9 +60,10 @@ public class TestNearSpansOrdered extends LuceneTestCase {
       doc.add(newTextField(FIELD, docFields[i], Field.Store.NO));
       writer.addDocument(doc);
     }
+    writer.forceMerge(1);
     reader = writer.getReader();
     writer.close();
-    searcher = newSearcher(reader);
+    searcher = newSearcher(getOnlyLeafReader(reader));
   }
 
   protected String[] docFields = {
@@ -118,7 +119,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
   
   public void testNearSpansNext() throws Exception {
     SpanNearQuery q = makeQuery();
-    Spans span = MultiSpansWrapper.wrap(searcher.getIndexReader(), q);
+    Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(span,0,0,3);
     assertNext(span,1,0,4);
     assertFinished(span);
@@ -131,7 +132,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
    */
   public void testNearSpansAdvanceLikeNext() throws Exception {
     SpanNearQuery q = makeQuery();
-    Spans span = MultiSpansWrapper.wrap(searcher.getIndexReader(), q);
+    Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertEquals(0, span.advance(0));
     assertEquals(0, span.nextStartPosition());
     assertEquals(s(0,0,3), s(span));
@@ -143,7 +144,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
   
   public void testNearSpansNextThenAdvance() throws Exception {
     SpanNearQuery q = makeQuery();
-    Spans span = MultiSpansWrapper.wrap(searcher.getIndexReader(), q);
+    Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNotSame(Spans.NO_MORE_DOCS, span.nextDoc());
     assertEquals(0, span.nextStartPosition());
     assertEquals(s(0,0,3), s(span));
@@ -155,7 +156,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
   
   public void testNearSpansNextThenAdvancePast() throws Exception {
     SpanNearQuery q = makeQuery();
-    Spans span = MultiSpansWrapper.wrap(searcher.getIndexReader(), q);
+    Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNotSame(Spans.NO_MORE_DOCS, span.nextDoc());
     assertEquals(0, span.nextStartPosition());
     assertEquals(s(0,0,3), s(span));
@@ -164,13 +165,13 @@ public class TestNearSpansOrdered extends LuceneTestCase {
   
   public void testNearSpansAdvancePast() throws Exception {
     SpanNearQuery q = makeQuery();
-    Spans span = MultiSpansWrapper.wrap(searcher.getIndexReader(), q);
+    Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertEquals(Spans.NO_MORE_DOCS, span.advance(2));
   }
   
   public void testNearSpansAdvanceTo0() throws Exception {
     SpanNearQuery q = makeQuery();
-    Spans span = MultiSpansWrapper.wrap(searcher.getIndexReader(), q);
+    Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertEquals(0, span.advance(0));
     assertEquals(0, span.nextStartPosition());
     assertEquals(s(0,0,3), s(span));
@@ -178,7 +179,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
 
   public void testNearSpansAdvanceTo1() throws Exception {
     SpanNearQuery q = makeQuery();
-    Spans span = MultiSpansWrapper.wrap(searcher.getIndexReader(), q);
+    Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertEquals(1, span.advance(1));
     assertEquals(0, span.nextStartPosition());
     assertEquals(s(1,0,4), s(span));
@@ -217,7 +218,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
         new SpanOrQuery(new SpanTermQuery(new Term(FIELD, "w1")), new SpanTermQuery(new Term(FIELD, "w2"))),
         new SpanTermQuery(new Term(FIELD, "w4"))
     }, 10, true);
-    Spans spans = MultiSpansWrapper.wrap(reader, q);
+    Spans spans = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(spans,0,0,4);
     assertNext(spans,0,1,4);
     assertFinished(spans);
@@ -227,7 +228,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
     SpanNearQuery q = new SpanNearQuery(new SpanQuery[]{
         new SpanTermQuery(new Term(FIELD, "t1")), new SpanTermQuery(new Term(FIELD, "t2"))
     }, 1, true);
-    Spans spans = MultiSpansWrapper.wrap(reader, q);
+    Spans spans = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(spans,4,0,2);
     assertFinished(spans);
   }
@@ -236,7 +237,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
     SpanNearQuery q = new SpanNearQuery(new SpanQuery[]{
         new SpanTermQuery(new Term(FIELD, "t2")), new SpanTermQuery(new Term(FIELD, "t1"))
     }, 1, true);
-    Spans spans = MultiSpansWrapper.wrap(reader, q);
+    Spans spans = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(spans,4,1,4);
     assertNext(spans,4,2,4);
     assertFinished(spans);
@@ -260,7 +261,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
         .addGap(1)
         .addClause(new SpanTermQuery(new Term(FIELD, "w2")))
         .build();
-    Spans spans = MultiSpansWrapper.wrap(reader, q);
+    Spans spans = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(spans, 1, 0, 3);
     assertNext(spans, 2, 0, 3);
     assertFinished(spans);
@@ -273,7 +274,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
         .addClause(new SpanTermQuery(new Term(FIELD, "w3")))
         .setSlop(1)
         .build();
-    spans = MultiSpansWrapper.wrap(reader, q);
+    spans = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(spans, 2, 0, 5);
     assertNext(spans, 3, 0, 6);
     assertFinished(spans);
@@ -285,7 +286,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
         .addGap(2)
         .addClause(new SpanTermQuery(new Term(FIELD, "g")))
         .build();
-    Spans spans = MultiSpansWrapper.wrap(reader, q);
+    Spans spans = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(spans, 5, 0, 4);
     assertNext(spans, 5, 9, 13);
     assertFinished(spans);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanCollection.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanCollection.java b/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanCollection.java
index 03fc40d..dfc0439 100644
--- a/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanCollection.java
+++ b/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanCollection.java
@@ -67,9 +67,10 @@ public class TestSpanCollection extends LuceneTestCase {
       doc.add(newField(FIELD, docFields[i], OFFSETS));
       writer.addDocument(doc);
     }
+    writer.forceMerge(1);
     reader = writer.getReader();
     writer.close();
-    searcher = newSearcher(reader);
+    searcher = newSearcher(getOnlyLeafReader(reader));
   }
 
   private static class TermCollector implements SpanCollector {
@@ -119,7 +120,7 @@ public class TestSpanCollection extends LuceneTestCase {
     SpanNearQuery q7 = new SpanNearQuery(new SpanQuery[]{q1, q6}, 1, true);
 
     TermCollector collector = new TermCollector();
-    Spans spans = MultiSpansWrapper.wrap(reader, q7, SpanWeight.Postings.POSITIONS);
+    Spans spans = q7.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertEquals(0, spans.advance(0));
     spans.nextStartPosition();
     checkCollectedTerms(spans, collector, new Term(FIELD, "w1"), new Term(FIELD, "w2"), new Term(FIELD, "w3"));
@@ -139,7 +140,7 @@ public class TestSpanCollection extends LuceneTestCase {
     SpanOrQuery orQuery = new SpanOrQuery(q2, q3);
 
     TermCollector collector = new TermCollector();
-    Spans spans = MultiSpansWrapper.wrap(reader, orQuery, SpanWeight.Postings.POSITIONS);
+    Spans spans = orQuery.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
 
     assertEquals(1, spans.advance(1));
     spans.nextStartPosition();
@@ -169,7 +170,7 @@ public class TestSpanCollection extends LuceneTestCase {
     SpanNotQuery notq = new SpanNotQuery(nq, q3);
 
     TermCollector collector = new TermCollector();
-    Spans spans = MultiSpansWrapper.wrap(reader, notq, SpanWeight.Postings.POSITIONS);
+    Spans spans = notq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
 
     assertEquals(2, spans.advance(2));
     spans.nextStartPosition();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanContainQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanContainQuery.java b/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanContainQuery.java
index c260704..3e50183 100644
--- a/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanContainQuery.java
+++ b/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanContainQuery.java
@@ -47,9 +47,10 @@ public class TestSpanContainQuery extends LuceneTestCase {
       doc.add(newTextField(field, docFields[i], Field.Store.YES));
       writer.addDocument(doc);
     }
+    writer.forceMerge(1);
     reader = writer.getReader();
     writer.close();
-    searcher = newSearcher(reader);
+    searcher = newSearcher(getOnlyLeafReader(reader));
   }
 
   @Override
@@ -71,7 +72,7 @@ public class TestSpanContainQuery extends LuceneTestCase {
   }
 
   Spans makeSpans(SpanQuery sq) throws Exception {
-    return MultiSpansWrapper.wrap(searcher.getIndexReader(), sq);
+    return sq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
   }
 
   void tstEqualSpans(String mes, SpanQuery expectedQ, SpanQuery actualQ) throws Exception {
@@ -144,4 +145,4 @@ public class TestSpanContainQuery extends LuceneTestCase {
     assertFinished(spans);
   }
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/search/spans/TestSpans.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/spans/TestSpans.java b/lucene/core/src/test/org/apache/lucene/search/spans/TestSpans.java
index 9352f60..2983301 100644
--- a/lucene/core/src/test/org/apache/lucene/search/spans/TestSpans.java
+++ b/lucene/core/src/test/org/apache/lucene/search/spans/TestSpans.java
@@ -70,9 +70,10 @@ public class TestSpans extends LuceneTestCase {
       doc.add(newTextField(field, docFields[i], Field.Store.YES));
       writer.addDocument(doc);
     }
+    writer.forceMerge(1);
     reader = writer.getReader();
     writer.close();
-    searcher = newSearcher(reader);
+    searcher = newSearcher(getOnlyLeafReader(reader));
   }
   
   @Override
@@ -201,7 +202,7 @@ public class TestSpans extends LuceneTestCase {
   public void testSpanNearOrderedOverlap() throws Exception {
     final SpanQuery query = spanNearOrderedQuery(field, 1, "t1", "t2", "t3");
     
-    Spans spans = MultiSpansWrapper.wrap(searcher.getIndexReader(), query);
+    Spans spans = query.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
 
     assertEquals("first doc", 11, spans.nextDoc());
     assertEquals("first start", 0, spans.nextStartPosition());
@@ -216,7 +217,7 @@ public class TestSpans extends LuceneTestCase {
   public void testSpanNearUnOrdered() throws Exception {
     //See http://www.gossamer-threads.com/lists/lucene/java-dev/52270 for discussion about this test
     SpanQuery senq = spanNearUnorderedQuery(field, 0, "u1", "u2");
-    Spans spans = MultiSpansWrapper.wrap(reader, senq);
+    Spans spans = senq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(spans, 4, 1, 3);
     assertNext(spans, 5, 2, 4);
     assertNext(spans, 8, 2, 4);
@@ -225,7 +226,7 @@ public class TestSpans extends LuceneTestCase {
     assertFinished(spans);
 
     senq = spanNearUnorderedQuery(1, senq, spanTermQuery(field, "u2")); 
-    spans = MultiSpansWrapper.wrap(reader, senq);
+    spans = senq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(spans, 4, 0, 3);
     assertNext(spans, 4, 1, 3); // unordered spans can be subsets
     assertNext(spans, 5, 0, 4);
@@ -239,7 +240,7 @@ public class TestSpans extends LuceneTestCase {
   }
 
   private Spans orSpans(String[] terms) throws Exception {
-    return MultiSpansWrapper.wrap(searcher.getIndexReader(), spanOrQuery(field, terms));
+    return spanOrQuery(field, terms).createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
   }
 
   public void testSpanOrEmpty() throws Exception {
@@ -443,7 +444,7 @@ public class TestSpans extends LuceneTestCase {
      SpanQuery iq = spanTermQuery(field, include);
      SpanQuery eq = spanTermQuery(field, exclude);
      SpanQuery snq = spanNotQuery(iq, eq, pre, post);
-     Spans spans = MultiSpansWrapper.wrap(searcher.getIndexReader(), snq);
+     Spans spans = snq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
 
      int i = 0;
      if (spans != null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/facet/src/java/org/apache/lucene/facet/sortedset/DefaultSortedSetDocValuesReaderState.java
----------------------------------------------------------------------
diff --git a/lucene/facet/src/java/org/apache/lucene/facet/sortedset/DefaultSortedSetDocValuesReaderState.java b/lucene/facet/src/java/org/apache/lucene/facet/sortedset/DefaultSortedSetDocValuesReaderState.java
index 5d374f7..e052541 100644
--- a/lucene/facet/src/java/org/apache/lucene/facet/sortedset/DefaultSortedSetDocValuesReaderState.java
+++ b/lucene/facet/src/java/org/apache/lucene/facet/sortedset/DefaultSortedSetDocValuesReaderState.java
@@ -23,9 +23,16 @@ import java.util.Map;
 
 import org.apache.lucene.facet.FacetsConfig;
 import org.apache.lucene.facet.sortedset.SortedSetDocValuesReaderState.OrdRange;
-import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.DocValues;
+import org.apache.lucene.index.DocValuesType;
+import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.SlowCompositeReaderWrapper;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.MultiDocValues.MultiSortedDocValues;
+import org.apache.lucene.index.MultiDocValues.MultiSortedSetDocValues;
+import org.apache.lucene.index.MultiDocValues.OrdinalMap;
+import org.apache.lucene.index.MultiDocValues;
 import org.apache.lucene.index.SortedSetDocValues;
 import org.apache.lucene.util.BytesRef;
 
@@ -35,12 +42,13 @@ import org.apache.lucene.util.BytesRef;
 public class DefaultSortedSetDocValuesReaderState extends SortedSetDocValuesReaderState {
 
   private final String field;
-  private final LeafReader topReader;
   private final int valueCount;
 
   /** {@link IndexReader} passed to the constructor. */
   public final IndexReader origReader;
 
+  private final Map<String,OrdinalMap> cachedOrdMaps = new HashMap<>();
+
   private final Map<String,OrdRange> prefixToOrdRange = new HashMap<>();
 
   /** Creates this, pulling doc values from the default {@link
@@ -57,8 +65,7 @@ public class DefaultSortedSetDocValuesReaderState extends SortedSetDocValuesRead
 
     // We need this to create thread-safe MultiSortedSetDV
     // per collector:
-    topReader = SlowCompositeReaderWrapper.wrap(reader);
-    SortedSetDocValues dv = topReader.getSortedSetDocValues(field);
+    SortedSetDocValues dv = getDocValues();
     if (dv == null) {
       throw new IllegalArgumentException("field \"" + field + "\" was not indexed with SortedSetDocValues");
     }
@@ -100,7 +107,43 @@ public class DefaultSortedSetDocValuesReaderState extends SortedSetDocValuesRead
   /** Return top-level doc values. */
   @Override
   public SortedSetDocValues getDocValues() throws IOException {
-    return topReader.getSortedSetDocValues(field);
+    // TODO: this is dup'd from slow composite reader wrapper ... can we factor it out to share?
+    OrdinalMap map = null;
+    synchronized (cachedOrdMaps) {
+      map = cachedOrdMaps.get(field);
+      if (map == null) {
+        // uncached, or not a multi dv
+        SortedSetDocValues dv = MultiDocValues.getSortedSetValues(origReader, field);
+        if (dv instanceof MultiSortedSetDocValues) {
+          map = ((MultiSortedSetDocValues)dv).mapping;
+          if (map.owner == origReader.getCoreCacheKey()) {
+            cachedOrdMaps.put(field, map);
+          }
+        }
+        return dv;
+      }
+    }
+   
+    assert map != null;
+    int size = origReader.leaves().size();
+    final SortedSetDocValues[] values = new SortedSetDocValues[size];
+    final int[] starts = new int[size+1];
+    for (int i = 0; i < size; i++) {
+      LeafReaderContext context = origReader.leaves().get(i);
+      final LeafReader reader = context.reader();
+      final FieldInfo fieldInfo = reader.getFieldInfos().fieldInfo(field);
+      if (fieldInfo != null && fieldInfo.getDocValuesType() != DocValuesType.SORTED_SET) {
+        return null;
+      }
+      SortedSetDocValues v = reader.getSortedSetDocValues(field);
+      if (v == null) {
+        v = DocValues.emptySortedSet();
+      }
+      values[i] = v;
+      starts[i] = context.docBase;
+    }
+    starts[size] = origReader.maxDoc();
+    return new MultiSortedSetDocValues(values, starts, map);
   }
 
   /** Returns mapping from prefix to {@link OrdRange}. */
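
Note: getDocValues() now rebuilds the top-level view the slow wrapper used to
provide: a MultiSortedSetDocValues across the leaves, with the OrdinalMap
cached per field. Caller code is unchanged; a sketch mirroring the test
removed below (FacetCountSketch is a hypothetical name):

    import java.io.IOException;
    import org.apache.lucene.facet.Facets;
    import org.apache.lucene.facet.FacetsCollector;
    import org.apache.lucene.facet.sortedset.DefaultSortedSetDocValuesReaderState;
    import org.apache.lucene.facet.sortedset.SortedSetDocValuesFacetCounts;
    import org.apache.lucene.facet.sortedset.SortedSetDocValuesReaderState;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.MatchAllDocsQuery;

    final class FacetCountSketch {
      static Facets countAll(IndexSearcher searcher) throws IOException {
        // Per-top-reader state: build once per (re)opened reader and reuse.
        SortedSetDocValuesReaderState state =
            new DefaultSortedSetDocValuesReaderState(searcher.getIndexReader());
        FacetsCollector fc = new FacetsCollector();
        searcher.search(new MatchAllDocsQuery(), fc);
        return new SortedSetDocValuesFacetCounts(state, fc);
      }
    }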

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/facet/src/test/org/apache/lucene/facet/sortedset/TestSortedSetDocValuesFacets.java
----------------------------------------------------------------------
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/sortedset/TestSortedSetDocValuesFacets.java b/lucene/facet/src/test/org/apache/lucene/facet/sortedset/TestSortedSetDocValuesFacets.java
index 7659be8..60beddd 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/sortedset/TestSortedSetDocValuesFacets.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/sortedset/TestSortedSetDocValuesFacets.java
@@ -32,7 +32,6 @@ import org.apache.lucene.facet.FacetsConfig;
 import org.apache.lucene.facet.LabelAndValue;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.SlowCompositeReaderWrapper;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
@@ -227,41 +226,6 @@ public class TestSortedSetDocValuesFacets extends FacetTestCase {
     dir.close();
   }
 
-  public void testSlowCompositeReaderWrapper() throws Exception {
-    Directory dir = newDirectory();
-
-    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
-
-    FacetsConfig config = new FacetsConfig();
-
-    Document doc = new Document();
-    doc.add(new SortedSetDocValuesFacetField("a", "foo1"));
-    writer.addDocument(config.build(doc));
-
-    writer.commit();
-
-    doc = new Document();
-    doc.add(new SortedSetDocValuesFacetField("a", "foo2"));
-    writer.addDocument(config.build(doc));
-
-    // NRT open
-    IndexSearcher searcher = new IndexSearcher(SlowCompositeReaderWrapper.wrap(writer.getReader()));
-
-    // Per-top-reader state:
-    SortedSetDocValuesReaderState state = new DefaultSortedSetDocValuesReaderState(searcher.getIndexReader());
-
-    FacetsCollector c = new FacetsCollector();
-    searcher.search(new MatchAllDocsQuery(), c);    
-    Facets facets = new SortedSetDocValuesFacetCounts(state, c);
-
-    // Ask for top 10 labels for any dims that have counts:
-    assertEquals("dim=a path=[] value=2 childCount=2\n  foo1 (1)\n  foo2 (1)\n", facets.getTopChildren(10, "a").toString());
-
-    writer.close();
-    IOUtils.close(searcher.getIndexReader(), dir);
-  }
-
-
   public void testRandom() throws Exception {
     String[] tokens = getRandomTokens(10);
     Directory indexDir = newDirectory();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java
index a3bda0b..2f51c94 100644
--- a/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java
+++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java
@@ -44,7 +44,6 @@ import org.apache.lucene.index.MultiDocValues;
 import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.ReaderUtil;
-import org.apache.lucene.index.SlowCompositeReaderWrapper;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.queries.function.valuesource.BytesRefFieldSource;
@@ -1164,16 +1163,7 @@ public class TestGrouping extends LuceneTestCase {
     final List<Collection<SearchGroup<BytesRef>>> shardGroups = new ArrayList<>();
     List<AbstractFirstPassGroupingCollector<?>> firstPassGroupingCollectors = new ArrayList<>();
     AbstractFirstPassGroupingCollector<?> firstPassCollector = null;
-    boolean shardsCanUseIDV;
-    if (canUseIDV) {
-      if (SlowCompositeReaderWrapper.class.isAssignableFrom(subSearchers[0].getIndexReader().getClass())) {
-        shardsCanUseIDV = false;
-      } else {
-        shardsCanUseIDV = !preFlex;
-      }
-    } else {
-      shardsCanUseIDV = false;
-    }
+    boolean shardsCanUseIDV = canUseIDV;
 
     String groupField = "group";
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java b/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java
index af507cd..2796e01 100644
--- a/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java
+++ b/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java
@@ -58,7 +58,6 @@ import org.apache.lucene.index.NoMergePolicy;
 import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.PostingsEnum;
 import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.SlowCompositeReaderWrapper;
 import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.index.SortedSetDocValues;
 import org.apache.lucene.index.Term;
@@ -558,7 +557,7 @@ public class TestJoinUtil extends LuceneTestCase {
     assertEquals(numParents, topDocs.totalHits);
     for (int i = 0; i < topDocs.scoreDocs.length; i++) {
       ScoreDoc scoreDoc = topDocs.scoreDocs[i];
-      String id = SlowCompositeReaderWrapper.wrap(searcher.getIndexReader()).document(scoreDoc.doc).get("id");
+      String id = searcher.doc(scoreDoc.doc).get("id");
       assertEquals(lowestScoresPerParent.get(id), scoreDoc.score, 0f);
     }
 
@@ -567,7 +566,7 @@ public class TestJoinUtil extends LuceneTestCase {
     assertEquals(numParents, topDocs.totalHits);
     for (int i = 0; i < topDocs.scoreDocs.length; i++) {
       ScoreDoc scoreDoc = topDocs.scoreDocs[i];
-      String id = SlowCompositeReaderWrapper.wrap(searcher.getIndexReader()).document(scoreDoc.doc).get("id");
+      String id = searcher.doc(scoreDoc.doc).get("id");
       assertEquals(highestScoresPerParent.get(id), scoreDoc.score, 0f);
     }
 
@@ -1229,8 +1228,7 @@ public class TestJoinUtil extends LuceneTestCase {
 
       final Map<Integer, JoinScore> docToJoinScore = new HashMap<>();
       if (multipleValuesPerDocument) {
-        LeafReader slowCompositeReader = SlowCompositeReaderWrapper.wrap(topLevelReader);
-        Terms terms = slowCompositeReader.terms(toField);
+        Terms terms = MultiFields.getTerms(topLevelReader, toField);
         if (terms != null) {
           PostingsEnum postingsEnum = null;
           SortedSet<BytesRef> joinValues = new TreeSet<>();
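
Note: the composite-safe replacements used here, as elsewhere in this commit,
are the static MultiFields/MultiDocValues views. A short sketch
(MultiViewSketch and the field names are placeholders):

    import java.io.IOException;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.MultiDocValues;
    import org.apache.lucene.index.MultiFields;
    import org.apache.lucene.index.NumericDocValues;
    import org.apache.lucene.index.Terms;

    final class MultiViewSketch {
      static void demo(IndexReader reader) throws IOException {
        Terms terms = MultiFields.getTerms(reader, "toField");        // merged terms view
        NumericDocValues norms = MultiDocValues.getNormValues(reader, "field"); // merged norms
      }
    }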

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java
----------------------------------------------------------------------
diff --git a/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java b/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java
index e29e8c8..57e25fe 100644
--- a/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java
+++ b/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java
@@ -41,7 +41,6 @@ import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.CompositeReader;
 import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.PostingsEnum;
 import org.apache.lucene.index.Fields;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.IndexReader;
@@ -49,8 +48,10 @@ import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.MultiDocValues;
+import org.apache.lucene.index.MultiFields;
 import org.apache.lucene.index.NumericDocValues;
-import org.apache.lucene.index.SlowCompositeReaderWrapper;
+import org.apache.lucene.index.PostingsEnum;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
@@ -66,8 +67,8 @@ import org.apache.lucene.search.spans.SpanOrQuery;
 import org.apache.lucene.search.spans.SpanQuery;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
-import org.apache.lucene.util.ByteBlockPool;
 import org.apache.lucene.util.ByteBlockPool.Allocator;
+import org.apache.lucene.util.ByteBlockPool;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LineFileDocs;
@@ -170,15 +171,14 @@ public class TestMemoryIndexAgainstRAMDir extends BaseTokenStreamTestCase {
 
   private void duellReaders(CompositeReader other, LeafReader memIndexReader)
       throws IOException {
-    LeafReader competitor = SlowCompositeReaderWrapper.wrap(other);
     Fields memFields = memIndexReader.fields();
-    for (String field : competitor.fields()) {
+    for (String field : MultiFields.getFields(other)) {
       Terms memTerms = memFields.terms(field);
       Terms iwTerms = memIndexReader.terms(field);
       if (iwTerms == null) {
         assertNull(memTerms);
       } else {
-        NumericDocValues normValues = competitor.getNormValues(field);
+        NumericDocValues normValues = MultiDocValues.getNormValues(other, field);
         NumericDocValues memNormValues = memIndexReader.getNormValues(field);
         if (normValues != null) {
           // mem idx always computes norms on the fly

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/misc/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java b/lucene/misc/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java
new file mode 100644
index 0000000..de79ab0
--- /dev/null
+++ b/lucene/misc/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java
@@ -0,0 +1,275 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.index;
+
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.lucene.index.MultiDocValues.MultiSortedDocValues;
+import org.apache.lucene.index.MultiDocValues.MultiSortedSetDocValues;
+import org.apache.lucene.index.MultiDocValues.OrdinalMap;
+import org.apache.lucene.util.Bits;
+
+/**
+ * This class forces a composite reader (e.g. a {@link
+ * MultiReader} or {@link DirectoryReader}) to emulate a
+ * {@link LeafReader}.  This requires implementing the postings
+ * APIs on-the-fly, using the static methods in {@link
+ * MultiFields} and {@link MultiDocValues}, stepping through
+ * the sub-readers to merge fields/terms, appending docs, etc.
+ *
+ * <p><b>NOTE</b>: this class almost always results in a
+ * performance hit.  If performance is important to your use case,
+ * you'll get better performance by gathering the sub readers via
+ * {@link IndexReader#getContext()} and operating per-LeafReader
+ * instead of using this class.
+ */
+
+public final class SlowCompositeReaderWrapper extends LeafReader {
+
+  private final CompositeReader in;
+  private final Fields fields;
+  private final boolean merging;
+  
+  /** This method is sugar for getting a {@link LeafReader} from
+   * an {@link IndexReader} of any kind. If the reader is already atomic,
+   * it is returned unchanged, otherwise wrapped by this class.
+   */
+  public static LeafReader wrap(IndexReader reader) throws IOException {
+    if (reader instanceof CompositeReader) {
+      return new SlowCompositeReaderWrapper((CompositeReader) reader, false);
+    } else {
+      assert reader instanceof LeafReader;
+      return (LeafReader) reader;
+    }
+  }
+
+  SlowCompositeReaderWrapper(CompositeReader reader, boolean merging) throws IOException {
+    super();
+    in = reader;
+    if (getFieldInfos().hasPointValues()) {
+      throw new IllegalArgumentException("cannot wrap points");
+    }
+    fields = MultiFields.getFields(in);
+    in.registerParentReader(this);
+    this.merging = merging;
+  }
+
+  @Override
+  public String toString() {
+    return "SlowCompositeReaderWrapper(" + in + ")";
+  }
+
+  @Override
+  public void addCoreClosedListener(CoreClosedListener listener) {
+    addCoreClosedListenerAsReaderClosedListener(in, listener);
+  }
+
+  @Override
+  public void removeCoreClosedListener(CoreClosedListener listener) {
+    removeCoreClosedListenerAsReaderClosedListener(in, listener);
+  }
+
+  @Override
+  public Fields fields() {
+    ensureOpen();
+    return fields;
+  }
+
+  @Override
+  public NumericDocValues getNumericDocValues(String field) throws IOException {
+    ensureOpen();
+    return MultiDocValues.getNumericValues(in, field);
+  }
+
+  @Override
+  public Bits getDocsWithField(String field) throws IOException {
+    ensureOpen();
+    return MultiDocValues.getDocsWithField(in, field);
+  }
+
+  @Override
+  public BinaryDocValues getBinaryDocValues(String field) throws IOException {
+    ensureOpen();
+    return MultiDocValues.getBinaryValues(in, field);
+  }
+  
+  @Override
+  public SortedNumericDocValues getSortedNumericDocValues(String field) throws IOException {
+    ensureOpen();
+    return MultiDocValues.getSortedNumericValues(in, field);
+  }
+
+  @Override
+  public SortedDocValues getSortedDocValues(String field) throws IOException {
+    ensureOpen();
+    OrdinalMap map = null;
+    synchronized (cachedOrdMaps) {
+      map = cachedOrdMaps.get(field);
+      if (map == null) {
+        // uncached, or not a multi dv
+        SortedDocValues dv = MultiDocValues.getSortedValues(in, field);
+        if (dv instanceof MultiSortedDocValues) {
+          map = ((MultiSortedDocValues)dv).mapping;
+          if (map.owner == getCoreCacheKey() && merging == false) {
+            cachedOrdMaps.put(field, map);
+          }
+        }
+        return dv;
+      }
+    }
+    int size = in.leaves().size();
+    final SortedDocValues[] values = new SortedDocValues[size];
+    final int[] starts = new int[size+1];
+    for (int i = 0; i < size; i++) {
+      LeafReaderContext context = in.leaves().get(i);
+      final LeafReader reader = context.reader();
+      final FieldInfo fieldInfo = reader.getFieldInfos().fieldInfo(field);
+      if (fieldInfo != null && fieldInfo.getDocValuesType() != DocValuesType.SORTED) {
+        return null;
+      }
+      SortedDocValues v = reader.getSortedDocValues(field);
+      if (v == null) {
+        v = DocValues.emptySorted();
+      }
+      values[i] = v;
+      starts[i] = context.docBase;
+    }
+    starts[size] = maxDoc();
+    return new MultiSortedDocValues(values, starts, map);
+  }
+  
+  @Override
+  public SortedSetDocValues getSortedSetDocValues(String field) throws IOException {
+    ensureOpen();
+    OrdinalMap map = null;
+    synchronized (cachedOrdMaps) {
+      map = cachedOrdMaps.get(field);
+      if (map == null) {
+        // uncached, or not a multi dv
+        SortedSetDocValues dv = MultiDocValues.getSortedSetValues(in, field);
+        if (dv instanceof MultiSortedSetDocValues) {
+          map = ((MultiSortedSetDocValues)dv).mapping;
+          if (map.owner == getCoreCacheKey() && merging == false) {
+            cachedOrdMaps.put(field, map);
+          }
+        }
+        return dv;
+      }
+    }
+   
+    assert map != null;
+    int size = in.leaves().size();
+    final SortedSetDocValues[] values = new SortedSetDocValues[size];
+    final int[] starts = new int[size+1];
+    for (int i = 0; i < size; i++) {
+      LeafReaderContext context = in.leaves().get(i);
+      final LeafReader reader = context.reader();
+      final FieldInfo fieldInfo = reader.getFieldInfos().fieldInfo(field);
+      if (fieldInfo != null && fieldInfo.getDocValuesType() != DocValuesType.SORTED_SET) {
+        return null;
+      }
+      SortedSetDocValues v = reader.getSortedSetDocValues(field);
+      if (v == null) {
+        v = DocValues.emptySortedSet();
+      }
+      values[i] = v;
+      starts[i] = context.docBase;
+    }
+    starts[size] = maxDoc();
+    return new MultiSortedSetDocValues(values, starts, map);
+  }
+  
+  // TODO: this could really be a weak map somewhere else on the coreCacheKey,
+  // but do we really need to optimize slow-wrapper any more?
+  private final Map<String,OrdinalMap> cachedOrdMaps = new HashMap<>();
+
+  @Override
+  public NumericDocValues getNormValues(String field) throws IOException {
+    ensureOpen();
+    return MultiDocValues.getNormValues(in, field);
+  }
+  
+  @Override
+  public Fields getTermVectors(int docID) throws IOException {
+    ensureOpen();
+    return in.getTermVectors(docID);
+  }
+
+  @Override
+  public int numDocs() {
+    // Don't call ensureOpen() here (it could affect performance)
+    return in.numDocs();
+  }
+
+  @Override
+  public int maxDoc() {
+    // Don't call ensureOpen() here (it could affect performance)
+    return in.maxDoc();
+  }
+
+  @Override
+  public void document(int docID, StoredFieldVisitor visitor) throws IOException {
+    ensureOpen();
+    in.document(docID, visitor);
+  }
+
+  @Override
+  public Bits getLiveDocs() {
+    ensureOpen();
+    return MultiFields.getLiveDocs(in);
+  }
+
+  @Override
+  public PointValues getPointValues() {
+    ensureOpen();
+    return null;
+  }
+
+  @Override
+  public FieldInfos getFieldInfos() {
+    ensureOpen();
+    return MultiFields.getMergedFieldInfos(in);
+  }
+
+  @Override
+  public Object getCoreCacheKey() {
+    return in.getCoreCacheKey();
+  }
+
+  @Override
+  public Object getCombinedCoreAndDeletesKey() {
+    return in.getCombinedCoreAndDeletesKey();
+  }
+
+  @Override
+  protected void doClose() throws IOException {
+    // TODO: as this is a wrapper, should we really close the delegate?
+    in.close();
+  }
+
+  @Override
+  public void checkIntegrity() throws IOException {
+    ensureOpen();
+    for (LeafReaderContext ctx : in.leaves()) {
+      ctx.reader().checkIntegrity();
+    }
+  }
+}
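
Note: with the wrapper now living in misc, wrapping must be requested
explicitly, and the constructor refuses indexes that contain point values. A
usage sketch (SlowWrapSketch is a hypothetical name):

    import java.io.IOException;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.LeafReader;
    import org.apache.lucene.index.SlowCompositeReaderWrapper;
    import org.apache.lucene.store.Directory;

    final class SlowWrapSketch {
      // Throws IllegalArgumentException if any field was indexed with points.
      static LeafReader mergedView(Directory dir) throws IOException {
        DirectoryReader reader = DirectoryReader.open(dir);
        return SlowCompositeReaderWrapper.wrap(reader);
      }
    }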

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/misc/src/test/org/apache/lucene/index/TestSlowCompositeReaderWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/index/TestSlowCompositeReaderWrapper.java b/lucene/misc/src/test/org/apache/lucene/index/TestSlowCompositeReaderWrapper.java
new file mode 100644
index 0000000..d37e45a
--- /dev/null
+++ b/lucene/misc/src/test/org/apache/lucene/index/TestSlowCompositeReaderWrapper.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.index;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.TestUtil;
+
+public class TestSlowCompositeReaderWrapper extends LuceneTestCase {
+
+  public void testCoreListenerOnSlowCompositeReaderWrapper() throws IOException {
+    RandomIndexWriter w = new RandomIndexWriter(random(), newDirectory());
+    final int numDocs = TestUtil.nextInt(random(), 1, 5);
+    for (int i = 0; i < numDocs; ++i) {
+      w.addDocument(new Document());
+      if (random().nextBoolean()) {
+        w.commit();
+      }
+    }
+    w.commit();
+    w.close();
+
+    final IndexReader reader = DirectoryReader.open(w.w.getDirectory());
+    final LeafReader leafReader = SlowCompositeReaderWrapper.wrap(reader);
+    
+    final int numListeners = TestUtil.nextInt(random(), 1, 10);
+    final List<LeafReader.CoreClosedListener> listeners = new ArrayList<>();
+    AtomicInteger counter = new AtomicInteger(numListeners);
+    
+    for (int i = 0; i < numListeners; ++i) {
+      CountCoreListener listener = new CountCoreListener(counter, leafReader.getCoreCacheKey());
+      listeners.add(listener);
+      leafReader.addCoreClosedListener(listener);
+    }
+    for (int i = 0; i < 100; ++i) {
+      leafReader.addCoreClosedListener(listeners.get(random().nextInt(listeners.size())));
+    }
+    final int removed = random().nextInt(numListeners);
+    Collections.shuffle(listeners, random());
+    for (int i = 0; i < removed; ++i) {
+      leafReader.removeCoreClosedListener(listeners.get(i));
+    }
+    assertEquals(numListeners, counter.get());
+    // make sure listeners are registered on the wrapped reader and that closing any of them has the same effect
+    if (random().nextBoolean()) {
+      reader.close();
+    } else {
+      leafReader.close();
+    }
+    assertEquals(removed, counter.get());
+    w.w.getDirectory().close();
+  }
+
+  private static final class CountCoreListener implements LeafReader.CoreClosedListener {
+
+    private final AtomicInteger count;
+    private final Object coreCacheKey;
+
+    public CountCoreListener(AtomicInteger count, Object coreCacheKey) {
+      this.count = count;
+      this.coreCacheKey = coreCacheKey;
+    }
+
+    @Override
+    public void onClose(Object coreCacheKey) {
+      assertSame(this.coreCacheKey, coreCacheKey);
+      count.decrementAndGet();
+    }
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java
index 4861cd3..0c5d765 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java
@@ -443,9 +443,9 @@ public class TestDocTermOrds extends LuceneTestCase {
     iw.deleteDocuments(new Term("foo", "baz"));
     DirectoryReader r2 = DirectoryReader.open(iw);
     
-    FieldCache.DEFAULT.getDocTermOrds(getOnlySegmentReader(r2), "foo", null);
+    FieldCache.DEFAULT.getDocTermOrds(getOnlyLeafReader(r2), "foo", null);
     
-    SortedSetDocValues v = FieldCache.DEFAULT.getDocTermOrds(getOnlySegmentReader(r1), "foo", null);
+    SortedSetDocValues v = FieldCache.DEFAULT.getDocTermOrds(getOnlyLeafReader(r1), "foo", null);
     assertEquals(3, v.getValueCount());
     v.setDocument(1);
     assertEquals(1, v.nextOrd());
@@ -473,7 +473,7 @@ public class TestDocTermOrds extends LuceneTestCase {
     iw.close();
     
     DirectoryReader ir = DirectoryReader.open(dir);
-    LeafReader ar = getOnlySegmentReader(ir);
+    LeafReader ar = getOnlyLeafReader(ir);
     
     SortedSetDocValues v = FieldCache.DEFAULT.getDocTermOrds(ar, "foo", FieldCache.INT32_TERM_PREFIX);
     assertEquals(2, v.getValueCount());
@@ -514,7 +514,7 @@ public class TestDocTermOrds extends LuceneTestCase {
     iw.close();
     
     DirectoryReader ir = DirectoryReader.open(dir);
-    LeafReader ar = getOnlySegmentReader(ir);
+    LeafReader ar = getOnlyLeafReader(ir);
     
     SortedSetDocValues v = FieldCache.DEFAULT.getDocTermOrds(ar, "foo", FieldCache.INT64_TERM_PREFIX);
     assertEquals(2, v.getValueCount());
@@ -563,7 +563,7 @@ public class TestDocTermOrds extends LuceneTestCase {
     DirectoryReader ireader = iwriter.getReader();
     iwriter.close();
 
-    LeafReader ar = getOnlySegmentReader(ireader);
+    LeafReader ar = getOnlyLeafReader(ireader);
     SortedSetDocValues dv = FieldCache.DEFAULT.getDocTermOrds(ar, "field", null);
     assertEquals(3, dv.getValueCount());
     
@@ -648,7 +648,7 @@ public class TestDocTermOrds extends LuceneTestCase {
     iw.close();
     
     DirectoryReader ir = DirectoryReader.open(dir);
-    LeafReader ar = getOnlySegmentReader(ir);
+    LeafReader ar = getOnlyLeafReader(ir);
     
     SortedSetDocValues v = FieldCache.DEFAULT.getDocTermOrds(ar, "foo", null);
     assertNotNull(DocValues.unwrapSingleton(v)); // actually a single-valued field

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCache.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCache.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCache.java
index 1b322d9..93c302c 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCache.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCache.java
@@ -419,7 +419,7 @@ public class TestFieldCache extends LuceneTestCase {
     iw.addDocument(doc);
     DirectoryReader ir = iw.getReader();
     iw.close();
-    LeafReader ar = getOnlySegmentReader(ir);
+    LeafReader ar = getOnlyLeafReader(ir);
     
     // Binary type: can be retrieved via getTerms()
     expectThrows(IllegalStateException.class, () -> {
@@ -535,7 +535,7 @@ public class TestFieldCache extends LuceneTestCase {
     DirectoryReader ir = iw.getReader();
     iw.close();
     
-    LeafReader ar = getOnlySegmentReader(ir);
+    LeafReader ar = getOnlyLeafReader(ir);
     
     final FieldCache cache = FieldCache.DEFAULT;
     cache.purgeAllCaches();
@@ -593,7 +593,7 @@ public class TestFieldCache extends LuceneTestCase {
     DirectoryReader ir = iw.getReader();
     iw.close();
     
-    LeafReader ar = getOnlySegmentReader(ir);
+    LeafReader ar = getOnlyLeafReader(ir);
     
     final FieldCache cache = FieldCache.DEFAULT;
     cache.purgeAllCaches();
@@ -673,7 +673,7 @@ public class TestFieldCache extends LuceneTestCase {
     }
     iw.forceMerge(1);
     final DirectoryReader reader = iw.getReader();
-    final NumericDocValues longs = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.LONG_POINT_PARSER, false);
+    final NumericDocValues longs = FieldCache.DEFAULT.getNumerics(getOnlyLeafReader(reader), "f", FieldCache.LONG_POINT_PARSER, false);
     for (int i = 0; i < values.length; ++i) {
       assertEquals(values[i], longs.get(i));
     }
@@ -719,7 +719,7 @@ public class TestFieldCache extends LuceneTestCase {
     }
     iw.forceMerge(1);
     final DirectoryReader reader = iw.getReader();
-    final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.INT_POINT_PARSER, false);
+    final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(getOnlyLeafReader(reader), "f", FieldCache.INT_POINT_PARSER, false);
     for (int i = 0; i < values.length; ++i) {
       assertEquals(values[i], ints.get(i));
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java
index 0d5584e..de8eab1 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java
@@ -47,7 +47,7 @@ public class TestFieldCacheReopen extends LuceneTestCase {
   
     // Open reader1
     DirectoryReader r = DirectoryReader.open(dir);
-    LeafReader r1 = getOnlySegmentReader(r);
+    LeafReader r1 = getOnlyLeafReader(r);
     final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(r1, "number", FieldCache.INT_POINT_PARSER, false);
     assertEquals(17, ints.get(0));
   

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java
index 23b7d0c..9809324 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java
@@ -399,7 +399,7 @@ public class TestFieldCacheVsDocValues extends LuceneTestCase {
     
     // now compare again after the merge
     ir = writer.getReader();
-    LeafReader ar = getOnlySegmentReader(ir);
+    LeafReader ar = getOnlyLeafReader(ir);
     SortedSetDocValues expected = FieldCache.DEFAULT.getDocTermOrds(ar, "indexed", null);
     SortedSetDocValues actual = ar.getSortedSetDocValues("dv");
     assertEquals(ir.maxDoc(), expected, actual);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java
index e716419..3f1f450 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java
@@ -181,7 +181,7 @@ public class TestFieldCacheWithThreads extends LuceneTestCase {
     final DirectoryReader r = writer.getReader();
     writer.close();
     
-    final LeafReader sr = getOnlySegmentReader(r);
+    final LeafReader sr = getOnlyLeafReader(r);
 
     final long END_TIME = System.currentTimeMillis() + (TEST_NIGHTLY ? 30 : 1);
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/misc/src/test/org/apache/lucene/uninverting/TestLegacyFieldCache.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestLegacyFieldCache.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestLegacyFieldCache.java
index c4ef1c4..4fd66ad 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestLegacyFieldCache.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestLegacyFieldCache.java
@@ -307,7 +307,7 @@ public class TestLegacyFieldCache extends LuceneTestCase {
     iw.addDocument(doc);
     DirectoryReader ir = iw.getReader();
     iw.close();
-    LeafReader ar = getOnlySegmentReader(ir);
+    LeafReader ar = getOnlyLeafReader(ir);
     
     // Binary type: can be retrieved via getTerms()
     expectThrows(IllegalStateException.class, () -> {
@@ -340,7 +340,7 @@ public class TestLegacyFieldCache extends LuceneTestCase {
     DirectoryReader ir = iw.getReader();
     iw.close();
     
-    LeafReader ar = getOnlySegmentReader(ir);
+    LeafReader ar = getOnlyLeafReader(ir);
     
     final FieldCache cache = FieldCache.DEFAULT;
     cache.purgeAllCaches();
@@ -379,7 +379,7 @@ public class TestLegacyFieldCache extends LuceneTestCase {
     DirectoryReader ir = iw.getReader();
     iw.close();
     
-    LeafReader ar = getOnlySegmentReader(ir);
+    LeafReader ar = getOnlyLeafReader(ir);
     
     final FieldCache cache = FieldCache.DEFAULT;
     cache.purgeAllCaches();
@@ -440,7 +440,7 @@ public class TestLegacyFieldCache extends LuceneTestCase {
     }
     iw.forceMerge(1);
     final DirectoryReader reader = iw.getReader();
-    final NumericDocValues longs = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.LEGACY_LONG_PARSER, false);
+    final NumericDocValues longs = FieldCache.DEFAULT.getNumerics(getOnlyLeafReader(reader), "f", FieldCache.LEGACY_LONG_PARSER, false);
     for (int i = 0; i < values.length; ++i) {
       assertEquals(values[i], longs.get(i));
     }
@@ -486,7 +486,7 @@ public class TestLegacyFieldCache extends LuceneTestCase {
     }
     iw.forceMerge(1);
     final DirectoryReader reader = iw.getReader();
-    final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.LEGACY_INT_PARSER, false);
+    final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(getOnlyLeafReader(reader), "f", FieldCache.LEGACY_INT_PARSER, false);
     for (int i = 0; i < values.length; ++i) {
       assertEquals(values[i], ints.get(i));
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/queries/src/test/org/apache/lucene/queries/CommonTermsQueryTest.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/CommonTermsQueryTest.java b/lucene/queries/src/test/org/apache/lucene/queries/CommonTermsQueryTest.java
index a7bfffd..e991b0c 100644
--- a/lucene/queries/src/test/org/apache/lucene/queries/CommonTermsQueryTest.java
+++ b/lucene/queries/src/test/org/apache/lucene/queries/CommonTermsQueryTest.java
@@ -32,7 +32,6 @@ import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.SlowCompositeReaderWrapper;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermContext;
 import org.apache.lucene.index.Terms;
@@ -399,8 +398,9 @@ public class CommonTermsQueryTest extends LuceneTestCase {
     analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));
     RandomIndexWriter w = new RandomIndexWriter(random(), dir, analyzer);
     createRandomIndex(atLeast(50), w, random().nextLong());
+    w.forceMerge(1);
     DirectoryReader reader = w.getReader();
-    LeafReader wrapper = SlowCompositeReaderWrapper.wrap(reader);
+    LeafReader wrapper = getOnlyLeafReader(reader);
     String field = "body";
     Terms terms = wrapper.terms(field);
     PriorityQueue<TermAndFreq> lowFreqQueue = new PriorityQueue<CommonTermsQueryTest.TermAndFreq>(
@@ -489,7 +489,7 @@ public class CommonTermsQueryTest extends LuceneTestCase {
       QueryUtils.check(random(), cq, newSearcher(reader2));
       reader2.close();
     } finally {
-      IOUtils.close(reader, wrapper, w, dir, analyzer);
+      IOUtils.close(wrapper, w, dir, analyzer);
     }
     
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/queries/src/test/org/apache/lucene/queries/TermsQueryTest.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/TermsQueryTest.java b/lucene/queries/src/test/org/apache/lucene/queries/TermsQueryTest.java
index c14d543..a87e45d 100644
--- a/lucene/queries/src/test/org/apache/lucene/queries/TermsQueryTest.java
+++ b/lucene/queries/src/test/org/apache/lucene/queries/TermsQueryTest.java
@@ -342,11 +342,11 @@ public class TermsQueryTest extends LuceneTestCase {
     w.close();
     TermsQuery query = new TermsQuery(new Term("foo", "bar"), new Term("foo", "baz"));
     UsageTrackingQueryCachingPolicy policy = new UsageTrackingQueryCachingPolicy();
-    assertFalse(policy.shouldCache(query, getOnlySegmentReader(reader).getContext()));
+    assertFalse(policy.shouldCache(query, getOnlyLeafReader(reader).getContext()));
     policy.onUse(query);
     policy.onUse(query);
     // cached after two uses
-    assertTrue(policy.shouldCache(query, getOnlySegmentReader(reader).getContext()));
+    assertTrue(policy.shouldCache(query, getOnlyLeafReader(reader).getContext()));
     reader.close();
     dir.close();
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/queries/src/test/org/apache/lucene/queries/function/TestSortedSetFieldSource.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/function/TestSortedSetFieldSource.java b/lucene/queries/src/test/org/apache/lucene/queries/function/TestSortedSetFieldSource.java
index d028dce..b72bfeb 100644
--- a/lucene/queries/src/test/org/apache/lucene/queries/function/TestSortedSetFieldSource.java
+++ b/lucene/queries/src/test/org/apache/lucene/queries/function/TestSortedSetFieldSource.java
@@ -50,7 +50,7 @@ public class TestSortedSetFieldSource extends LuceneTestCase {
 
     DirectoryReader ir = DirectoryReader.open(dir);
     IndexSearcher searcher = newSearcher(ir);
-    LeafReader ar = getOnlySegmentReader(ir);
+    LeafReader ar = getOnlyLeafReader(ir);
     
     ValueSource vs = new SortedSetFieldSource("value");
     FunctionValues values = vs.getValues(Collections.emptyMap(), ar.getContext());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/queries/src/test/org/apache/lucene/queries/payloads/PayloadHelper.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/payloads/PayloadHelper.java b/lucene/queries/src/test/org/apache/lucene/queries/payloads/PayloadHelper.java
index 484c1bd..cfd5156 100644
--- a/lucene/queries/src/test/org/apache/lucene/queries/payloads/PayloadHelper.java
+++ b/lucene/queries/src/test/org/apache/lucene/queries/payloads/PayloadHelper.java
@@ -126,10 +126,11 @@ public class PayloadHelper {
       doc.add(new TextField(NO_PAYLOAD_FIELD, English.intToEnglish(i), Field.Store.YES));
       writer.addDocument(doc);
     }
+    writer.forceMerge(1);
     reader = DirectoryReader.open(writer);
     writer.close();
 
-    IndexSearcher searcher = LuceneTestCase.newSearcher(reader);
+    IndexSearcher searcher = LuceneTestCase.newSearcher(LuceneTestCase.getOnlyLeafReader(reader));
     searcher.setSimilarity(similarity);
     return searcher;
   }
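
The hunks above all follow one pattern: the tests stop going through
getOnlySegmentReader / SlowCompositeReaderWrapper and instead call
LuceneTestCase.getOnlyLeafReader, which requires the underlying index to
contain exactly one segment; that is why forceMerge(1) calls were added before
opening the reader where the index may have several segments. An illustrative
sketch of that pattern (not from this commit; assumes it runs inside a
LuceneTestCase subclass, with the usual org.apache.lucene.index,
org.apache.lucene.document and org.apache.lucene.store imports):

    Directory dir = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), dir);
    Document doc = new Document();
    doc.add(newStringField("foo", "bar", Field.Store.NO));
    w.addDocument(doc);
    w.forceMerge(1);                        // guarantee a single segment
    DirectoryReader r = w.getReader();
    LeafReader leaf = getOnlyLeafReader(r); // replaces getOnlySegmentReader(r)
    Terms terms = leaf.terms("foo");        // leaf-level access, no wrapper
    r.close();
    w.close();
    dir.close();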


[37/50] [abbrv] lucene-solr git commit: SOLR-7858 - update links between new/old UIs for 6.x release

Posted by ho...@apache.org.
SOLR-7858 - update links between new/old UIs for 6.x release


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/937a4148
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/937a4148
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/937a4148

Branch: refs/heads/jira/SOLR-445
Commit: 937a41489f78aa32efb5e65dcc4e60e9bae19431
Parents: 859ad95
Author: Upayavira <uv...@odoko.co.uk>
Authored: Thu Mar 10 23:48:48 2016 +0000
Committer: Upayavira <uv...@odoko.co.uk>
Committed: Thu Mar 10 23:48:48 2016 +0000

----------------------------------------------------------------------
 solr/webapp/web/css/angular/common.css | 13 ++-----------
 solr/webapp/web/css/styles/common.css  | 16 +++++++++++++++-
 solr/webapp/web/index.html             |  6 ++----
 solr/webapp/web/old.html               |  5 +++--
 4 files changed, 22 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/937a4148/solr/webapp/web/css/angular/common.css
----------------------------------------------------------------------
diff --git a/solr/webapp/web/css/angular/common.css b/solr/webapp/web/css/angular/common.css
index 1a3b087..d960446 100644
--- a/solr/webapp/web/css/angular/common.css
+++ b/solr/webapp/web/css/angular/common.css
@@ -762,16 +762,7 @@ pre.syntax .tex .formula
   padding-left: 16px;
 }
 
-.new-ui-warning {
-  position: absolute;
-  left: 150px;
-  top: -20px;
-  align: center;
-  color: red;
-  font-weight: bold;
-}
-.new-ui-warning a.ul {
-  color: red;
-  font-weight: bold;
+.other-ui-link a.ul {
   text-decoration: underline;
 }
+

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/937a4148/solr/webapp/web/css/styles/common.css
----------------------------------------------------------------------
diff --git a/solr/webapp/web/css/styles/common.css b/solr/webapp/web/css/styles/common.css
index f0e0652..6c0a9fb 100644
--- a/solr/webapp/web/css/styles/common.css
+++ b/solr/webapp/web/css/styles/common.css
@@ -714,4 +714,18 @@ pre.syntax .tex .formula
 }
 .other-ui-link a.ul {
   text-decoration: underline;
-}
\ No newline at end of file
+}
+
+.old-ui-warning {
+  position: absolute;
+  right: 0px;
+  top: -20px;
+  text-align: center;
+  color: red;
+  font-weight: bold;
+}
+.old-ui-warning a.ul {
+  color: red;
+  font-weight: bold;
+  text-decoration: underline;
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/937a4148/solr/webapp/web/index.html
----------------------------------------------------------------------
diff --git a/solr/webapp/web/index.html b/solr/webapp/web/index.html
index 3c23047..6c4df6a 100644
--- a/solr/webapp/web/index.html
+++ b/solr/webapp/web/index.html
@@ -121,10 +121,8 @@ limitations under the License.
         <div class="exception">{{exception.msg}}</div>
       </div>
 
-      <div class="new-ui-warning">
-        This is an experimental UI. Report bugs <a class="ul" target="_blank" href="http://issues.apache.org/jira/browse/SOLR">here</a>.
-        For the old UI click <a class="ul" href="/solr/old.html">here</a>
-        <a target="_blank" href="http://wiki.apache.org/solr/AngularUI">&nbsp;<span class="help"></span></a>
+      <div class="other-ui-link">
+        Use <a class="ul" href="/solr/old.html">original UI</a><a target="_blank" href="http://wiki.apache.org/solr/AngularUI">&nbsp;<span class="help"></span></a>
       </div>
 
       <div id="content-wrapper">

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/937a4148/solr/webapp/web/old.html
----------------------------------------------------------------------
diff --git a/solr/webapp/web/old.html b/solr/webapp/web/old.html
index a25504e..d688a0a 100644
--- a/solr/webapp/web/old.html
+++ b/solr/webapp/web/old.html
@@ -79,8 +79,9 @@ limitations under the License.
                 
       </div>
 
-      <div class="other-ui-link">
-        Try <a class="ul" href="/solr/">New UI</a><a target="_blank" href="http://wiki.apache.org/solr/AngularUI">&nbsp;<span class="help"></span></a>
+      <div class="old-ui-warning">
+        THIS USER INTERFACE IS DEPRECATED. Please use the current UI <a class="ul" href="/solr/">here</a>
+        <a target="_blank" href="http://wiki.apache.org/solr/AngularUI">&nbsp;<span class="help"></span></a>
       </div>
 
       <div id="content-wrapper">


[22/50] [abbrv] lucene-solr git commit: LUCENE-7089, LUCENE-7075: add points to flexible queryparser to replace legacy numerics support

Posted by ho...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/PointRangeQueryNode.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/PointRangeQueryNode.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/PointRangeQueryNode.java
new file mode 100644
index 0000000..cb838fc
--- /dev/null
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/PointRangeQueryNode.java
@@ -0,0 +1,124 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.queryparser.flexible.standard.nodes;
+
+import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
+import org.apache.lucene.queryparser.flexible.standard.config.PointsConfig;
+
+/**
+ * This query node represents a range query composed of {@link PointQueryNode}
+ * bounds, which means the bound values are {@link Number}s.
+ * 
+ * @see PointQueryNode
+ * @see AbstractRangeQueryNode
+ */
+public class PointRangeQueryNode extends AbstractRangeQueryNode<PointQueryNode> {
+  
+  public PointsConfig numericConfig; 
+  
+  /**
+   * Constructs a {@link PointRangeQueryNode} object using the given
+   * {@link PointQueryNode} as its bounds and {@link PointsConfig}.
+   * 
+   * @param lower the lower bound
+   * @param upper the upper bound
+   * @param lowerInclusive <code>true</code> if the lower bound is inclusive, otherwise, <code>false</code>
+   * @param upperInclusive <code>true</code> if the upper bound is inclusive, otherwise, <code>false</code>
+   * @param numericConfig the {@link PointsConfig} associated with the upper and lower bounds
+   * 
+   * @see #setBounds(PointQueryNode, PointQueryNode, boolean, boolean, PointsConfig)
+   */
+  public PointRangeQueryNode(PointQueryNode lower, PointQueryNode upper,
+      boolean lowerInclusive, boolean upperInclusive, PointsConfig numericConfig) throws QueryNodeException {
+    setBounds(lower, upper, lowerInclusive, upperInclusive, numericConfig);
+  }
+  
+  /**
+   * Sets the upper and lower bounds of this range query node and the
+   * {@link PointsConfig} associated with these bounds.
+   * 
+   * @param lower the lower bound
+   * @param upper the upper bound
+   * @param lowerInclusive <code>true</code> if the lower bound is inclusive, otherwise, <code>false</code>
+   * @param upperInclusive <code>true</code> if the upper bound is inclusive, otherwise, <code>false</code>
+   * @param pointsConfig the {@link PointsConfig} associated with the upper and lower bounds
+   * 
+   */
+  public void setBounds(PointQueryNode lower, PointQueryNode upper,
+      boolean lowerInclusive, boolean upperInclusive, PointsConfig pointsConfig) throws QueryNodeException {
+    
+    if (pointsConfig == null) {
+      throw new IllegalArgumentException("pointsConfig cannot be null!");
+    }
+    
+    Class<? extends Number> lowerNumberType, upperNumberType;
+    
+    if (lower != null && lower.getValue() != null) {
+      lowerNumberType = lower.getValue().getClass();
+    } else {
+      lowerNumberType = null;
+    }
+    
+    if (upper != null && upper.getValue() != null) {
+      upperNumberType = upper.getValue().getClass();
+    } else {
+      upperNumberType = null;
+    }
+    
+    if (lowerNumberType != null
+        && !lowerNumberType.equals(pointsConfig.getType())) {
+      throw new IllegalArgumentException(
+          "lower value's type should be the same as numericConfig type: "
+              + lowerNumberType + " != " + pointsConfig.getType());
+    }
+    
+    if (upperNumberType != null
+        && !upperNumberType.equals(pointsConfig.getType())) {
+      throw new IllegalArgumentException(
+          "upper value's type should be the same as numericConfig type: "
+              + upperNumberType + " != " + pointsConfig.getType());
+    }
+    
+    super.setBounds(lower, upper, lowerInclusive, upperInclusive);
+    this.numericConfig = pointsConfig;
+  }
+  
+  /**
+   * Returns the {@link PointsConfig} associated with the lower and upper bounds.
+   * 
+   * @return the {@link PointsConfig} associated with the lower and upper bounds
+   */
+  public PointsConfig getPointsConfig() {
+    return this.numericConfig;
+  }
+  
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append("<pointRange lowerInclusive='");
+    sb.append(isLowerInclusive());
+    sb.append("' upperInclusive='");
+    sb.append(isUpperInclusive());
+    sb.append("' type='");
+    sb.append(numericConfig.getType().getSimpleName());
+    sb.append("'>\n");
+    sb.append(getLowerBound()).append('\n');
+    sb.append(getUpperBound()).append('\n');
+    sb.append("</pointRange>");
+    return sb.toString();
+  }
+}
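
As a hedged illustration of how these bounds fit together (not from this
patch; field name and values are made up, and it assumes the
PointsConfig(NumberFormat, Class) constructor referenced by this class), the
following sketch builds by hand the node the parser would otherwise create.
The constructor throws QueryNodeException if a bound's type does not match
config.getType():

    // imports: java.text.NumberFormat, java.util.Locale,
    //          org.apache.lucene.queryparser.flexible.standard.config.PointsConfig,
    //          org.apache.lucene.queryparser.flexible.standard.nodes.*
    PointsConfig config =
        new PointsConfig(NumberFormat.getIntegerInstance(Locale.ROOT), Integer.class);
    PointQueryNode lower = new PointQueryNode("price", 1, config.getNumberFormat());
    PointQueryNode upper = new PointQueryNode("price", 10, config.getNumberFormat());
    PointRangeQueryNode range = new PointRangeQueryNode(lower, upper, true, true, config);
    System.out.println(range);
    // <pointRange lowerInclusive='true' upperInclusive='true' type='Integer'> ...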

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/LegacyNumericQueryNodeProcessor.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/LegacyNumericQueryNodeProcessor.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/LegacyNumericQueryNodeProcessor.java
new file mode 100644
index 0000000..8b71824
--- /dev/null
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/LegacyNumericQueryNodeProcessor.java
@@ -0,0 +1,154 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.queryparser.flexible.standard.processors;
+
+import java.text.NumberFormat;
+import java.text.ParseException;
+import java.util.List;
+
+import org.apache.lucene.queryparser.flexible.messages.MessageImpl;
+import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
+import org.apache.lucene.queryparser.flexible.core.QueryNodeParseException;
+import org.apache.lucene.queryparser.flexible.core.config.FieldConfig;
+import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler;
+import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages;
+import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode;
+import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode;
+import org.apache.lucene.queryparser.flexible.core.nodes.RangeQueryNode;
+import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl;
+import org.apache.lucene.queryparser.flexible.standard.config.LegacyNumericConfig;
+import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.ConfigurationKeys;
+import org.apache.lucene.queryparser.flexible.standard.nodes.LegacyNumericQueryNode;
+import org.apache.lucene.queryparser.flexible.standard.nodes.LegacyNumericRangeQueryNode;
+
+/**
+ * This processor is used to convert {@link FieldQueryNode}s to
+ * {@link LegacyNumericRangeQueryNode}s. It looks for
+ * {@link ConfigurationKeys#LEGACY_NUMERIC_CONFIG} set in the {@link FieldConfig} of
+ * every {@link FieldQueryNode} found. If
+ * {@link ConfigurationKeys#LEGACY_NUMERIC_CONFIG} is found, it considers the
+ * {@link FieldQueryNode} to be a numeric query and converts it to a
+ * {@link LegacyNumericRangeQueryNode} whose lower and upper bounds are both
+ * inclusive and equal to the value represented by the {@link FieldQueryNode},
+ * converted to a {@link Number}. This means that <b>field:1</b> is converted
+ * to <b>field:[1 TO 1]</b>. <br>
+ * <br>
+ * Note that {@link FieldQueryNode}s children of a
+ * {@link RangeQueryNode} are ignored.
+ * 
+ * @see ConfigurationKeys#LEGACY_NUMERIC_CONFIG
+ * @see FieldQueryNode
+ * @see LegacyNumericConfig
+ * @see LegacyNumericQueryNode
+ * @deprecated Index with points and use {@link PointQueryNodeProcessor} instead.
+ */
+@Deprecated
+public class LegacyNumericQueryNodeProcessor extends QueryNodeProcessorImpl {
+  
+  /**
+   * Constructs a {@link LegacyNumericQueryNodeProcessor} object.
+   */
+  public LegacyNumericQueryNodeProcessor() {
+  // empty constructor
+  }
+  
+  @Override
+  protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException {
+    
+    if (node instanceof FieldQueryNode
+        && !(node.getParent() instanceof RangeQueryNode)) {
+      
+      QueryConfigHandler config = getQueryConfigHandler();
+      
+      if (config != null) {
+        FieldQueryNode fieldNode = (FieldQueryNode) node;
+        FieldConfig fieldConfig = config.getFieldConfig(fieldNode
+            .getFieldAsString());
+        
+        if (fieldConfig != null) {
+          LegacyNumericConfig numericConfig = fieldConfig
+              .get(ConfigurationKeys.LEGACY_NUMERIC_CONFIG);
+          
+          if (numericConfig != null) {
+            
+            NumberFormat numberFormat = numericConfig.getNumberFormat();
+            String text = fieldNode.getTextAsString();
+            Number number = null;
+            
+            if (text.length() > 0) {
+              
+              try {
+                number = numberFormat.parse(text);
+                
+              } catch (ParseException e) {
+                throw new QueryNodeParseException(new MessageImpl(
+                    QueryParserMessages.COULD_NOT_PARSE_NUMBER, fieldNode
+                        .getTextAsString(), numberFormat.getClass()
+                        .getCanonicalName()), e);
+              }
+              
+              switch (numericConfig.getType()) {
+                case LONG:
+                  number = number.longValue();
+                  break;
+                case INT:
+                  number = number.intValue();
+                  break;
+                case DOUBLE:
+                  number = number.doubleValue();
+                  break;
+                case FLOAT:
+                  number = number.floatValue();
+              }
+              
+            } else {
+              throw new QueryNodeParseException(new MessageImpl(
+                  QueryParserMessages.NUMERIC_CANNOT_BE_EMPTY, fieldNode.getFieldAsString()));
+            }
+            
+            LegacyNumericQueryNode lowerNode = new LegacyNumericQueryNode(fieldNode
+                .getField(), number, numberFormat);
+            LegacyNumericQueryNode upperNode = new LegacyNumericQueryNode(fieldNode
+                .getField(), number, numberFormat);
+            
+            return new LegacyNumericRangeQueryNode(lowerNode, upperNode, true, true,
+                numericConfig);
+            
+          }
+          
+        }
+        
+      }
+      
+    }
+    
+    return node;
+    
+  }
+  
+  @Override
+  protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException {
+    return node;
+  }
+  
+  @Override
+  protected List<QueryNode> setChildrenOrder(List<QueryNode> children)
+      throws QueryNodeException {
+    return children;
+  }
+  
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/LegacyNumericRangeQueryNodeProcessor.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/LegacyNumericRangeQueryNodeProcessor.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/LegacyNumericRangeQueryNodeProcessor.java
new file mode 100644
index 0000000..5a54b7b
--- /dev/null
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/LegacyNumericRangeQueryNodeProcessor.java
@@ -0,0 +1,170 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.queryparser.flexible.standard.processors;
+
+import java.text.NumberFormat;
+import java.text.ParseException;
+import java.util.List;
+
+import org.apache.lucene.queryparser.flexible.messages.MessageImpl;
+import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
+import org.apache.lucene.queryparser.flexible.core.QueryNodeParseException;
+import org.apache.lucene.queryparser.flexible.core.config.FieldConfig;
+import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler;
+import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages;
+import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode;
+import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode;
+import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl;
+import org.apache.lucene.queryparser.flexible.core.util.StringUtils;
+import org.apache.lucene.queryparser.flexible.standard.config.LegacyNumericConfig;
+import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.ConfigurationKeys;
+import org.apache.lucene.queryparser.flexible.standard.nodes.LegacyNumericQueryNode;
+import org.apache.lucene.queryparser.flexible.standard.nodes.LegacyNumericRangeQueryNode;
+import org.apache.lucene.queryparser.flexible.standard.nodes.TermRangeQueryNode;
+
+/**
+ * This processor is used to convert {@link TermRangeQueryNode}s to
+ * {@link LegacyNumericRangeQueryNode}s. It looks for
+ * {@link ConfigurationKeys#LEGACY_NUMERIC_CONFIG} set in the {@link FieldConfig} of
+ * every {@link TermRangeQueryNode} found. If
+ * {@link ConfigurationKeys#LEGACY_NUMERIC_CONFIG} is found, it considers the
+ * {@link TermRangeQueryNode} to be a numeric range query and converts it to a
+ * {@link LegacyNumericRangeQueryNode}.
+ * 
+ * @see ConfigurationKeys#LEGACY_NUMERIC_CONFIG
+ * @see TermRangeQueryNode
+ * @see LegacyNumericConfig
+ * @see LegacyNumericRangeQueryNode
+ * @deprecated Index with points and use {@link PointRangeQueryNodeProcessor} instead.
+ */
+@Deprecated
+public class LegacyNumericRangeQueryNodeProcessor extends QueryNodeProcessorImpl {
+  
+  /**
+   * Constructs a {@link LegacyNumericRangeQueryNodeProcessor} object.
+   */
+  public LegacyNumericRangeQueryNodeProcessor() {
+  // empty constructor
+  }
+  
+  @Override
+  protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException {
+    
+    if (node instanceof TermRangeQueryNode) {
+      QueryConfigHandler config = getQueryConfigHandler();
+      
+      if (config != null) {
+        TermRangeQueryNode termRangeNode = (TermRangeQueryNode) node;
+        FieldConfig fieldConfig = config.getFieldConfig(StringUtils
+            .toString(termRangeNode.getField()));
+        
+        if (fieldConfig != null) {
+          
+          LegacyNumericConfig numericConfig = fieldConfig
+              .get(ConfigurationKeys.LEGACY_NUMERIC_CONFIG);
+          
+          if (numericConfig != null) {
+            
+            FieldQueryNode lower = termRangeNode.getLowerBound();
+            FieldQueryNode upper = termRangeNode.getUpperBound();
+            
+            String lowerText = lower.getTextAsString();
+            String upperText = upper.getTextAsString();
+            NumberFormat numberFormat = numericConfig.getNumberFormat();
+            Number lowerNumber = null, upperNumber = null;
+            
+             if (lowerText.length() > 0) {
+              
+              try {
+                lowerNumber = numberFormat.parse(lowerText);
+                
+              } catch (ParseException e) {
+                throw new QueryNodeParseException(new MessageImpl(
+                    QueryParserMessages.COULD_NOT_PARSE_NUMBER, lower
+                        .getTextAsString(), numberFormat.getClass()
+                        .getCanonicalName()), e);
+              }
+              
+            }
+            
+             if (upperText.length() > 0) {
+            
+              try {
+                upperNumber = numberFormat.parse(upperText);
+                
+              } catch (ParseException e) {
+                throw new QueryNodeParseException(new MessageImpl(
+                    QueryParserMessages.COULD_NOT_PARSE_NUMBER, upper
+                        .getTextAsString(), numberFormat.getClass()
+                        .getCanonicalName()), e);
+              }
+            
+            }
+            
+            switch (numericConfig.getType()) {
+              case LONG:
+                if (upperNumber != null) upperNumber = upperNumber.longValue();
+                if (lowerNumber != null) lowerNumber = lowerNumber.longValue();
+                break;
+              case INT:
+                if (upperNumber != null) upperNumber = upperNumber.intValue();
+                if (lowerNumber != null) lowerNumber = lowerNumber.intValue();
+                break;
+              case DOUBLE:
+                if (upperNumber != null) upperNumber = upperNumber.doubleValue();
+                if (lowerNumber != null) lowerNumber = lowerNumber.doubleValue();
+                break;
+              case FLOAT:
+                if (upperNumber != null) upperNumber = upperNumber.floatValue();
+                if (lowerNumber != null) lowerNumber = lowerNumber.floatValue();
+            }
+            
+            LegacyNumericQueryNode lowerNode = new LegacyNumericQueryNode(
+                termRangeNode.getField(), lowerNumber, numberFormat);
+            LegacyNumericQueryNode upperNode = new LegacyNumericQueryNode(
+                termRangeNode.getField(), upperNumber, numberFormat);
+            
+            boolean lowerInclusive = termRangeNode.isLowerInclusive();
+            boolean upperInclusive = termRangeNode.isUpperInclusive();
+            
+            return new LegacyNumericRangeQueryNode(lowerNode, upperNode,
+                lowerInclusive, upperInclusive, numericConfig);
+            
+          }
+          
+        }
+        
+      }
+      
+    }
+    
+    return node;
+    
+  }
+  
+  @Override
+  protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException {
+    return node;
+  }
+  
+  @Override
+  protected List<QueryNode> setChildrenOrder(List<QueryNode> children)
+      throws QueryNodeException {
+    return children;
+  }
+  
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/NumericQueryNodeProcessor.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/NumericQueryNodeProcessor.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/NumericQueryNodeProcessor.java
deleted file mode 100644
index 10bd6ba..0000000
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/NumericQueryNodeProcessor.java
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.queryparser.flexible.standard.processors;
-
-import java.text.NumberFormat;
-import java.text.ParseException;
-import java.util.List;
-
-import org.apache.lucene.queryparser.flexible.messages.MessageImpl;
-import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
-import org.apache.lucene.queryparser.flexible.core.QueryNodeParseException;
-import org.apache.lucene.queryparser.flexible.core.config.FieldConfig;
-import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler;
-import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages;
-import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode;
-import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode;
-import org.apache.lucene.queryparser.flexible.core.nodes.RangeQueryNode;
-import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl;
-import org.apache.lucene.queryparser.flexible.standard.config.NumericConfig;
-import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.ConfigurationKeys;
-import org.apache.lucene.queryparser.flexible.standard.nodes.NumericQueryNode;
-import org.apache.lucene.queryparser.flexible.standard.nodes.NumericRangeQueryNode;
-
-/**
- * This processor is used to convert {@link FieldQueryNode}s to
- * {@link NumericRangeQueryNode}s. It looks for
- * {@link ConfigurationKeys#NUMERIC_CONFIG} set in the {@link FieldConfig} of
- * every {@link FieldQueryNode} found. If
- * {@link ConfigurationKeys#NUMERIC_CONFIG} is found, it considers that
- * {@link FieldQueryNode} to be a numeric query and convert it to
- * {@link NumericRangeQueryNode} with upper and lower inclusive and lower and
- * upper equals to the value represented by the {@link FieldQueryNode} converted
- * to {@link Number}. It means that <b>field:1</b> is converted to <b>field:[1
- * TO 1]</b>. <br>
- * <br>
- * Note that {@link FieldQueryNode}s children of a
- * {@link RangeQueryNode} are ignored.
- * 
- * @see ConfigurationKeys#NUMERIC_CONFIG
- * @see FieldQueryNode
- * @see NumericConfig
- * @see NumericQueryNode
- */
-public class NumericQueryNodeProcessor extends QueryNodeProcessorImpl {
-  
-  /**
-   * Constructs a {@link NumericQueryNodeProcessor} object.
-   */
-  public NumericQueryNodeProcessor() {
-  // empty constructor
-  }
-  
-  @Override
-  protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException {
-    
-    if (node instanceof FieldQueryNode
-        && !(node.getParent() instanceof RangeQueryNode)) {
-      
-      QueryConfigHandler config = getQueryConfigHandler();
-      
-      if (config != null) {
-        FieldQueryNode fieldNode = (FieldQueryNode) node;
-        FieldConfig fieldConfig = config.getFieldConfig(fieldNode
-            .getFieldAsString());
-        
-        if (fieldConfig != null) {
-          NumericConfig numericConfig = fieldConfig
-              .get(ConfigurationKeys.NUMERIC_CONFIG);
-          
-          if (numericConfig != null) {
-            
-            NumberFormat numberFormat = numericConfig.getNumberFormat();
-            String text = fieldNode.getTextAsString();
-            Number number = null;
-            
-            if (text.length() > 0) {
-              
-              try {
-                number = numberFormat.parse(text);
-                
-              } catch (ParseException e) {
-                throw new QueryNodeParseException(new MessageImpl(
-                    QueryParserMessages.COULD_NOT_PARSE_NUMBER, fieldNode
-                        .getTextAsString(), numberFormat.getClass()
-                        .getCanonicalName()), e);
-              }
-              
-              switch (numericConfig.getType()) {
-                case LONG:
-                  number = number.longValue();
-                  break;
-                case INT:
-                  number = number.intValue();
-                  break;
-                case DOUBLE:
-                  number = number.doubleValue();
-                  break;
-                case FLOAT:
-                  number = number.floatValue();
-              }
-              
-            } else {
-              throw new QueryNodeParseException(new MessageImpl(
-                  QueryParserMessages.NUMERIC_CANNOT_BE_EMPTY, fieldNode.getFieldAsString()));
-            }
-            
-            NumericQueryNode lowerNode = new NumericQueryNode(fieldNode
-                .getField(), number, numberFormat);
-            NumericQueryNode upperNode = new NumericQueryNode(fieldNode
-                .getField(), number, numberFormat);
-            
-            return new NumericRangeQueryNode(lowerNode, upperNode, true, true,
-                numericConfig);
-            
-          }
-          
-        }
-        
-      }
-      
-    }
-    
-    return node;
-    
-  }
-  
-  @Override
-  protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException {
-    return node;
-  }
-  
-  @Override
-  protected List<QueryNode> setChildrenOrder(List<QueryNode> children)
-      throws QueryNodeException {
-    return children;
-  }
-  
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/NumericRangeQueryNodeProcessor.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/NumericRangeQueryNodeProcessor.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/NumericRangeQueryNodeProcessor.java
deleted file mode 100644
index bbe5284..0000000
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/NumericRangeQueryNodeProcessor.java
+++ /dev/null
@@ -1,168 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.queryparser.flexible.standard.processors;
-
-import java.text.NumberFormat;
-import java.text.ParseException;
-import java.util.List;
-
-import org.apache.lucene.queryparser.flexible.messages.MessageImpl;
-import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
-import org.apache.lucene.queryparser.flexible.core.QueryNodeParseException;
-import org.apache.lucene.queryparser.flexible.core.config.FieldConfig;
-import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler;
-import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages;
-import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode;
-import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode;
-import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl;
-import org.apache.lucene.queryparser.flexible.core.util.StringUtils;
-import org.apache.lucene.queryparser.flexible.standard.config.NumericConfig;
-import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.ConfigurationKeys;
-import org.apache.lucene.queryparser.flexible.standard.nodes.NumericQueryNode;
-import org.apache.lucene.queryparser.flexible.standard.nodes.NumericRangeQueryNode;
-import org.apache.lucene.queryparser.flexible.standard.nodes.TermRangeQueryNode;
-
-/**
- * This processor is used to convert {@link TermRangeQueryNode}s to
- * {@link NumericRangeQueryNode}s. It looks for
- * {@link ConfigurationKeys#NUMERIC_CONFIG} set in the {@link FieldConfig} of
- * every {@link TermRangeQueryNode} found. If
- * {@link ConfigurationKeys#NUMERIC_CONFIG} is found, it considers that
- * {@link TermRangeQueryNode} to be a numeric range query and convert it to
- * {@link NumericRangeQueryNode}.
- * 
- * @see ConfigurationKeys#NUMERIC_CONFIG
- * @see TermRangeQueryNode
- * @see NumericConfig
- * @see NumericRangeQueryNode
- */
-public class NumericRangeQueryNodeProcessor extends QueryNodeProcessorImpl {
-  
-  /**
-   * Constructs an empty {@link NumericRangeQueryNode} object.
-   */
-  public NumericRangeQueryNodeProcessor() {
-  // empty constructor
-  }
-  
-  @Override
-  protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException {
-    
-    if (node instanceof TermRangeQueryNode) {
-      QueryConfigHandler config = getQueryConfigHandler();
-      
-      if (config != null) {
-        TermRangeQueryNode termRangeNode = (TermRangeQueryNode) node;
-        FieldConfig fieldConfig = config.getFieldConfig(StringUtils
-            .toString(termRangeNode.getField()));
-        
-        if (fieldConfig != null) {
-          
-          NumericConfig numericConfig = fieldConfig
-              .get(ConfigurationKeys.NUMERIC_CONFIG);
-          
-          if (numericConfig != null) {
-            
-            FieldQueryNode lower = termRangeNode.getLowerBound();
-            FieldQueryNode upper = termRangeNode.getUpperBound();
-            
-            String lowerText = lower.getTextAsString();
-            String upperText = upper.getTextAsString();
-            NumberFormat numberFormat = numericConfig.getNumberFormat();
-            Number lowerNumber = null, upperNumber = null;
-            
-             if (lowerText.length() > 0) {
-              
-              try {
-                lowerNumber = numberFormat.parse(lowerText);
-                
-              } catch (ParseException e) {
-                throw new QueryNodeParseException(new MessageImpl(
-                    QueryParserMessages.COULD_NOT_PARSE_NUMBER, lower
-                        .getTextAsString(), numberFormat.getClass()
-                        .getCanonicalName()), e);
-              }
-              
-            }
-            
-             if (upperText.length() > 0) {
-            
-              try {
-                upperNumber = numberFormat.parse(upperText);
-                
-              } catch (ParseException e) {
-                throw new QueryNodeParseException(new MessageImpl(
-                    QueryParserMessages.COULD_NOT_PARSE_NUMBER, upper
-                        .getTextAsString(), numberFormat.getClass()
-                        .getCanonicalName()), e);
-              }
-            
-            }
-            
-            switch (numericConfig.getType()) {
-              case LONG:
-                if (upperNumber != null) upperNumber = upperNumber.longValue();
-                if (lowerNumber != null) lowerNumber = lowerNumber.longValue();
-                break;
-              case INT:
-                if (upperNumber != null) upperNumber = upperNumber.intValue();
-                if (lowerNumber != null) lowerNumber = lowerNumber.intValue();
-                break;
-              case DOUBLE:
-                if (upperNumber != null) upperNumber = upperNumber.doubleValue();
-                if (lowerNumber != null) lowerNumber = lowerNumber.doubleValue();
-                break;
-              case FLOAT:
-                if (upperNumber != null) upperNumber = upperNumber.floatValue();
-                if (lowerNumber != null) lowerNumber = lowerNumber.floatValue();
-            }
-            
-            NumericQueryNode lowerNode = new NumericQueryNode(
-                termRangeNode.getField(), lowerNumber, numberFormat);
-            NumericQueryNode upperNode = new NumericQueryNode(
-                termRangeNode.getField(), upperNumber, numberFormat);
-            
-            boolean lowerInclusive = termRangeNode.isLowerInclusive();
-            boolean upperInclusive = termRangeNode.isUpperInclusive();
-            
-            return new NumericRangeQueryNode(lowerNode, upperNode,
-                lowerInclusive, upperInclusive, numericConfig);
-            
-          }
-          
-        }
-        
-      }
-      
-    }
-    
-    return node;
-    
-  }
-  
-  @Override
-  protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException {
-    return node;
-  }
-  
-  @Override
-  protected List<QueryNode> setChildrenOrder(List<QueryNode> children)
-      throws QueryNodeException {
-    return children;
-  }
-  
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/PointQueryNodeProcessor.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/PointQueryNodeProcessor.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/PointQueryNodeProcessor.java
new file mode 100644
index 0000000..81a8449
--- /dev/null
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/PointQueryNodeProcessor.java
@@ -0,0 +1,136 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.queryparser.flexible.standard.processors;
+
+import java.text.NumberFormat;
+import java.text.ParseException;
+import java.util.List;
+
+import org.apache.lucene.queryparser.flexible.messages.MessageImpl;
+import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
+import org.apache.lucene.queryparser.flexible.core.QueryNodeParseException;
+import org.apache.lucene.queryparser.flexible.core.config.FieldConfig;
+import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler;
+import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages;
+import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode;
+import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode;
+import org.apache.lucene.queryparser.flexible.core.nodes.RangeQueryNode;
+import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl;
+import org.apache.lucene.queryparser.flexible.standard.config.PointsConfig;
+import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.ConfigurationKeys;
+import org.apache.lucene.queryparser.flexible.standard.nodes.PointQueryNode;
+import org.apache.lucene.queryparser.flexible.standard.nodes.PointRangeQueryNode;
+
+/**
+ * This processor is used to convert {@link FieldQueryNode}s to
+ * {@link PointRangeQueryNode}s. It looks for
+ * {@link ConfigurationKeys#POINTS_CONFIG} set in the {@link FieldConfig} of
+ * every {@link FieldQueryNode} found. If
+ * {@link ConfigurationKeys#POINTS_CONFIG} is found, it considers the
+ * {@link FieldQueryNode} to be a numeric query and converts it to a
+ * {@link PointRangeQueryNode} whose lower and upper bounds are both
+ * inclusive and equal to the value represented by the {@link FieldQueryNode},
+ * converted to a {@link Number}. This means that <b>field:1</b> is converted
+ * to <b>field:[1 TO 1]</b>. <br>
+ * <br>
+ * Note that {@link FieldQueryNode}s children of a
+ * {@link RangeQueryNode} are ignored.
+ * 
+ * @see ConfigurationKeys#POINTS_CONFIG
+ * @see FieldQueryNode
+ * @see PointsConfig
+ * @see PointQueryNode
+ */
+public class PointQueryNodeProcessor extends QueryNodeProcessorImpl {
+  
+  /**
+   * Constructs a {@link PointQueryNodeProcessor} object.
+   */
+  public PointQueryNodeProcessor() {
+  // empty constructor
+  }
+  
+  @Override
+  protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException {
+    
+    if (node instanceof FieldQueryNode
+        && !(node.getParent() instanceof RangeQueryNode)) {
+      
+      QueryConfigHandler config = getQueryConfigHandler();
+      
+      if (config != null) {
+        FieldQueryNode fieldNode = (FieldQueryNode) node;
+        FieldConfig fieldConfig = config.getFieldConfig(fieldNode
+            .getFieldAsString());
+        
+        if (fieldConfig != null) {
+          PointsConfig numericConfig = fieldConfig.get(ConfigurationKeys.POINTS_CONFIG);
+          
+          if (numericConfig != null) {
+            
+            NumberFormat numberFormat = numericConfig.getNumberFormat();
+            String text = fieldNode.getTextAsString();
+            Number number = null;
+            
+            if (text.length() > 0) {
+              
+              try {
+                number = numberFormat.parse(text);
+                
+              } catch (ParseException e) {
+                throw new QueryNodeParseException(new MessageImpl(
+                    QueryParserMessages.COULD_NOT_PARSE_NUMBER, fieldNode
+                        .getTextAsString(), numberFormat.getClass()
+                        .getCanonicalName()), e);
+              }
+              
+              if (Integer.class.equals(numericConfig.getType())) {
+                number = number.intValue();
+              } else if (Long.class.equals(numericConfig.getType())) {
+                number = number.longValue();
+              } else if (Double.class.equals(numericConfig.getType())) {
+                number = number.doubleValue();
+              } else if (Float.class.equals(numericConfig.getType())) {
+                number = number.floatValue();
+              }
+              
+            } else {
+              throw new QueryNodeParseException(new MessageImpl(
+                  QueryParserMessages.NUMERIC_CANNOT_BE_EMPTY, fieldNode.getFieldAsString()));
+            }
+            
+            PointQueryNode lowerNode = new PointQueryNode(fieldNode.getField(), number, numberFormat);
+            PointQueryNode upperNode = new PointQueryNode(fieldNode.getField(), number, numberFormat);
+            
+            return new PointRangeQueryNode(lowerNode, upperNode, true, true, numericConfig);
+          }
+        }
+      }
+    }
+    return node;
+  }
+  
+  @Override
+  protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException {
+    return node;
+  }
+  
+  @Override
+  protected List<QueryNode> setChildrenOrder(List<QueryNode> children) throws QueryNodeException {
+    return children;
+  }
+}
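
For readers following along, here is a minimal sketch of how this processor is typically exercised end to end, assuming the setPointsConfigMap(...) hook on StandardQueryParser that accompanies these processors; the field name "price", the locale, the analyzer, and the class name are illustrative, not part of the commit:

  import java.text.NumberFormat;
  import java.util.HashMap;
  import java.util.Locale;
  import java.util.Map;

  import org.apache.lucene.analysis.standard.StandardAnalyzer;
  import org.apache.lucene.queryparser.flexible.standard.StandardQueryParser;
  import org.apache.lucene.queryparser.flexible.standard.config.PointsConfig;
  import org.apache.lucene.search.Query;

  public class PointsConfigSketch {
    public static void main(String[] args) throws Exception {
      StandardQueryParser qp = new StandardQueryParser(new StandardAnalyzer());

      // Register a PointsConfig for the (hypothetical) "price" field so the
      // processor above recognizes queries against it as numeric queries.
      Map<String, PointsConfig> pointsConfigMap = new HashMap<>();
      pointsConfigMap.put("price",
          new PointsConfig(NumberFormat.getIntegerInstance(Locale.ROOT), Integer.class));
      qp.setPointsConfigMap(pointsConfigMap);

      // "price:42" is rewritten to the inclusive range [42 TO 42] over points.
      Query query = qp.parse("price:42", "defaultField");
      System.out.println(query);
    }
  }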

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/PointRangeQueryNodeProcessor.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/PointRangeQueryNodeProcessor.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/PointRangeQueryNodeProcessor.java
new file mode 100644
index 0000000..2ffc437
--- /dev/null
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/PointRangeQueryNodeProcessor.java
@@ -0,0 +1,148 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.queryparser.flexible.standard.processors;
+
+import java.text.NumberFormat;
+import java.text.ParseException;
+import java.util.List;
+
+import org.apache.lucene.queryparser.flexible.messages.MessageImpl;
+import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
+import org.apache.lucene.queryparser.flexible.core.QueryNodeParseException;
+import org.apache.lucene.queryparser.flexible.core.config.FieldConfig;
+import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler;
+import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages;
+import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode;
+import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode;
+import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl;
+import org.apache.lucene.queryparser.flexible.core.util.StringUtils;
+import org.apache.lucene.queryparser.flexible.standard.config.PointsConfig;
+import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.ConfigurationKeys;
+import org.apache.lucene.queryparser.flexible.standard.nodes.PointQueryNode;
+import org.apache.lucene.queryparser.flexible.standard.nodes.PointRangeQueryNode;
+import org.apache.lucene.queryparser.flexible.standard.nodes.TermRangeQueryNode;
+
+/**
+ * This processor is used to convert {@link TermRangeQueryNode}s to
+ * {@link PointRangeQueryNode}s. It looks for
+ * {@link ConfigurationKeys#POINTS_CONFIG} set in the {@link FieldConfig} of
+ * every {@link TermRangeQueryNode} found. If
+ * {@link ConfigurationKeys#POINTS_CONFIG} is found, it considers that
+ * {@link TermRangeQueryNode} to be a numeric range query and converts it to a
+ * {@link PointRangeQueryNode}.
+ * 
+ * @see ConfigurationKeys#POINTS_CONFIG
+ * @see TermRangeQueryNode
+ * @see PointsConfig
+ * @see PointRangeQueryNode
+ */
+public class PointRangeQueryNodeProcessor extends QueryNodeProcessorImpl {
+
+  /**
+   * Constructs an empty {@link PointRangeQueryNodeProcessor} object.
+   */
+  public PointRangeQueryNodeProcessor() {
+    // empty constructor
+  }
+
+  @Override
+  protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException {
+
+    if (node instanceof TermRangeQueryNode) {
+      QueryConfigHandler config = getQueryConfigHandler();
+
+      if (config != null) {
+        TermRangeQueryNode termRangeNode = (TermRangeQueryNode) node;
+        FieldConfig fieldConfig = config.getFieldConfig(StringUtils.toString(termRangeNode.getField()));
+
+        if (fieldConfig != null) {
+          PointsConfig numericConfig = fieldConfig.get(ConfigurationKeys.POINTS_CONFIG);
+
+          if (numericConfig != null) {
+            FieldQueryNode lower = termRangeNode.getLowerBound();
+            FieldQueryNode upper = termRangeNode.getUpperBound();
+
+            String lowerText = lower.getTextAsString();
+            String upperText = upper.getTextAsString();
+            NumberFormat numberFormat = numericConfig.getNumberFormat();
+            Number lowerNumber = null, upperNumber = null;
+
+            if (lowerText.length() > 0) {
+
+              try {
+                lowerNumber = numberFormat.parse(lowerText);
+
+              } catch (ParseException e) {
+                throw new QueryNodeParseException(new MessageImpl(
+                    QueryParserMessages.COULD_NOT_PARSE_NUMBER, lower
+                    .getTextAsString(), numberFormat.getClass()
+                    .getCanonicalName()), e);
+              }
+
+            }
+
+            if (upperText.length() > 0) {
+
+              try {
+                upperNumber = numberFormat.parse(upperText);
+
+              } catch (ParseException e) {
+                throw new QueryNodeParseException(new MessageImpl(
+                    QueryParserMessages.COULD_NOT_PARSE_NUMBER, upper
+                    .getTextAsString(), numberFormat.getClass()
+                    .getCanonicalName()), e);
+              }
+            }
+
+            if (Integer.class.equals(numericConfig.getType())) {
+              if (upperNumber != null) upperNumber = upperNumber.intValue();
+              if (lowerNumber != null) lowerNumber = lowerNumber.intValue();
+            } else if (Long.class.equals(numericConfig.getType())) {
+              if (upperNumber != null) upperNumber = upperNumber.longValue();
+              if (lowerNumber != null) lowerNumber = lowerNumber.longValue();
+            } else if (Double.class.equals(numericConfig.getType())) {
+              if (upperNumber != null) upperNumber = upperNumber.doubleValue();
+              if (lowerNumber != null) lowerNumber = lowerNumber.doubleValue();
+            } else if (Float.class.equals(numericConfig.getType())) {
+              if (upperNumber != null) upperNumber = upperNumber.floatValue();
+              if (lowerNumber != null) lowerNumber = lowerNumber.floatValue();
+            }
+
+            PointQueryNode lowerNode = new PointQueryNode(termRangeNode.getField(), lowerNumber, numberFormat);
+            PointQueryNode upperNode = new PointQueryNode(termRangeNode.getField(), upperNumber, numberFormat);
+
+            boolean lowerInclusive = termRangeNode.isLowerInclusive();
+            boolean upperInclusive = termRangeNode.isUpperInclusive();
+
+            return new PointRangeQueryNode(lowerNode, upperNode, lowerInclusive, upperInclusive, numericConfig);
+          }
+        }  
+      }
+    }
+    return node;
+  }
+
+  @Override
+  protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException {
+    return node;
+  }
+
+  @Override
+  protected List<QueryNode> setChildrenOrder(List<QueryNode> children) throws QueryNodeException {
+    return children;
+  }
+}
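
Continuing the sketch given after the PointQueryNodeProcessor diff above (same assumed "price" PointsConfig and qp), explicit range syntax goes through this processor instead of being left as a term range:

  // Given the qp and "price" PointsConfig from the earlier sketch:
  Query range = qp.parse("price:[5 TO 10]", "defaultField");  // both bounds inclusive
  Query mixed = qp.parse("price:{5 TO 10]", "defaultField");  // exclusive lower bound
  // Each parse produces a PointRangeQueryNode carrying the parsed bounds and
  // inclusivity flags, which is then built into the equivalent of
  // IntPoint.newRangeQuery("price", lower, upper) at query-build time.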

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/StandardQueryNodeProcessorPipeline.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/StandardQueryNodeProcessorPipeline.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/StandardQueryNodeProcessorPipeline.java
index 06f38c2..6e4a394 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/StandardQueryNodeProcessorPipeline.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/StandardQueryNodeProcessorPipeline.java
@@ -55,8 +55,10 @@ public class StandardQueryNodeProcessorPipeline extends
     add(new FuzzyQueryNodeProcessor());
     add(new MatchAllDocsQueryNodeProcessor());
     add(new OpenRangeQueryNodeProcessor());
-    add(new NumericQueryNodeProcessor());
-    add(new NumericRangeQueryNodeProcessor());
+    add(new LegacyNumericQueryNodeProcessor());
+    add(new LegacyNumericRangeQueryNodeProcessor());
+    add(new PointQueryNodeProcessor());
+    add(new PointRangeQueryNodeProcessor());
     add(new LowercaseExpandedTermsQueryNodeProcessor());
     add(new TermRangeQueryNodeProcessor());
     add(new AllowLeadingWildcardProcessor());    

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/89cc676f/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestLegacyNumericQueryParser.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestLegacyNumericQueryParser.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestLegacyNumericQueryParser.java
new file mode 100644
index 0000000..c6ab7f5
--- /dev/null
+++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestLegacyNumericQueryParser.java
@@ -0,0 +1,535 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.queryparser.flexible.standard;
+
+import java.io.IOException;
+import java.text.DateFormat;
+import java.text.NumberFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Random;
+import java.util.TimeZone;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.LegacyDoubleField;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FieldType.LegacyNumericType;
+import org.apache.lucene.document.FieldType;
+import org.apache.lucene.document.LegacyFloatField;
+import org.apache.lucene.document.LegacyIntField;
+import org.apache.lucene.document.LegacyLongField;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
+import org.apache.lucene.queryparser.flexible.core.parser.EscapeQuerySyntax;
+import org.apache.lucene.queryparser.flexible.standard.config.NumberDateFormat;
+import org.apache.lucene.queryparser.flexible.standard.config.LegacyNumericConfig;
+import org.apache.lucene.queryparser.flexible.standard.parser.EscapeQuerySyntaxImpl;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.TestUtil;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestLegacyNumericQueryParser extends LuceneTestCase {
+  
+  private static enum NumberType {
+    NEGATIVE, ZERO, POSITIVE;
+  }
+  
+  final private static int[] DATE_STYLES = {DateFormat.FULL, DateFormat.LONG,
+      DateFormat.MEDIUM, DateFormat.SHORT};
+  
+  final private static int PRECISION_STEP = 8;
+  final private static String FIELD_NAME = "field";
+  private static Locale LOCALE;
+  private static TimeZone TIMEZONE;
+  private static Map<String,Number> RANDOM_NUMBER_MAP;
+  private static EscapeQuerySyntax ESCAPER = new EscapeQuerySyntaxImpl();
+  final private static String DATE_FIELD_NAME = "date";
+  private static int DATE_STYLE;
+  private static int TIME_STYLE;
+  
+  private static Analyzer ANALYZER;
+  
+  private static NumberFormat NUMBER_FORMAT;
+  
+  private static StandardQueryParser qp;
+  
+  private static NumberDateFormat DATE_FORMAT;
+  
+  private static Directory directory = null;
+  private static IndexReader reader = null;
+  private static IndexSearcher searcher = null;
+  
+  private static boolean checkDateFormatSanity(DateFormat dateFormat, long date) {
+    try {
+      return date == dateFormat.parse(dateFormat.format(new Date(date)))
+        .getTime();
+    } catch (ParseException e) {
+      return false;
+    }
+  }
+  
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    ANALYZER = new MockAnalyzer(random());
+    
+    qp = new StandardQueryParser(ANALYZER);
+    
+    final HashMap<String,Number> randomNumberMap = new HashMap<>();
+    
+    SimpleDateFormat dateFormat;
+    long randomDate;
+    boolean dateFormatSanityCheckPass;
+    int count = 0;
+    do {
+      if (count > 100) {
+        fail("This test has problems to find a sane random DateFormat/NumberFormat. Stopped trying after 100 iterations.");
+      }
+      
+      dateFormatSanityCheckPass = true;
+      LOCALE = randomLocale(random());
+      TIMEZONE = randomTimeZone(random());
+      DATE_STYLE = randomDateStyle(random());
+      TIME_STYLE = randomDateStyle(random());
+      
+      // assumes localized date pattern will have at least year, month, day,
+      // hour, minute
+      dateFormat = (SimpleDateFormat) DateFormat.getDateTimeInstance(
+          DATE_STYLE, TIME_STYLE, LOCALE);
+      
+      // not all date patterns include era, full year, timezone and second,
+      // so we add them here
+      dateFormat.applyPattern(dateFormat.toPattern() + " G s Z yyyy");
+      dateFormat.setTimeZone(TIMEZONE);
+      
+      DATE_FORMAT = new NumberDateFormat(dateFormat);
+      
+      do {
+        randomDate = random().nextLong();
+        
+        // prune the date value so we don't pass insane values to some
+        // calendars.
+        randomDate = randomDate % 3400000000000L;
+        
+        // truncate to second
+        randomDate = (randomDate / 1000L) * 1000L;
+        
+        // only positive values
+        randomDate = Math.abs(randomDate);
+      } while (randomDate == 0L);
+      
+      dateFormatSanityCheckPass &= checkDateFormatSanity(dateFormat, randomDate);
+      
+      dateFormatSanityCheckPass &= checkDateFormatSanity(dateFormat, 0);
+      
+      dateFormatSanityCheckPass &= checkDateFormatSanity(dateFormat,
+          -randomDate);
+      
+      count++;
+    } while (!dateFormatSanityCheckPass);
+    
+    NUMBER_FORMAT = NumberFormat.getNumberInstance(LOCALE);
+    NUMBER_FORMAT.setMaximumFractionDigits((random().nextInt() & 20) + 1);
+    NUMBER_FORMAT.setMinimumFractionDigits((random().nextInt() & 20) + 1);
+    NUMBER_FORMAT.setMaximumIntegerDigits((random().nextInt() & 20) + 1);
+    NUMBER_FORMAT.setMinimumIntegerDigits((random().nextInt() & 20) + 1);
+    
+    double randomDouble;
+    long randomLong;
+    int randomInt;
+    float randomFloat;
+    
+    while ((randomLong = normalizeNumber(Math.abs(random().nextLong()))
+        .longValue()) == 0L)
+      ;
+    while ((randomDouble = normalizeNumber(Math.abs(random().nextDouble()))
+        .doubleValue()) == 0.0)
+      ;
+    while ((randomFloat = normalizeNumber(Math.abs(random().nextFloat()))
+        .floatValue()) == 0.0f)
+      ;
+    while ((randomInt = normalizeNumber(Math.abs(random().nextInt())).intValue()) == 0)
+      ;
+    
+    randomNumberMap.put(LegacyNumericType.LONG.name(), randomLong);
+    randomNumberMap.put(FieldType.LegacyNumericType.INT.name(), randomInt);
+    randomNumberMap.put(LegacyNumericType.FLOAT.name(), randomFloat);
+    randomNumberMap.put(LegacyNumericType.DOUBLE.name(), randomDouble);
+    randomNumberMap.put(DATE_FIELD_NAME, randomDate);
+    
+    RANDOM_NUMBER_MAP = Collections.unmodifiableMap(randomNumberMap);
+    
+    directory = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), directory,
+        newIndexWriterConfig(new MockAnalyzer(random()))
+            .setMaxBufferedDocs(TestUtil.nextInt(random(), 50, 1000))
+            .setMergePolicy(newLogMergePolicy()));
+    
+    Document doc = new Document();
+    HashMap<String,LegacyNumericConfig> numericConfigMap = new HashMap<>();
+    HashMap<String,Field> numericFieldMap = new HashMap<>();
+    qp.setLegacyNumericConfigMap(numericConfigMap);
+    
+    for (LegacyNumericType type : LegacyNumericType.values()) {
+      numericConfigMap.put(type.name(), new LegacyNumericConfig(PRECISION_STEP,
+          NUMBER_FORMAT, type));
+
+      FieldType ft = new FieldType(LegacyIntField.TYPE_NOT_STORED);
+      ft.setNumericType(type);
+      ft.setStored(true);
+      ft.setNumericPrecisionStep(PRECISION_STEP);
+      ft.freeze();
+      final Field field;
+
+      switch(type) {
+      case INT:
+        field = new LegacyIntField(type.name(), 0, ft);
+        break;
+      case FLOAT:
+        field = new LegacyFloatField(type.name(), 0.0f, ft);
+        break;
+      case LONG:
+        field = new LegacyLongField(type.name(), 0L, ft);
+        break;
+      case DOUBLE:
+        field = new LegacyDoubleField(type.name(), 0.0, ft);
+        break;
+      default:
+        fail();
+        field = null;
+      }
+      numericFieldMap.put(type.name(), field);
+      doc.add(field);
+    }
+    
+    numericConfigMap.put(DATE_FIELD_NAME, new LegacyNumericConfig(PRECISION_STEP,
+        DATE_FORMAT, LegacyNumericType.LONG));
+    FieldType ft = new FieldType(LegacyLongField.TYPE_NOT_STORED);
+    ft.setStored(true);
+    ft.setNumericPrecisionStep(PRECISION_STEP);
+    LegacyLongField dateField = new LegacyLongField(DATE_FIELD_NAME, 0L, ft);
+    numericFieldMap.put(DATE_FIELD_NAME, dateField);
+    doc.add(dateField);
+    
+    for (NumberType numberType : NumberType.values()) {
+      setFieldValues(numberType, numericFieldMap);
+      if (VERBOSE) System.out.println("Indexing document: " + doc);
+      writer.addDocument(doc);
+    }
+    
+    reader = writer.getReader();
+    searcher = newSearcher(reader);
+    writer.close();
+    
+  }
+  
+  private static Number getNumberType(NumberType numberType, String fieldName) {
+    
+    if (numberType == null) {
+      return null;
+    }
+    
+    switch (numberType) {
+      
+      case POSITIVE:
+        return RANDOM_NUMBER_MAP.get(fieldName);
+        
+      case NEGATIVE:
+        Number number = RANDOM_NUMBER_MAP.get(fieldName);
+        
+        if (LegacyNumericType.LONG.name().equals(fieldName)
+            || DATE_FIELD_NAME.equals(fieldName)) {
+          number = -number.longValue();
+          
+        } else if (FieldType.LegacyNumericType.DOUBLE.name().equals(fieldName)) {
+          number = -number.doubleValue();
+          
+        } else if (FieldType.LegacyNumericType.FLOAT.name().equals(fieldName)) {
+          number = -number.floatValue();
+          
+        } else if (LegacyNumericType.INT.name().equals(fieldName)) {
+          number = -number.intValue();
+          
+        } else {
+          throw new IllegalArgumentException("field name not found: "
+              + fieldName);
+        }
+        
+        return number;
+        
+      default:
+        return 0;
+        
+    }
+    
+  }
+  
+  private static void setFieldValues(NumberType numberType,
+      HashMap<String,Field> numericFieldMap) {
+    
+    Number number = getNumberType(numberType, LegacyNumericType.DOUBLE
+        .name());
+    numericFieldMap.get(LegacyNumericType.DOUBLE.name()).setDoubleValue(
+        number.doubleValue());
+    
+    number = getNumberType(numberType, FieldType.LegacyNumericType.INT.name());
+    numericFieldMap.get(FieldType.LegacyNumericType.INT.name()).setIntValue(
+        number.intValue());
+    
+    number = getNumberType(numberType, LegacyNumericType.LONG.name());
+    numericFieldMap.get(FieldType.LegacyNumericType.LONG.name()).setLongValue(
+        number.longValue());
+    
+    number = getNumberType(numberType, FieldType.LegacyNumericType.FLOAT.name());
+    numericFieldMap.get(FieldType.LegacyNumericType.FLOAT.name()).setFloatValue(
+        number.floatValue());
+    
+    number = getNumberType(numberType, DATE_FIELD_NAME);
+    numericFieldMap.get(DATE_FIELD_NAME).setLongValue(number.longValue());
+  }
+  
+  private static int randomDateStyle(Random random) {
+    return DATE_STYLES[random.nextInt(DATE_STYLES.length)];
+  }
+  
+  @Test
+  public void testInclusiveNumericRange() throws Exception {
+    assertRangeQuery(NumberType.ZERO, NumberType.ZERO, true, true, 1);
+    assertRangeQuery(NumberType.ZERO, NumberType.POSITIVE, true, true, 2);
+    assertRangeQuery(NumberType.NEGATIVE, NumberType.ZERO, true, true, 2);
+    assertRangeQuery(NumberType.NEGATIVE, NumberType.POSITIVE, true, true, 3);
+    assertRangeQuery(NumberType.NEGATIVE, NumberType.NEGATIVE, true, true, 1);
+  }
+  
+  @Test
+  // test disabled since standard syntax parser does not work with inclusive and
+  // exclusive at the same time
+  public void testInclusiveLowerNumericRange() throws Exception {
+    assertRangeQuery(NumberType.NEGATIVE, NumberType.ZERO, false, true, 1);
+    assertRangeQuery(NumberType.ZERO, NumberType.POSITIVE, false, true, 1);
+    assertRangeQuery(NumberType.NEGATIVE, NumberType.POSITIVE, false, true, 2);
+    assertRangeQuery(NumberType.NEGATIVE, NumberType.NEGATIVE, false, true, 0);
+  }
+  
+  @Test
+  // test disabled since standard syntax parser does not work with inclusive and
+  // exclusive at the same time
+  public void testInclusiveUpperNumericRange() throws Exception {
+    assertRangeQuery(NumberType.NEGATIVE, NumberType.ZERO, true, false, 1);
+    assertRangeQuery(NumberType.ZERO, NumberType.POSITIVE, true, false, 1);
+    assertRangeQuery(NumberType.NEGATIVE, NumberType.POSITIVE, true, false, 2);
+    assertRangeQuery(NumberType.NEGATIVE, NumberType.NEGATIVE, true, false, 0);
+  }
+  
+  @Test
+  public void testExclusiveNumericRange() throws Exception {
+    assertRangeQuery(NumberType.ZERO, NumberType.ZERO, false, false, 0);
+    assertRangeQuery(NumberType.ZERO, NumberType.POSITIVE, false, false, 0);
+    assertRangeQuery(NumberType.NEGATIVE, NumberType.ZERO, false, false, 0);
+    assertRangeQuery(NumberType.NEGATIVE, NumberType.POSITIVE, false, false, 1);
+    assertRangeQuery(NumberType.NEGATIVE, NumberType.NEGATIVE, false, false, 0);
+  }
+  
+  @Test
+  public void testOpenRangeNumericQuery() throws Exception {
+    assertOpenRangeQuery(NumberType.ZERO, "<", 1);
+    assertOpenRangeQuery(NumberType.POSITIVE, "<", 2);
+    assertOpenRangeQuery(NumberType.NEGATIVE, "<", 0);
+    
+    assertOpenRangeQuery(NumberType.ZERO, "<=", 2);
+    assertOpenRangeQuery(NumberType.POSITIVE, "<=", 3);
+    assertOpenRangeQuery(NumberType.NEGATIVE, "<=", 1);
+    
+    assertOpenRangeQuery(NumberType.ZERO, ">", 1);
+    assertOpenRangeQuery(NumberType.POSITIVE, ">", 0);
+    assertOpenRangeQuery(NumberType.NEGATIVE, ">", 2);
+    
+    assertOpenRangeQuery(NumberType.ZERO, ">=", 2);
+    assertOpenRangeQuery(NumberType.POSITIVE, ">=", 1);
+    assertOpenRangeQuery(NumberType.NEGATIVE, ">=", 3);
+    
+    assertOpenRangeQuery(NumberType.NEGATIVE, "=", 1);
+    assertOpenRangeQuery(NumberType.ZERO, "=", 1);
+    assertOpenRangeQuery(NumberType.POSITIVE, "=", 1);
+    
+    assertRangeQuery(NumberType.NEGATIVE, null, true, true, 3);
+    assertRangeQuery(NumberType.NEGATIVE, null, false, true, 2);
+    assertRangeQuery(NumberType.POSITIVE, null, true, false, 1);
+    assertRangeQuery(NumberType.ZERO, null, false, false, 1);
+
+    assertRangeQuery(null, NumberType.POSITIVE, true, true, 3);
+    assertRangeQuery(null, NumberType.POSITIVE, true, false, 2);
+    assertRangeQuery(null, NumberType.NEGATIVE, false, true, 1);
+    assertRangeQuery(null, NumberType.ZERO, false, false, 1);
+    
+    assertRangeQuery(null, null, false, false, 3);
+    assertRangeQuery(null, null, true, true, 3);
+    
+  }
+  
+  @Test
+  public void testSimpleNumericQuery() throws Exception {
+    assertSimpleQuery(NumberType.ZERO, 1);
+    assertSimpleQuery(NumberType.POSITIVE, 1);
+    assertSimpleQuery(NumberType.NEGATIVE, 1);
+  }
+  
+  public void assertRangeQuery(NumberType lowerType, NumberType upperType,
+      boolean lowerInclusive, boolean upperInclusive, int expectedDocCount)
+      throws QueryNodeException, IOException {
+    
+    StringBuilder sb = new StringBuilder();
+    
+    String lowerInclusiveStr = (lowerInclusive ? "[" : "{");
+    String upperInclusiveStr = (upperInclusive ? "]" : "}");
+    
+    for (LegacyNumericType type : LegacyNumericType.values()) {
+      String lowerStr = numberToString(getNumberType(lowerType, type.name()));
+      String upperStr = numberToString(getNumberType(upperType, type.name()));
+      
+      sb.append("+").append(type.name()).append(':').append(lowerInclusiveStr)
+          .append('"').append(lowerStr).append("\" TO \"").append(upperStr)
+          .append('"').append(upperInclusiveStr).append(' ');
+    }
+    
+    Number lowerDateNumber = getNumberType(lowerType, DATE_FIELD_NAME);
+    Number upperDateNumber = getNumberType(upperType, DATE_FIELD_NAME);
+    String lowerDateStr;
+    String upperDateStr;
+    
+    if (lowerDateNumber != null) {
+      lowerDateStr = ESCAPER.escape(
+          DATE_FORMAT.format(new Date(lowerDateNumber.longValue())), LOCALE,
+          EscapeQuerySyntax.Type.STRING).toString();
+      
+    } else {
+      lowerDateStr = "*";
+    }
+    
+    if (upperDateNumber != null) {
+      upperDateStr = ESCAPER.escape(
+          DATE_FORMAT.format(new Date(upperDateNumber.longValue())), LOCALE,
+          EscapeQuerySyntax.Type.STRING).toString();
+      
+    } else {
+      upperDateStr = "*";
+    }
+    
+    sb.append("+").append(DATE_FIELD_NAME).append(':')
+        .append(lowerInclusiveStr).append('"').append(lowerDateStr).append(
+            "\" TO \"").append(upperDateStr).append('"').append(
+            upperInclusiveStr);
+    
+    testQuery(sb.toString(), expectedDocCount);
+    
+  }
+  
+  public void assertOpenRangeQuery(NumberType boundType, String operator, int expectedDocCount)
+      throws QueryNodeException, IOException {
+
+    StringBuilder sb = new StringBuilder();
+    
+    for (LegacyNumericType type : FieldType.LegacyNumericType.values()) {
+      String boundStr = numberToString(getNumberType(boundType, type.name()));
+      
+      sb.append("+").append(type.name()).append(operator).append('"').append(boundStr).append('"').append(' ');
+    }
+    
+    String boundDateStr = ESCAPER.escape(
+        DATE_FORMAT.format(new Date(getNumberType(boundType, DATE_FIELD_NAME)
+            .longValue())), LOCALE, EscapeQuerySyntax.Type.STRING).toString();
+    
+    sb.append("+").append(DATE_FIELD_NAME).append(operator).append('"').append(boundDateStr).append('"');
+    
+    testQuery(sb.toString(), expectedDocCount);
+  }
+  
+  public void assertSimpleQuery(NumberType numberType, int expectedDocCount)
+      throws QueryNodeException, IOException {
+    StringBuilder sb = new StringBuilder();
+    
+    for (LegacyNumericType type : LegacyNumericType.values()) {
+      String numberStr = numberToString(getNumberType(numberType, type.name()));
+      sb.append('+').append(type.name()).append(":\"").append(numberStr)
+          .append("\" ");
+    }
+    
+    String dateStr = ESCAPER.escape(
+        DATE_FORMAT.format(new Date(getNumberType(numberType, DATE_FIELD_NAME)
+            .longValue())), LOCALE, EscapeQuerySyntax.Type.STRING).toString();
+    
+    sb.append('+').append(DATE_FIELD_NAME).append(":\"").append(dateStr)
+        .append('"');
+    
+    testQuery(sb.toString(), expectedDocCount);
+    
+  }
+  
+  private void testQuery(String queryStr, int expectedDocCount)
+      throws QueryNodeException, IOException {
+    if (VERBOSE) System.out.println("Parsing: " + queryStr);
+    
+    Query query = qp.parse(queryStr, FIELD_NAME);
+    if (VERBOSE) System.out.println("Querying: " + query);
+    TopDocs topDocs = searcher.search(query, 1000);
+    
+    String msg = "Query <" + queryStr + "> retrieved " + topDocs.totalHits
+        + " document(s), " + expectedDocCount + " document(s) expected.";
+    
+    if (VERBOSE) System.out.println(msg);
+    
+    assertEquals(msg, expectedDocCount, topDocs.totalHits);
+  }
+  
+  private static String numberToString(Number number) {
+    return number == null ? "*" : ESCAPER.escape(NUMBER_FORMAT.format(number),
+        LOCALE, EscapeQuerySyntax.Type.STRING).toString();
+  }
+  
+  private static Number normalizeNumber(Number number) throws ParseException {
+    return NUMBER_FORMAT.parse(NUMBER_FORMAT.format(number));
+  }
+  
+  @AfterClass
+  public static void afterClass() throws Exception {
+    searcher = null;
+    reader.close();
+    reader = null;
+    directory.close();
+    directory = null;
+    qp = null;
+    LOCALE = null;
+    TIMEZONE = null;
+    NUMBER_FORMAT = null;
+    DATE_FORMAT = null;
+    ESCAPER = null;
+  }
+  
+}
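
As a point of comparison with the new Point processors, the legacy setup this test drives boils down to roughly the following; the precision step, config-map call, and LegacyNumericConfig constructor come from the test itself, while the query string, locale, analyzer, and field name are illustrative:

  import java.text.NumberFormat;
  import java.util.HashMap;
  import java.util.Locale;
  import java.util.Map;

  import org.apache.lucene.analysis.standard.StandardAnalyzer;
  import org.apache.lucene.document.FieldType.LegacyNumericType;
  import org.apache.lucene.queryparser.flexible.standard.StandardQueryParser;
  import org.apache.lucene.queryparser.flexible.standard.config.LegacyNumericConfig;
  import org.apache.lucene.search.Query;

  public class LegacyNumericConfigSketch {
    public static void main(String[] args) throws Exception {
      NumberFormat format = NumberFormat.getNumberInstance(Locale.ROOT);
      Map<String, LegacyNumericConfig> numericConfigMap = new HashMap<>();
      numericConfigMap.put("LONG",
          new LegacyNumericConfig(8 /* precision step */, format, LegacyNumericType.LONG));

      StandardQueryParser qp = new StandardQueryParser(new StandardAnalyzer());
      qp.setLegacyNumericConfigMap(numericConfigMap);

      // Matches documents indexed with LegacyLongField("LONG", ...).
      Query q = qp.parse("LONG:[\"-10\" TO \"10\"]", "field");
      System.out.println(q);
    }
  }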


[14/50] [abbrv] lucene-solr git commit: SOLR-445: remove redundant numErrors from response, ensure 'errors' is always returned to client, even if empty

Posted by ho...@apache.org.
SOLR-445: remove redundant numErrors from response, ensure 'errors' is always returned to client, even if empty


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/92f81fb7
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/92f81fb7
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/92f81fb7

Branch: refs/heads/jira/SOLR-445
Commit: 92f81fb7a00ecfbc94a7a09b694b61ef8596374a
Parents: 50697ee
Author: Chris Hostetter <ho...@apache.org>
Authored: Wed Mar 9 10:05:06 2016 -0700
Committer: Chris Hostetter <ho...@apache.org>
Committed: Wed Mar 9 10:05:06 2016 -0700

----------------------------------------------------------------------
 .../processor/TolerantUpdateProcessor.java      | 10 ++----
 .../DistribTolerantUpdateProcessorTest.java     | 17 +++++-----
 .../processor/TolerantUpdateProcessorTest.java  | 34 ++++++++++++--------
 3 files changed, 31 insertions(+), 30 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/92f81fb7/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
index cbfa1e2..dc07082 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
@@ -262,14 +262,8 @@ public class TolerantUpdateProcessor extends UpdateRequestProcessor {
       }
     }
 
-    // good or bad populate the response header
-    if (0 < knownErrors.size()) { // nocommit: we should just always set errors, even if empty?
-      
-      header.add("numErrors", knownErrors.size()); // nocommit: eliminate from response, client can count
-      header.add("errors", KnownErr.formatForResponseHeader(knownErrors));
-    } else {
-      header.add("numErrors", 0); // nocommit: eliminate from response, client can count
-    }
+    header.add("errors", KnownErr.formatForResponseHeader(knownErrors));
+    // include in response so the client knows what the effective value was (it may have come from server-side config)
     header.add("maxErrors", maxErrors);
 
     // annotate any error that might be thrown (or was already thrown)
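
Under the new contract a client can always read 'errors' from the response header and count it directly, as the tests below do; a rough client-side sketch (the class and method names are hypothetical, the cast mirrors the test code):

  import java.util.List;

  import org.apache.solr.client.solrj.response.UpdateResponse;
  import org.apache.solr.common.util.SimpleOrderedMap;

  public class TolerantErrorsSketch {
    /** Counts errors from a tolerant-chain update response; replaces the removed numErrors header. */
    @SuppressWarnings("unchecked")
    static int countErrors(UpdateResponse response) {
      // "errors" is now always present, possibly empty.
      List<SimpleOrderedMap<String>> errors =
          (List<SimpleOrderedMap<String>>) response.getResponseHeader().get("errors");
      for (SimpleOrderedMap<String> err : errors) {
        // each entry carries the error type (e.g. "ADD" or "DELQ"), id, and message
        System.err.println(err.get("type") + " " + err.get("id") + ": " + err.get("message"));
      }
      return errors.size();
    }
  }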

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/92f81fb7/solr/core/src/test/org/apache/solr/cloud/DistribTolerantUpdateProcessorTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/DistribTolerantUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/cloud/DistribTolerantUpdateProcessorTest.java
index 2c545be..bebe642 100644
--- a/solr/core/src/test/org/apache/solr/cloud/DistribTolerantUpdateProcessorTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/DistribTolerantUpdateProcessorTest.java
@@ -113,7 +113,12 @@ public class DistribTolerantUpdateProcessorTest extends AbstractFullDistribZkTes
   private void assertUSucceedsWithErrors(String chain, SolrInputDocument[] docs,
                                          SolrParams requestParams,
                                          int numErrors,
-                                         String... ids) throws Exception {
+                                         String... idsShouldFail) throws Exception {
+    
+    // nocommit: retire numErrors from this method sig ... trappy
+    assertEquals("bad test, idsShouldFail.length doesn't match numErrors",
+                 numErrors, idsShouldFail.length);
+    
     ModifiableSolrParams newParams = new ModifiableSolrParams(requestParams);
     newParams.set("update.chain", chain);
     UpdateResponse response = indexDoc(newParams, docs);
@@ -122,17 +127,13 @@ public class DistribTolerantUpdateProcessorTest extends AbstractFullDistribZkTes
       response.getResponseHeader().get("errors");
     assertNotNull("Null errors in response: " + response.toString(), errors);
 
-    assertEquals("number of errors in response: " + response.toString(), ids.length, errors.size());
-    
-    // nocommit: retire numErrors, we've already checked errors.size()
-    assertEquals("Wrong numErrors in response: " + response.toString(),
-                 numErrors, response.getResponseHeader().get("numErrors"));
+    assertEquals("number of errors in response: " + response.toString(), idsShouldFail.length, errors.size());
     
-    Set<String> addErrorIdsExpected = new HashSet<String>(Arrays.asList(ids));
+    Set<String> addErrorIdsExpected = new HashSet<String>(Arrays.asList(idsShouldFail));
     
     for (SimpleOrderedMap<String> err : errors) {
       // nocommit: support other types
-      assertEquals("nocommit: error type not handled yet",
+      assertEquals("nocommit: error type not handled yet by this method",
                    "ADD", err.get("type"));
       
       String id = err.get("id");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/92f81fb7/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java
index 1123a0f..61dac44 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java
@@ -249,14 +249,16 @@ public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
   public void testInvalidDelete() throws XPathExpressionException, SAXException {
     ignoreException("undefined field invalidfield");
     String response = update("tolerant-chain-max-errors-10", adoc("id", "1", "text", "the quick brown fox"));
-    assertNull(BaseTestHarness.validateXPath(response, "//int[@name='status']=0",
-        "//int[@name='numErrors']=0"));
+    assertNull(BaseTestHarness.validateXPath(response,
+                                             "//int[@name='status']=0",
+                                             "//arr[@name='errors']",
+                                             "count(//arr[@name='errors']/lst)=0"));
     
     response = update("tolerant-chain-max-errors-10", delQ("invalidfield:1"));
     assertNull(BaseTestHarness.validateXPath
                (response,
                 "//int[@name='status']=0",
-                "//int[@name='numErrors']=1",
+                "count(//arr[@name='errors']/lst)=1",
                 "//arr[@name='errors']/lst/str[@name='type']/text()='DELQ'",
                 "//arr[@name='errors']/lst/str[@name='id']/text()='invalidfield:1'",
                 "//arr[@name='errors']/lst/str[@name='message']/text()='undefined field invalidfield'"));
@@ -266,15 +268,20 @@ public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
   public void testValidDelete() throws XPathExpressionException, SAXException {
     ignoreException("undefined field invalidfield");
     String response = update("tolerant-chain-max-errors-10", adoc("id", "1", "text", "the quick brown fox"));
-    assertNull(BaseTestHarness.validateXPath(response, "//int[@name='status']=0",
-        "//int[@name='numErrors']=0"));
+    assertNull(BaseTestHarness.validateXPath(response,
+                                             "//int[@name='status']=0",
+                                             "//arr[@name='errors']",
+                                             "count(//arr[@name='errors']/lst)=0"));
+
     assertU(commit());
     assertQ(req("q","*:*")
         ,"//result[@numFound='1']");
     
     response = update("tolerant-chain-max-errors-10", delQ("id:1"));
-    assertNull(BaseTestHarness.validateXPath(response, "//int[@name='status']=0",
-        "//int[@name='numErrors']=0"));
+    assertNull(BaseTestHarness.validateXPath(response,
+                                             "//int[@name='status']=0",
+                                             "//arr[@name='errors']",
+                                             "count(//arr[@name='errors']/lst)=0"));
     assertU(commit());
     assertQ(req("q","*:*")
         ,"//result[@numFound='0']");
@@ -283,11 +290,13 @@ public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
   @Test
   public void testResponse() throws SAXException, XPathExpressionException, IOException {
     String response = update("tolerant-chain-max-errors-10", adoc("id", "1", "text", "the quick brown fox"));
-    assertNull(BaseTestHarness.validateXPath(response, "//int[@name='status']=0",
-        "//int[@name='numErrors']=0"));
+    assertNull(BaseTestHarness.validateXPath(response,
+                                             "//int[@name='status']=0",
+                                             "//arr[@name='errors']",
+                                             "count(//arr[@name='errors']/lst)=0"));
     response = update("tolerant-chain-max-errors-10", adoc("text", "the quick brown fox"));
     assertNull(BaseTestHarness.validateXPath(response, "//int[@name='status']=0",
-        "//int[@name='numErrors']=1",
+        "count(//arr[@name='errors']/lst)=1",
         "//arr[@name='errors']/lst/str[@name='id']/text()='(unknown)'",
         "//arr[@name='errors']/lst/str[@name='message']/text()='Document is missing mandatory uniqueKey field: id'"));
     
@@ -300,7 +309,7 @@ public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
     builder.append("</add>");
     response = update("tolerant-chain-max-errors-10", builder.toString());
     assertNull(BaseTestHarness.validateXPath(response, "//int[@name='status']=0",
-        "//int[@name='numErrors']=10",
+        "count(//arr[@name='errors']/lst)=10",
         "not(//arr[@name='errors']/lst/str[@name='id']/text()='0')",
         "//arr[@name='errors']/lst/str[@name='id']/text()='1'",
         "not(//arr[@name='errors']/lst/str[@name='id']/text()='2')",
@@ -391,9 +400,6 @@ public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
 
     assertEquals("number of errors", idsShouldFail.length, errors.size());
     
-    // nocommit: retire numErrors, we've already checked errors.size()
-    assertEquals(numErrors, response.getResponseHeader().get("numErrors"));
-    
     Set<String> addErrorIdsExpected = new HashSet<String>(Arrays.asList(idsShouldFail));
 
     for (SimpleOrderedMap<String> err : errors) {


[19/50] [abbrv] lucene-solr git commit: SOLR-8765: Throw SolrException rather than IAE on name validation

Posted by ho...@apache.org.
SOLR-8765: Throw SolrException rather than IAE on name validation


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/f24810bd
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/f24810bd
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/f24810bd

Branch: refs/heads/jira/SOLR-445
Commit: f24810bdf1e8b1949970ce743373794e0b1ffc96
Parents: 540e801
Author: Alan Woodward <ro...@apache.org>
Authored: Wed Mar 9 21:15:40 2016 +0000
Committer: Alan Woodward <ro...@apache.org>
Committed: Wed Mar 9 21:15:58 2016 +0000

----------------------------------------------------------------------
 .../client/solrj/util/SolrIdentifierValidator.java     |  4 +++-
 .../solrj/request/TestCollectionAdminRequest.java      |  9 +++++----
 .../solr/client/solrj/request/TestCoreAdmin.java       | 13 ++++++-------
 3 files changed, 14 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f24810bd/solr/solrj/src/java/org/apache/solr/client/solrj/util/SolrIdentifierValidator.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/util/SolrIdentifierValidator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/util/SolrIdentifierValidator.java
index 57f9909..9473a28 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/util/SolrIdentifierValidator.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/util/SolrIdentifierValidator.java
@@ -19,6 +19,8 @@ package org.apache.solr.client.solrj.util;
 import java.util.Locale;
 import java.util.regex.Pattern;
 
+import org.apache.solr.common.SolrException;
+
 /**
  * Ensures that provided identifiers align with Solr's recommendations/requirements for choosing
  * collection, core, etc. identifiers.
@@ -34,7 +36,7 @@ public class SolrIdentifierValidator {
 
   public static String validateName(IdentifierType type, String name) {
     if (!validateIdentifier(name))
-      throw new IllegalArgumentException(getIdentifierMessage(type, name));
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, getIdentifierMessage(type, name));
     return name;
   }
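
Callers that previously caught IllegalArgumentException around these setters now need to catch SolrException instead; a small sketch (the no-arg Create construction is an assumption, since the hunks elide how the request is built):

  import org.apache.solr.client.solrj.request.CollectionAdminRequest;
  import org.apache.solr.common.SolrException;

  public class NameValidationSketch {
    public static void main(String[] args) {
      CollectionAdminRequest.Create create = new CollectionAdminRequest.Create();
      try {
        create.setCollectionName("invalid$collection@name");
      } catch (SolrException e) {
        // previously an IllegalArgumentException; now a SolrException with
        // ErrorCode.BAD_REQUEST whose message names the invalid identifier
        System.err.println(e.code() + ": " + e.getMessage());
      }
    }
  }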
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f24810bd/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCollectionAdminRequest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCollectionAdminRequest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCollectionAdminRequest.java
index 5d5c315..c21e523 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCollectionAdminRequest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCollectionAdminRequest.java
@@ -20,6 +20,7 @@ import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest.Create;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest.CreateAlias;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest.CreateShard;
+import org.apache.solr.common.SolrException;
 import org.junit.Test;
 
 /**
@@ -33,7 +34,7 @@ public class TestCollectionAdminRequest extends LuceneTestCase {
     try {
       createRequest.setCollectionName("invalid$collection@name");
       fail();
-    } catch (IllegalArgumentException e) {
+    } catch (SolrException e) {
       final String exceptionMessage = e.getMessage();
       assertTrue(exceptionMessage.contains("Invalid collection"));
       assertTrue(exceptionMessage.contains("invalid$collection@name"));
@@ -47,7 +48,7 @@ public class TestCollectionAdminRequest extends LuceneTestCase {
     try {
       createRequest.setShards("invalid$shard@name");
       fail();
-    } catch (IllegalArgumentException e) {
+    } catch (SolrException e) {
       final String exceptionMessage = e.getMessage();
       assertTrue(exceptionMessage.contains("Invalid shard"));
       assertTrue(exceptionMessage.contains("invalid$shard@name"));
@@ -61,7 +62,7 @@ public class TestCollectionAdminRequest extends LuceneTestCase {
     try {
       createAliasRequest.setAliasName("invalid$alias@name");
       fail();
-    } catch (IllegalArgumentException e) {
+    } catch (SolrException e) {
       final String exceptionMessage = e.getMessage();
       assertTrue(exceptionMessage.contains("Invalid alias"));
       assertTrue(exceptionMessage.contains("invalid$alias@name"));
@@ -75,7 +76,7 @@ public class TestCollectionAdminRequest extends LuceneTestCase {
     try {
       createShardRequest.setShardName("invalid$shard@name");
       fail();
-    } catch (IllegalArgumentException e) {
+    } catch (SolrException e) {
       final String exceptionMessage = e.getMessage();
       assertTrue(exceptionMessage.contains("Invalid shard"));
       assertTrue(exceptionMessage.contains("invalid$shard@name"));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f24810bd/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java
index f3c3d55..ef4dad7 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java
@@ -16,12 +16,11 @@
  */
 package org.apache.solr.client.solrj.request;
 
-import static org.hamcrest.CoreMatchers.notNullValue;
-import static org.hamcrest.core.Is.is;
-
 import java.io.File;
 import java.lang.invoke.MethodHandles;
 
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
+import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule;
 import org.apache.commons.io.FileUtils;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.SolrIgnoredThreadsFilter;
@@ -43,8 +42,8 @@ import org.junit.rules.TestRule;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
-import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule;
+import static org.hamcrest.CoreMatchers.notNullValue;
+import static org.hamcrest.core.Is.is;
 
 @ThreadLeakFilters(defaultFilters = true, filters = {SolrIgnoredThreadsFilter.class})
 public class TestCoreAdmin extends AbstractEmbeddedSolrServerTestCase {
@@ -167,7 +166,7 @@ public class TestCoreAdmin extends AbstractEmbeddedSolrServerTestCase {
     try {
       createRequest.setCoreName("invalid$core@name");
       fail();
-    } catch (IllegalArgumentException e) {
+    } catch (SolrException e) {
       final String exceptionMessage = e.getMessage();
       assertTrue(exceptionMessage.contains("Invalid core"));
       assertTrue(exceptionMessage.contains("invalid$core@name"));
@@ -180,7 +179,7 @@ public class TestCoreAdmin extends AbstractEmbeddedSolrServerTestCase {
     try {
       CoreAdminRequest.renameCore("validExistingCoreName", "invalid$core@name", null);
       fail();
-    } catch (IllegalArgumentException e) {
+    } catch (SolrException e) {
       final String exceptionMessage = e.getMessage();
       assertTrue(e.getMessage(), exceptionMessage.contains("Invalid core"));
       assertTrue(exceptionMessage.contains("invalid$core@name"));


[10/50] [abbrv] lucene-solr git commit: LUCENE-7088, LUCENE-7075: Add PointRangeQueryBuilder to xml-queryparser to replace LegacyNumericRangeQueryBuilder

Posted by ho...@apache.org.
LUCENE-7088, LUCENE-7075: Add PointRangeQueryBuilder to xml-queryparser to replace LegacyNumericRangeQueryBuilder


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/116ece2f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/116ece2f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/116ece2f

Branch: refs/heads/jira/SOLR-445
Commit: 116ece2fe4386496dd2131bd6b4f11ed01cce7fc
Parents: 770e508
Author: Robert Muir <rm...@apache.org>
Authored: Wed Mar 9 10:06:11 2016 -0500
Committer: Robert Muir <rm...@apache.org>
Committed: Wed Mar 9 10:07:33 2016 -0500

----------------------------------------------------------------------
 .../lucene/queryparser/xml/CoreParser.java      |  1 +
 .../LegacyNumericRangeQueryBuilder.java         |  2 +
 .../xml/builders/PointRangeQueryBuilder.java    | 95 ++++++++++++++++++++
 .../lucene/queryparser/xml/PointRangeQuery.xml  | 31 +++++++
 .../lucene/queryparser/xml/TestCoreParser.java  |  7 ++
 5 files changed, 136 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/116ece2f/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/CoreParser.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/CoreParser.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/CoreParser.java
index 6bbb626..1416f25 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/CoreParser.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/CoreParser.java
@@ -67,6 +67,7 @@ public class CoreParser implements QueryBuilder {
     queryFactory.addBuilder("MatchAllDocsQuery", new MatchAllDocsQueryBuilder());
     queryFactory.addBuilder("BooleanQuery", new BooleanQueryBuilder(queryFactory));
     queryFactory.addBuilder("LegacyNumericRangeQuery", new LegacyNumericRangeQueryBuilder());
+    queryFactory.addBuilder("PointRangeQuery", new PointRangeQueryBuilder());
     queryFactory.addBuilder("RangeQuery", new RangeQueryBuilder());
     queryFactory.addBuilder("DisjunctionMaxQuery", new DisjunctionMaxQueryBuilder(queryFactory));
     if (parser != null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/116ece2f/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/LegacyNumericRangeQueryBuilder.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/LegacyNumericRangeQueryBuilder.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/LegacyNumericRangeQueryBuilder.java
index 2aba681..e195964 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/LegacyNumericRangeQueryBuilder.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/LegacyNumericRangeQueryBuilder.java
@@ -83,7 +83,9 @@ import org.w3c.dom.Element;
  * A {@link ParserException} will be thrown if an error occurs parsing the
  * supplied <tt>lowerTerm</tt> or <tt>upperTerm</tt> into the numeric type
  * specified by <tt>type</tt>.
+ * @deprecated Index with points and use {@link PointRangeQueryBuilder} instead 
  */
+@Deprecated
 public class LegacyNumericRangeQueryBuilder implements QueryBuilder {
 
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/116ece2f/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/PointRangeQueryBuilder.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/PointRangeQueryBuilder.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/PointRangeQueryBuilder.java
new file mode 100644
index 0000000..4548316
--- /dev/null
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/PointRangeQueryBuilder.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.queryparser.xml.builders;
+
+import org.apache.lucene.search.Query;
+import org.apache.lucene.document.DoublePoint;
+import org.apache.lucene.document.FloatPoint;
+import org.apache.lucene.document.IntPoint;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.index.PointValues;
+import org.apache.lucene.queryparser.xml.DOMUtils;
+import org.apache.lucene.queryparser.xml.ParserException;
+import org.apache.lucene.queryparser.xml.QueryBuilder;
+import org.w3c.dom.Element;
+
+/**
+ * Creates a range query across 1D {@link PointValues}. The table below specifies the required
+ * attributes and the defaults if optional attributes are omitted:
+ * <table summary="supported attributes">
+ * <tr>
+ * <th>Attribute name</th>
+ * <th>Values</th>
+ * <th>Required</th>
+ * <th>Default</th>
+ * </tr>
+ * <tr>
+ * <td>fieldName</td>
+ * <td>String</td>
+ * <td>Yes</td>
+ * <td>N/A</td>
+ * </tr>
+ * <tr>
+ * <td>lowerTerm</td>
+ * <td>Specified by <tt>type</tt></td>
+ * <td>Yes</td>
+ * <td>N/A</td>
+ * </tr>
+ * <tr>
+ * <td>upperTerm</td>
+ * <td>Specified by <tt>type</tt></td>
+ * <td>Yes</td>
+ * <td>N/A</td>
+ * </tr>
+ * <tr>
+ * <td>type</td>
+ * <td>int, long, float, double</td>
+ * <td>No</td>
+ * <td>int</td>
+ * </tr>
+ * </table>
+ * <p>
+ * A {@link ParserException} will be thrown if an error occurs parsing the
+ * supplied <tt>lowerTerm</tt> or <tt>upperTerm</tt> into the numeric type
+ * specified by <tt>type</tt>.
+ */
+public class PointRangeQueryBuilder implements QueryBuilder {
+
+  @Override
+  public Query getQuery(Element e) throws ParserException {
+    String field = DOMUtils.getAttributeWithInheritanceOrFail(e, "fieldName");
+    String lowerTerm = DOMUtils.getAttributeOrFail(e, "lowerTerm");
+    String upperTerm = DOMUtils.getAttributeOrFail(e, "upperTerm");
+
+    String type = DOMUtils.getAttribute(e, "type", "int");
+    try {
+      if (type.equalsIgnoreCase("int")) {
+        return IntPoint.newRangeQuery(field, Integer.valueOf(lowerTerm), Integer.valueOf(upperTerm));
+      } else if (type.equalsIgnoreCase("long")) {
+        return LongPoint.newRangeQuery(field, Long.valueOf(lowerTerm), Long.valueOf(upperTerm));
+      } else if (type.equalsIgnoreCase("double")) {
+        return DoublePoint.newRangeQuery(field, Double.valueOf(lowerTerm), Double.valueOf(upperTerm));
+      } else if (type.equalsIgnoreCase("float")) {
+        return FloatPoint.newRangeQuery(field, Float.valueOf(lowerTerm), Float.valueOf(upperTerm));
+      } else {
+        throw new ParserException("type attribute must be one of: [long, int, double, float]");
+      }
+    } catch (NumberFormatException nfe) {
+      throw new ParserException("Could not parse lowerTerm or upperTerm into a number", nfe);
+    }
+  }
+}
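
For context, the new builder is exercised through the existing XML query parser. A minimal sketch of client-side usage (the analyzer, default field, and XML snippet are illustrative, and it assumes CoreParser registers the builder under the PointRangeQuery element name, as the test below suggests):

  import java.io.ByteArrayInputStream;
  import java.nio.charset.StandardCharsets;
  import org.apache.lucene.analysis.standard.StandardAnalyzer;
  import org.apache.lucene.queryparser.xml.CoreParser;
  import org.apache.lucene.search.Query;

  String xml = "<PointRangeQuery fieldName='price' type='long' lowerTerm='10' upperTerm='20'/>";
  CoreParser parser = new CoreParser("contents", new StandardAnalyzer());
  // parse() throws ParserException, e.g. if lowerTerm/upperTerm are not valid longs
  Query q = parser.parse(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8)));
  // the field must have been indexed with LongPoint for the query to match anything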

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/116ece2f/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/PointRangeQuery.xml
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/PointRangeQuery.xml b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/PointRangeQuery.xml
new file mode 100644
index 0000000..45af138
--- /dev/null
+++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/PointRangeQuery.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<BooleanQuery fieldName="contents">
+  <Clause occurs="should">
+    <TermQuery>merger</TermQuery>
+  </Clause>
+  <Clause occurs="mustnot">
+    <TermQuery>sumitomo</TermQuery>
+  </Clause>
+  <Clause occurs="must">
+    <TermQuery>bank</TermQuery>
+  </Clause>
+  <Clause occurs="must">
+    <PointRangeQuery fieldName="date3" lowerTerm="19870409" upperTerm="19870412"/>
+  </Clause>
+</BooleanQuery>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/116ece2f/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java
index e675723..5cadec2 100644
--- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java
+++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java
@@ -22,6 +22,7 @@ import org.apache.lucene.analysis.MockTokenFilter;
 import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.document.IntPoint;
 import org.apache.lucene.document.LegacyIntField;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
@@ -72,6 +73,7 @@ public class TestCoreParser extends LuceneTestCase {
       doc.add(newTextField("date", date, Field.Store.YES));
       doc.add(newTextField("contents", content, Field.Store.YES));
       doc.add(new LegacyIntField("date2", Integer.valueOf(date), Field.Store.NO));
+      doc.add(new IntPoint("date3", Integer.valueOf(date)));
       writer.addDocument(doc);
       line = d.readLine();
     }
@@ -164,6 +166,11 @@ public class TestCoreParser extends LuceneTestCase {
     Query q = parse("LegacyNumericRangeQuery.xml");
     dumpResults("LegacyNumericRangeQuery", q, 5);
   }
+  
+  public void testPointRangeQuery() throws ParserException, IOException {
+    Query q = parse("PointRangeQuery.xml");
+    dumpResults("PointRangeQuery", q, 5);
+  }
 
   //================= Helper methods ===================================
 


[15/50] [abbrv] lucene-solr git commit: LUCENE-7076: improve deprecation message for LegacyNumericRangeQuery

Posted by ho...@apache.org.
LUCENE-7076: improve deprecation message for LegacyNumericRangeQuery


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/a7ff1c8d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/a7ff1c8d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/a7ff1c8d

Branch: refs/heads/jira/SOLR-445
Commit: a7ff1c8d3e3ca42f7e7f8696bf1f6048b3bfa2ba
Parents: 58623e0
Author: Robert Muir <rm...@apache.org>
Authored: Wed Mar 9 12:16:59 2016 -0500
Committer: Robert Muir <rm...@apache.org>
Committed: Wed Mar 9 12:16:59 2016 -0500

----------------------------------------------------------------------
 .../apache/lucene/search/LegacyNumericRangeQuery.java   | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a7ff1c8d/lucene/core/src/java/org/apache/lucene/search/LegacyNumericRangeQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/LegacyNumericRangeQuery.java b/lucene/core/src/java/org/apache/lucene/search/LegacyNumericRangeQuery.java
index c61f7a8..fe6c9e2 100644
--- a/lucene/core/src/java/org/apache/lucene/search/LegacyNumericRangeQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/LegacyNumericRangeQuery.java
@@ -21,9 +21,14 @@ import java.io.IOException;
 import java.util.LinkedList;
 import java.util.Objects;
 
+import org.apache.lucene.document.DoublePoint;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.FieldType.LegacyNumericType;
+import org.apache.lucene.document.FloatPoint;
+import org.apache.lucene.document.IntPoint;
+import org.apache.lucene.document.LongPoint;
 import org.apache.lucene.index.FilteredTermsEnum;
+import org.apache.lucene.index.PointValues;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.util.AttributeSource;
@@ -158,7 +163,12 @@ import org.apache.lucene.index.Term; // for javadocs
  * precision step). This query type was developed for a geographic portal, where the performance for
  * e.g. bounding boxes or exact date/time stamps is important.</p>
  *
- * @deprecated Please use {@link PointRangeQuery} instead
+ * @deprecated Instead index with {@link IntPoint}, {@link LongPoint}, {@link FloatPoint}, {@link DoublePoint}, and
+ *             create range queries with {@link IntPoint#newRangeQuery(String, int, int) IntPoint.newRangeQuery()},
+ *             {@link LongPoint#newRangeQuery(String, long, long) LongPoint.newRangeQuery()},
+ *             {@link FloatPoint#newRangeQuery(String, float, float) FloatPoint.newRangeQuery()},
+ *             {@link DoublePoint#newRangeQuery(String, double, double) DoublePoint.newRangeQuery()} respectively.
+ *             See {@link PointValues} for background information on Points.
  *
  * @since 2.9
  **/
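
For readers migrating, the before/after in application code looks roughly like this (field name and values are illustrative; doc is a Document being indexed):

  // before: trie-encoded field queried with LegacyNumericRangeQuery
  doc.add(new LegacyIntField("price", 42, Field.Store.NO));
  Query legacy = LegacyNumericRangeQuery.newIntRange("price", 10, 100, true, true);

  // after: index a dimensional point and use the factory named in the javadoc
  doc.add(new IntPoint("price", 42));
  Query q = IntPoint.newRangeQuery("price", 10, 100);  // bounds are inclusive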


[12/50] [abbrv] lucene-solr git commit: CheckIndex failed to say it was checking points

Posted by ho...@apache.org.
CheckIndex failed to say it was checking points


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/dacbf333
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/dacbf333
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/dacbf333

Branch: refs/heads/jira/SOLR-445
Commit: dacbf333e40e44ccf4a7c83044392aabaea3425b
Parents: f1ad769
Author: Mike McCandless <mi...@apache.org>
Authored: Wed Mar 9 11:03:19 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Wed Mar 9 11:03:37 2016 -0500

----------------------------------------------------------------------
 .../org/apache/lucene/index/CheckIndex.java     |  7 +++++
 .../index/TestAllFilesCheckIndexHeader.java     |  3 +-
 .../index/TestAllFilesDetectTruncation.java     |  4 +--
 .../apache/lucene/index/TestPointValues.java    | 29 ++++++++++++++++++++
 .../lucene/index/TestSwappedIndexFiles.java     |  3 +-
 .../lucene/store/MockDirectoryWrapper.java      |  2 +-
 .../java/org/apache/lucene/util/TestUtil.java   | 14 ++++++----
 7 files changed, 48 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dacbf333/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
index 3c437c1..89b36ef 100644
--- a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
+++ b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
@@ -1683,9 +1683,14 @@ public final class CheckIndex implements Closeable {
    * @lucene.experimental
    */
   public static Status.PointsStatus testPoints(CodecReader reader, PrintStream infoStream, boolean failFast) throws IOException {
+    if (infoStream != null) {
+      infoStream.print("    test: points..............");
+    }
+    long startNS = System.nanoTime();
     FieldInfos fieldInfos = reader.getFieldInfos();
     Status.PointsStatus status = new Status.PointsStatus();
     try {
+
       if (fieldInfos.hasPointValues()) {
         PointsReader values = reader.getPointsReader();
         if (values == null) {
@@ -1840,6 +1845,8 @@ public final class CheckIndex implements Closeable {
           }
         }
       }
+      msg(infoStream, String.format(Locale.ROOT, "OK [%d fields, %d points] [took %.3f sec]", status.totalValueFields, status.totalValuePoints, nsToSec(System.nanoTime()-startNS)));
+
     } catch (Throwable e) {
       if (failFast) {
         IOUtils.reThrow(e);
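
A minimal sketch of how the new line surfaces when CheckIndex is driven programmatically (the index path is a placeholder):

  import java.nio.file.Paths;
  import org.apache.lucene.index.CheckIndex;
  import org.apache.lucene.store.Directory;
  import org.apache.lucene.store.FSDirectory;

  try (Directory dir = FSDirectory.open(Paths.get("/path/to/index"));
       CheckIndex checker = new CheckIndex(dir)) {
    checker.setInfoStream(System.out);
    CheckIndex.Status status = checker.checkIndex();
    // for each segment with points, the infoStream now prints a line like:
    //     test: points.............. OK [2 fields, 3 points] [took 0.001 sec]
    System.out.println("index clean? " + status.clean);
  }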

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dacbf333/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java
index 68b7cc2..f6c1486 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java
@@ -28,7 +28,6 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
-import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.LineFileDocs;
 import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems;
 import org.apache.lucene.util.LuceneTestCase;
@@ -132,7 +131,7 @@ public class TestAllFilesCheckIndexHeader extends LuceneTestCase {
 
       // CheckIndex should also fail:
       try {
-        TestUtil.checkIndex(dirCopy, true, true);
+        TestUtil.checkIndex(dirCopy, true, true, null);
         fail("wrong bytes not detected after randomizing first " + wrongBytes + " bytes out of " + victimLength + " for file " + victim);
       } catch (CorruptIndexException | EOFException | IndexFormatTooOldException e) {
         // expected

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dacbf333/lucene/core/src/test/org/apache/lucene/index/TestAllFilesDetectTruncation.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesDetectTruncation.java b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesDetectTruncation.java
index 16caae3..c751417 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesDetectTruncation.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesDetectTruncation.java
@@ -19,7 +19,6 @@ package org.apache.lucene.index;
 
 import java.io.EOFException;
 import java.io.IOException;
-import java.util.Arrays;
 import java.util.Collections;
 
 import org.apache.lucene.analysis.MockAnalyzer;
@@ -28,7 +27,6 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
-import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.LineFileDocs;
 import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems;
 import org.apache.lucene.util.LuceneTestCase;
@@ -116,7 +114,7 @@ public class TestAllFilesDetectTruncation extends LuceneTestCase {
 
       // CheckIndex should also fail:
       try {
-        TestUtil.checkIndex(dirCopy, true, true);
+        TestUtil.checkIndex(dirCopy, true, true, null);
         fail("truncation not detected after removing " + lostBytes + " bytes out of " + victimLength + " for file " + victim);
       } catch (CorruptIndexException | EOFException e) {
         // expected

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dacbf333/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java
index 55d4794..9b18f02 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java
@@ -17,6 +17,7 @@
 package org.apache.lucene.index;
 
 
+import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 
 import org.apache.lucene.analysis.MockAnalyzer;
@@ -40,6 +41,7 @@ import org.apache.lucene.index.PointValues.Relation;
 import org.apache.lucene.index.PointValues;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
@@ -628,4 +630,31 @@ public class TestPointValues extends LuceneTestCase {
     w.close();
     dir.close();
   }
+
+  public void testCheckIndexIncludesPoints() throws Exception {
+    Directory dir = new RAMDirectory();
+    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));
+    Document doc = new Document();
+    doc.add(new IntPoint("int1", 17));
+    w.addDocument(doc);
+
+    doc = new Document();
+    doc.add(new IntPoint("int1", 44));
+    doc.add(new IntPoint("int2", -17));
+    w.addDocument(doc);
+    w.close();
+
+    ByteArrayOutputStream output = new ByteArrayOutputStream();
+    CheckIndex.Status status = TestUtil.checkIndex(dir, false, true, output);
+    assertEquals(1, status.segmentInfos.size());
+    CheckIndex.Status.SegmentInfoStatus segStatus = status.segmentInfos.get(0);
+    // total 3 point values were indexed:
+    assertEquals(3, segStatus.pointsStatus.totalValuePoints);
+    // ... across 2 fields:
+    assertEquals(2, segStatus.pointsStatus.totalValueFields);
+
+    // Make sure CheckIndex in fact declares that it is testing points!
+    assertTrue(output.toString(IOUtils.UTF_8).contains("test: points..."));
+    dir.close();
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dacbf333/lucene/core/src/test/org/apache/lucene/index/TestSwappedIndexFiles.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSwappedIndexFiles.java b/lucene/core/src/test/org/apache/lucene/index/TestSwappedIndexFiles.java
index a284fdd..d220502 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestSwappedIndexFiles.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSwappedIndexFiles.java
@@ -28,7 +28,6 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.store.BaseDirectoryWrapper;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
-import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.LineFileDocs;
 import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems;
 import org.apache.lucene.util.LuceneTestCase;
@@ -118,7 +117,7 @@ public class TestSwappedIndexFiles extends LuceneTestCase {
 
       // CheckIndex should also fail:
       try {
-        TestUtil.checkIndex(dirCopy, true, true);
+        TestUtil.checkIndex(dirCopy, true, true, null);
         fail("wrong file " + victim + " not detected");
       } catch (CorruptIndexException | EOFException | IndexFormatTooOldException e) {
         // expected

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dacbf333/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java b/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java
index 7fe7c3b..c2544b4 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java
@@ -850,7 +850,7 @@ public class MockDirectoryWrapper extends BaseDirectoryWrapper {
             System.out.println("\nNOTE: MockDirectoryWrapper: now run CheckIndex");
           } 
 
-          TestUtil.checkIndex(this, getCrossCheckTermVectorsOnClose(), true);
+          TestUtil.checkIndex(this, getCrossCheckTermVectorsOnClose(), true, null);
         }
           
         // TODO: factor this out / share w/ TestIW.assertNoUnreferencedFiles

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dacbf333/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
index 5e328ba..d772ae3 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
@@ -279,28 +279,30 @@ public final class TestUtil {
   }
 
   public static CheckIndex.Status checkIndex(Directory dir, boolean crossCheckTermVectors) throws IOException {
-    return checkIndex(dir, crossCheckTermVectors, false);
+    return checkIndex(dir, crossCheckTermVectors, false, null);
   }
 
   /** If failFast is true, then throw the first exception when index corruption is hit, instead of moving on to other fields/segments to
    *  look for any other corruption.  */
-  public static CheckIndex.Status checkIndex(Directory dir, boolean crossCheckTermVectors, boolean failFast) throws IOException {
-    ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
+  public static CheckIndex.Status checkIndex(Directory dir, boolean crossCheckTermVectors, boolean failFast, ByteArrayOutputStream output) throws IOException {
+    if (output == null) {
+      output = new ByteArrayOutputStream(1024);
+    }
     // TODO: actually use the dir's locking, unless test uses a special method?
     // some tests e.g. exception tests become much more complicated if they have to close the writer
     try (CheckIndex checker = new CheckIndex(dir, NoLockFactory.INSTANCE.obtainLock(dir, "bogus"))) {
       checker.setCrossCheckTermVectors(crossCheckTermVectors);
       checker.setFailFast(failFast);
-      checker.setInfoStream(new PrintStream(bos, false, IOUtils.UTF_8), false);
+      checker.setInfoStream(new PrintStream(output, false, IOUtils.UTF_8), false);
       CheckIndex.Status indexStatus = checker.checkIndex(null);
       
       if (indexStatus == null || indexStatus.clean == false) {
         System.out.println("CheckIndex failed");
-        System.out.println(bos.toString(IOUtils.UTF_8));
+        System.out.println(output.toString(IOUtils.UTF_8));
         throw new RuntimeException("CheckIndex failed");
       } else {
         if (LuceneTestCase.INFOSTREAM) {
-          System.out.println(bos.toString(IOUtils.UTF_8));
+          System.out.println(output.toString(IOUtils.UTF_8));
         }
         return indexStatus;
       }


[43/50] [abbrv] lucene-solr git commit: remove dead code

Posted by ho...@apache.org.
remove dead code


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/fafbb2b6
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/fafbb2b6
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/fafbb2b6

Branch: refs/heads/jira/SOLR-445
Commit: fafbb2b6c75a8d0ee753f4a0d9936760d46494ce
Parents: af34993
Author: Mike McCandless <mi...@apache.org>
Authored: Fri Mar 11 05:34:19 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Fri Mar 11 05:34:19 2016 -0500

----------------------------------------------------------------------
 .../src/java/org/apache/lucene/util/bkd/OfflinePointReader.java   | 3 +--
 .../src/java/org/apache/lucene/util/bkd/OfflinePointWriter.java   | 3 ---
 2 files changed, 1 insertion(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/fafbb2b6/lucene/core/src/java/org/apache/lucene/util/bkd/OfflinePointReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/util/bkd/OfflinePointReader.java b/lucene/core/src/java/org/apache/lucene/util/bkd/OfflinePointReader.java
index 14d714c..83d863b 100644
--- a/lucene/core/src/java/org/apache/lucene/util/bkd/OfflinePointReader.java
+++ b/lucene/core/src/java/org/apache/lucene/util/bkd/OfflinePointReader.java
@@ -30,7 +30,6 @@ final class OfflinePointReader implements PointReader {
   private final byte[] packedValue;
   private long ord;
   private int docID;
-  final int bytesPerDoc;
 
   OfflinePointReader(Directory tempDir, String tempFileName, int packedBytesLength, long start, long length) throws IOException {
     this(tempDir.openInput(tempFileName, IOContext.READONCE), packedBytesLength, start, length);
@@ -38,7 +37,7 @@ final class OfflinePointReader implements PointReader {
 
   private OfflinePointReader(IndexInput in, int packedBytesLength, long start, long length) throws IOException {
     this.in = in;
-    bytesPerDoc = packedBytesLength + Long.BYTES + Integer.BYTES;
+    int bytesPerDoc = packedBytesLength + Long.BYTES + Integer.BYTES;
     long seekFP = start * bytesPerDoc;
     in.seek(seekFP);
     this.countLeft = length;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/fafbb2b6/lucene/core/src/java/org/apache/lucene/util/bkd/OfflinePointWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/util/bkd/OfflinePointWriter.java b/lucene/core/src/java/org/apache/lucene/util/bkd/OfflinePointWriter.java
index f47f567..625e6fa 100644
--- a/lucene/core/src/java/org/apache/lucene/util/bkd/OfflinePointWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/util/bkd/OfflinePointWriter.java
@@ -28,7 +28,6 @@ final class OfflinePointWriter implements PointWriter {
   final Directory tempDir;
   final IndexOutput out;
   final int packedBytesLength;
-  final int bytesPerDoc;
   private long count;
   private boolean closed;
 
@@ -36,7 +35,6 @@ final class OfflinePointWriter implements PointWriter {
     this.out = tempDir.createTempOutput(tempFileNamePrefix, "bkd", IOContext.DEFAULT);
     this.tempDir = tempDir;
     this.packedBytesLength = packedBytesLength;
-    bytesPerDoc = packedBytesLength + Long.BYTES + Integer.BYTES;
   }
 
   /** Initializes on an already written/closed file, just so consumers can use {@link #getReader} to read the file. */
@@ -44,7 +42,6 @@ final class OfflinePointWriter implements PointWriter {
     this.out = out;
     this.tempDir = tempDir;
     this.packedBytesLength = packedBytesLength;
-    bytesPerDoc = packedBytesLength + Long.BYTES + Integer.BYTES;
     this.count = count;
     closed = true;
   }
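
The removed field was a straight derivation (the reader now computes it as a local): each on-disk record is the packed value followed by an 8-byte ord and a 4-byte docID. For a 1D IntPoint, for example:

  int packedBytesLength = Integer.BYTES;                             // 4
  int bytesPerDoc = packedBytesLength + Long.BYTES + Integer.BYTES;  // 4 + 8 + 4 = 16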


[40/50] [abbrv] lucene-solr git commit: SOLR-8804: Fix a race condition in the ClusterStatus API call

Posted by ho...@apache.org.
SOLR-8804: Fix a race condition in the ClusterStatus API call


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/343d9c6f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/343d9c6f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/343d9c6f

Branch: refs/heads/jira/SOLR-445
Commit: 343d9c6fa4ccff3d997b5b8c143b839364569ae6
Parents: b0caca3
Author: Varun Thacker <va...@gmail.com>
Authored: Fri Mar 11 14:01:01 2016 +0530
Committer: Varun Thacker <va...@gmail.com>
Committed: Fri Mar 11 14:02:06 2016 +0530

----------------------------------------------------------------------
 solr/CHANGES.txt                                 |  3 +++
 .../apache/solr/handler/admin/ClusterStatus.java | 16 ++++++++++++----
 .../org/apache/solr/cloud/TestCollectionAPI.java | 19 +++++++++++++++++++
 3 files changed, 34 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/343d9c6f/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 4e969bf..1d91a3e 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -289,6 +289,9 @@ Bug Fixes
 * SOLR-8790: Collections API responses contain node name in the core-level responses that are
   returned. (Anshum Gupta)
 
+* SOLR-8804: Fix a race condition in the ClusterStatus API call whereby the call would fail when a concurrent delete
+  collection API command was executed (Alexey Serba, Varun Thacker)
+
 Optimizations
 ----------------------
 * SOLR-7876: Speed up queries and operations that use many terms when timeAllowed has not been

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/343d9c6f/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java b/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java
index ff60adc..63044fa 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java
@@ -89,7 +89,7 @@ public class ClusterStatus {
     byte[] bytes = Utils.toJSON(clusterState);
     Map<String, Object> stateMap = (Map<String,Object>) Utils.fromJSON(bytes);
 
-    Set<String> collections = new HashSet<>();
+    Set<String> collections;
     String routeKey = message.getStr(ShardParams._ROUTE_);
     String shard = message.getStr(ZkStateReader.SHARD_ID_PROP);
     if (collection == null) {
@@ -98,11 +98,19 @@ public class ClusterStatus {
       collections = Collections.singleton(collection);
     }
 
-    NamedList<Object> collectionProps = new SimpleOrderedMap<Object>();
+    NamedList<Object> collectionProps = new SimpleOrderedMap<>();
 
     for (String name : collections) {
-      Map<String, Object> collectionStatus = null;
-      DocCollection clusterStateCollection = clusterState.getCollection(name);
+      Map<String, Object> collectionStatus;
+      DocCollection clusterStateCollection = clusterState.getCollectionOrNull(name);
+      if (clusterStateCollection == null) {
+        if (collection != null) {
+          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Collection: " + name + " not found");
+        } else {
+          // the collection may have been deleted concurrently
+          continue;
+        }
+      }
 
       Set<String> requestedShards = new HashSet<>();
       if (routeKey != null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/343d9c6f/solr/core/src/test/org/apache/solr/cloud/TestCollectionAPI.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCollectionAPI.java b/solr/core/src/test/org/apache/solr/cloud/TestCollectionAPI.java
index 45b6f73..dec2bf5 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestCollectionAPI.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestCollectionAPI.java
@@ -77,6 +77,7 @@ public class TestCollectionAPI extends ReplicaPropertiesBase {
     clusterStatusWithRouteKey();
     clusterStatusAliasTest();
     clusterStatusRolesTest();
+    clusterStatusBadCollectionTest();
     replicaPropTest();
     clusterStatusZNodeVersion();
     testClusterStateMigration();
@@ -318,6 +319,24 @@ public class TestCollectionAPI extends ReplicaPropertiesBase {
     }
   }
 
+  private void clusterStatusBadCollectionTest() throws Exception {
+    try (CloudSolrClient client = createCloudClient(null)) {
+      ModifiableSolrParams params = new ModifiableSolrParams();
+      params.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString());
+      params.set("collection", "bad_collection_name");
+      SolrRequest request = new QueryRequest(params);
+      request.setPath("/admin/collections");
+
+      try {
+        client.request(request);
+        fail("Collection does not exist. An exception should be thrown");
+      } catch (SolrException e) {
+        //expected
+        assertTrue(e.getMessage().contains("Collection: bad_collection_name not found"));
+      }
+    }
+  }
+
   private void replicaPropTest() throws Exception {
     try (CloudSolrClient client = createCloudClient(null)) {
       client.connect();
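
Outside of tests, the same status call can be issued with SolrJ in essentially the form the new test uses (the zkHost and collection name are placeholders):

  try (CloudSolrClient client = new CloudSolrClient("zkHost:2181")) {
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString());
    params.set("collection", "collection1");
    SolrRequest request = new QueryRequest(params);
    request.setPath("/admin/collections");
    NamedList<Object> response = client.request(request);
    // with this fix, a missing collection fails fast with "Collection: ... not found",
    // while a full-cluster status call simply skips collections deleted mid-request
  }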


[39/50] [abbrv] lucene-solr git commit: SOLR-8765: Fix CollectionAdminRequest.SplitShard to accept requests without the 'shard' parameter

Posted by ho...@apache.org.
SOLR-8765: Fix CollectionAdminRequest.SplitShard to accept requests without the 'shard' parameter


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/b0caca3b
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/b0caca3b
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/b0caca3b

Branch: refs/heads/jira/SOLR-445
Commit: b0caca3b60b8653a5b2539c39455bf06bcc407bf
Parents: 209f5c2
Author: anshum <an...@apache.org>
Authored: Thu Mar 10 13:59:37 2016 -0800
Committer: anshum <an...@apache.org>
Committed: Thu Mar 10 16:49:02 2016 -0800

----------------------------------------------------------------------
 .../solr/handler/admin/CollectionsHandler.java  |  2 +-
 .../solrj/request/CollectionAdminRequest.java   | 36 +++++++++++++-------
 .../common/params/CollectionAdminParams.java    |  2 ++
 3 files changed, 26 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b0caca3b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
index 06968c3..b4d0a1d 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
@@ -454,7 +454,7 @@ public class CollectionsHandler extends RequestHandlerBase {
         String splitKey = req.getParams().get("split.key");
 
         if (splitKey == null && shard == null) {
-          throw new SolrException(ErrorCode.BAD_REQUEST, "Missing required parameter: shard");
+          throw new SolrException(ErrorCode.BAD_REQUEST, "At least one of shard, or split.key should be specified.");
         }
         if (splitKey != null && shard != null) {
           throw new SolrException(ErrorCode.BAD_REQUEST,

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b0caca3b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
index 76eb19f..0c25e09 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
@@ -616,27 +616,30 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
   /**
    * Returns a SolrRequest to split a shard in a collection
    */
-  public static SplitShard splitShard(String collection, String shard) {
-    return new SplitShard(collection, shard);
+  public static SplitShard splitShard(String collection) {
+    return new SplitShard(collection);
   }
 
   // SPLITSHARD request
-  public static class SplitShard extends AsyncShardSpecificAdminRequest {
+  public static class SplitShard extends AsyncCollectionAdminRequest {
+    protected String collection;
     protected String ranges;
     protected String splitKey;
+    protected String shard;
 
     private Properties properties;
 
-    private SplitShard(String collection, String shard) {
-      super(CollectionAction.SPLITSHARD, collection, shard);
+    private SplitShard(String collection) {
+      super(CollectionAction.SPLITSHARD);
+      this.collection = collection;
     }
 
     /**
-     * @deprecated Use {@link #splitShard(String, String)}
+     * @deprecated Use {@link #splitShard(String)}
      */
     @Deprecated
     public SplitShard() {
-      super(CollectionAction.SPLITSHARD, null, null);
+      super(CollectionAction.SPLITSHARD);
     }
 
     public SplitShard setRanges(String ranges) { this.ranges = ranges; return this; }
@@ -660,15 +663,12 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
       return this;
     }
 
-    @Override
     @Deprecated
     public SplitShard setCollectionName(String collection) {
       this.collection = collection;
       return this;
     }
 
-    @Override
-    @Deprecated
     public SplitShard setShardName(String shard) {
       this.shard = shard;
       return this;
@@ -684,10 +684,20 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     @Override
     public SolrParams getParams() {
       ModifiableSolrParams params = (ModifiableSolrParams) super.getParams();
-      params.set( "ranges", ranges);
 
-      if(splitKey != null)
-        params.set("split.key", this.splitKey);
+      if(this.collection == null) {
+        throw new IllegalArgumentException("You must set collection name for this request.");
+      }
+
+      params.set(CollectionAdminParams.COLLECTION, collection);
+
+      if (this.shard == null && this.splitKey == null) {
+        throw new IllegalArgumentException("You must set shardname OR splitkey for this request.");
+      }
+
+      params.set("shard", shard);
+      params.set("split.key", this.splitKey);
+      params.set( "ranges", ranges);
 
       if(properties != null) {
         addProperties(params, properties);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b0caca3b/solr/solrj/src/java/org/apache/solr/common/params/CollectionAdminParams.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/CollectionAdminParams.java b/solr/solrj/src/java/org/apache/solr/common/params/CollectionAdminParams.java
index b9bf717..e817dd9 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/CollectionAdminParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/CollectionAdminParams.java
@@ -21,4 +21,6 @@ public abstract class CollectionAdminParams {
   /* Param used by DELETESTATUS call to clear all stored responses */
   public static final String FLUSH = "flush";
 
+  public static final String COLLECTION = "collection";
+
 }
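
With this change the fluent request can target either an explicit shard or a route key; a sketch (names are placeholders, client is an existing SolrClient):

  // split an explicit shard
  CollectionAdminRequest.splitShard("collection1")
      .setShardName("shard1")
      .process(client);

  // or split by route key, with no shard parameter at all
  CollectionAdminRequest.splitShard("collection1")
      .setSplitKey("a!")
      .process(client);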


[33/50] [abbrv] lucene-solr git commit: LUCENE-7086: move SlowCompositeReaderWrapper to misc module, and throw clear exc if you try to use it with points

Posted by ho...@apache.org.
LUCENE-7086: move SlowCompositeReaderWrapper to misc module, and throw clear exc if you try to use it with points

Squashed commits:

commit e26b065c71388407bc6725256ca43d7bb30dee29
Author: Mike McCandless <mi...@apache.org>
Date:   Thu Mar 10 14:16:45 2016 -0500

    simplify the checking for incoming points

commit b7254376dcb398c7739aab4544118bb4526961d5
Merge: 8ec82a0 d35d569
Author: Mike McCandless <mi...@apache.org>
Date:   Thu Mar 10 14:05:24 2016 -0500

    Merge branch 'master' into slow_wrapper

    Conflicts:
    	lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCache.java
    	lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java

commit 8ec82a0d9a0dd946d96da20962bb2ea95758edbe
Author: Mike McCandless <mi...@apache.org>
Date:   Thu Mar 10 13:56:25 2016 -0500

    LUCENE-7086: move SlowCompositeReaderWrapper to misc module


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/7523ca11
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/7523ca11
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/7523ca11

Branch: refs/heads/jira/SOLR-445
Commit: 7523ca116276fbcf03bfa630c859c4a58f77add4
Parents: 8cc978b
Author: Mike McCandless <mi...@apache.org>
Authored: Thu Mar 10 16:08:59 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Thu Mar 10 16:08:59 2016 -0500

----------------------------------------------------------------------
 .../lucene50/TestLucene50DocValuesFormat.java   |   2 +-
 .../classification/ClassificationTestBase.java  |   7 +-
 .../DocumentClassificationTestBase.java         |   4 +-
 .../classification/utils/DataSplitterTest.java  |   5 +-
 .../apache/lucene/index/CompositeReader.java    |   2 -
 .../org/apache/lucene/index/IndexReader.java    |   2 -
 .../org/apache/lucene/index/MultiDocValues.java |   6 +-
 .../index/SlowCompositeReaderWrapper.java       | 271 ------------------
 .../lucene54/TestLucene54DocValuesFormat.java   |   6 +-
 .../index/TestBinaryDocValuesUpdates.java       |  28 +-
 .../apache/lucene/index/TestCustomNorms.java    |   4 +-
 .../index/TestDemoParallelLeafReader.java       |  21 +-
 .../lucene/index/TestDirectoryReader.java       |   2 +-
 .../org/apache/lucene/index/TestDocValues.java  |  14 +-
 .../lucene/index/TestDocValuesIndexing.java     |  25 +-
 .../lucene/index/TestDocsAndPositions.java      |   4 +-
 .../apache/lucene/index/TestDocumentWriter.java |   2 +-
 .../apache/lucene/index/TestDuelingCodecs.java  |   4 +-
 .../index/TestExitableDirectoryReader.java      |  17 +-
 .../lucene/index/TestFilterLeafReader.java      |   8 +-
 .../test/org/apache/lucene/index/TestFlex.java  |   2 +-
 .../lucene/index/TestIndexReaderClose.java      |  52 +---
 .../apache/lucene/index/TestIndexWriter.java    |   6 +-
 .../lucene/index/TestLazyProxSkipping.java      |   2 +-
 .../apache/lucene/index/TestMultiDocValues.java |  16 +-
 .../lucene/index/TestMultiLevelSkipList.java    |   2 +-
 .../test/org/apache/lucene/index/TestNorms.java |   4 +-
 .../index/TestNumericDocValuesUpdates.java      |  30 +-
 .../org/apache/lucene/index/TestOmitNorms.java  |   8 +-
 .../apache/lucene/index/TestOmitPositions.java  |   2 +-
 .../org/apache/lucene/index/TestOmitTf.java     |   6 +-
 .../org/apache/lucene/index/TestOrdinalMap.java |   5 +-
 .../index/TestParallelCompositeReader.java      |  34 +--
 .../lucene/index/TestParallelLeafReader.java    |  28 +-
 .../index/TestParallelReaderEmptyIndex.java     |  22 +-
 .../lucene/index/TestParallelTermEnum.java      |   4 +-
 .../org/apache/lucene/index/TestPayloads.java   |  11 +-
 .../apache/lucene/index/TestPointValues.java    |   4 +-
 .../lucene/index/TestPostingsOffsets.java       |   5 +-
 .../apache/lucene/index/TestReaderClosed.java   |   6 +-
 .../index/TestReaderWrapperDVTypeCheck.java     |   5 +-
 .../lucene/index/TestSegmentTermEnum.java       |   2 +-
 .../apache/lucene/index/TestStressAdvance.java  |   2 +-
 .../org/apache/lucene/index/TestTermsEnum.java  |   6 +-
 .../lucene/search/TestDisjunctionMaxQuery.java  |   6 +-
 .../lucene/search/TestMinShouldMatch2.java      |   2 +-
 .../lucene/search/TestMultiPhraseEnum.java      |   8 +-
 .../apache/lucene/search/TestPhraseQuery.java   |   4 +-
 .../lucene/search/TestPositionIncrement.java    |  10 +-
 .../lucene/search/TestSimilarityProvider.java   |  11 +-
 .../apache/lucene/search/TestTermScorer.java    |   7 +-
 .../TestUsageTrackingFilterCachingPolicy.java   |  17 +-
 .../search/spans/TestFieldMaskingSpanQuery.java |  11 +-
 .../search/spans/TestNearSpansOrdered.java      |  29 +-
 .../lucene/search/spans/TestSpanCollection.java |   9 +-
 .../search/spans/TestSpanContainQuery.java      |   7 +-
 .../apache/lucene/search/spans/TestSpans.java   |  13 +-
 .../DefaultSortedSetDocValuesReaderState.java   |  55 +++-
 .../sortedset/TestSortedSetDocValuesFacets.java |  36 ---
 .../lucene/search/grouping/TestGrouping.java    |  12 +-
 .../apache/lucene/search/join/TestJoinUtil.java |   8 +-
 .../memory/TestMemoryIndexAgainstRAMDir.java    |  12 +-
 .../index/SlowCompositeReaderWrapper.java       | 275 +++++++++++++++++++
 .../index/TestSlowCompositeReaderWrapper.java   |  91 ++++++
 .../lucene/uninverting/TestDocTermOrds.java     |  12 +-
 .../lucene/uninverting/TestFieldCache.java      |  10 +-
 .../uninverting/TestFieldCacheReopen.java       |   2 +-
 .../uninverting/TestFieldCacheVsDocValues.java  |   2 +-
 .../uninverting/TestFieldCacheWithThreads.java  |   2 +-
 .../uninverting/TestLegacyFieldCache.java       |  10 +-
 .../lucene/queries/CommonTermsQueryTest.java    |   6 +-
 .../apache/lucene/queries/TermsQueryTest.java   |   4 +-
 .../function/TestSortedSetFieldSource.java      |   2 +-
 .../lucene/queries/payloads/PayloadHelper.java  |   3 +-
 .../queries/payloads/TestPayloadSpans.java      |  39 ++-
 .../queries/payloads/TestPayloadTermQuery.java  |  11 +-
 .../index/BaseDocValuesFormatTestCase.java      |  64 +++--
 .../index/BaseIndexFileFormatTestCase.java      |   6 +-
 .../lucene/index/BasePointsFormatTestCase.java  |   4 +-
 .../index/BasePostingsFormatTestCase.java       |  66 ++---
 .../index/BaseStoredFieldsFormatTestCase.java   |   2 +-
 .../index/BaseTermVectorsFormatTestCase.java    |  12 +-
 .../org/apache/lucene/search/QueryUtils.java    |   6 +-
 .../lucene/search/spans/MultiSpansWrapper.java  |  51 ----
 .../org/apache/lucene/util/LuceneTestCase.java  |  53 ++--
 .../lucene/analysis/TestMockAnalyzer.java       |   2 +-
 .../TestCompressingStoredFieldsFormat.java      |   5 +-
 .../TestCompressingTermVectorsFormat.java       |   8 +-
 .../lucene/index/TestAssertingLeafReader.java   |   5 +-
 89 files changed, 828 insertions(+), 880 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene50/TestLucene50DocValuesFormat.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene50/TestLucene50DocValuesFormat.java b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene50/TestLucene50DocValuesFormat.java
index 2c38728..9f174dd 100644
--- a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene50/TestLucene50DocValuesFormat.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene50/TestLucene50DocValuesFormat.java
@@ -200,7 +200,7 @@ public class TestLucene50DocValuesFormat extends BaseCompressingDocValuesFormatT
     
     // now compare again after the merge
     ir = writer.getReader();
-    LeafReader ar = getOnlySegmentReader(ir);
+    LeafReader ar = getOnlyLeafReader(ir);
     Terms terms = ar.terms("indexed");
     if (terms != null) {
       assertEquals(terms.size(), ar.getSortedSetDocValues("dv").getValueCount());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/classification/src/test/org/apache/lucene/classification/ClassificationTestBase.java
----------------------------------------------------------------------
diff --git a/lucene/classification/src/test/org/apache/lucene/classification/ClassificationTestBase.java b/lucene/classification/src/test/org/apache/lucene/classification/ClassificationTestBase.java
index 605b490..331a74b 100644
--- a/lucene/classification/src/test/org/apache/lucene/classification/ClassificationTestBase.java
+++ b/lucene/classification/src/test/org/apache/lucene/classification/ClassificationTestBase.java
@@ -27,7 +27,6 @@ import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.SlowCompositeReaderWrapper;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
@@ -190,7 +189,8 @@ public abstract class ClassificationTestBase<T> extends LuceneTestCase {
     indexWriter.addDocument(doc);
 
     indexWriter.commit();
-    return SlowCompositeReaderWrapper.wrap(indexWriter.getReader());
+    indexWriter.forceMerge(1);
+    return getOnlyLeafReader(indexWriter.getReader());
   }
 
   protected LeafReader getRandomIndex(Analyzer analyzer, int size) throws IOException {
@@ -213,7 +213,8 @@ public abstract class ClassificationTestBase<T> extends LuceneTestCase {
       indexWriter.addDocument(doc);
     }
     indexWriter.commit();
-    return SlowCompositeReaderWrapper.wrap(indexWriter.getReader());
+    indexWriter.forceMerge(1);
+    return getOnlyLeafReader(indexWriter.getReader());
   }
 
   private String createRandomString(Random random) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/classification/src/test/org/apache/lucene/classification/document/DocumentClassificationTestBase.java
----------------------------------------------------------------------
diff --git a/lucene/classification/src/test/org/apache/lucene/classification/document/DocumentClassificationTestBase.java b/lucene/classification/src/test/org/apache/lucene/classification/document/DocumentClassificationTestBase.java
index 316802b..4193bde 100644
--- a/lucene/classification/src/test/org/apache/lucene/classification/document/DocumentClassificationTestBase.java
+++ b/lucene/classification/src/test/org/apache/lucene/classification/document/DocumentClassificationTestBase.java
@@ -30,7 +30,6 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.SlowCompositeReaderWrapper;
 import org.apache.lucene.util.BytesRef;
 import org.junit.Before;
 
@@ -202,7 +201,8 @@ public abstract class DocumentClassificationTestBase<T> extends ClassificationTe
     indexWriter.addDocument(doc);
 
     indexWriter.commit();
-    return SlowCompositeReaderWrapper.wrap(indexWriter.getReader());
+    indexWriter.forceMerge(1);
+    return getOnlyLeafReader(indexWriter.getReader());
   }
 
   protected Document getVideoGameDocument() {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/classification/src/test/org/apache/lucene/classification/utils/DataSplitterTest.java
----------------------------------------------------------------------
diff --git a/lucene/classification/src/test/org/apache/lucene/classification/utils/DataSplitterTest.java b/lucene/classification/src/test/org/apache/lucene/classification/utils/DataSplitterTest.java
index d69fefb..2984bb5 100644
--- a/lucene/classification/src/test/org/apache/lucene/classification/utils/DataSplitterTest.java
+++ b/lucene/classification/src/test/org/apache/lucene/classification/utils/DataSplitterTest.java
@@ -27,7 +27,6 @@ import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.SlowCompositeReaderWrapper;
 import org.apache.lucene.store.BaseDirectoryWrapper;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.TestUtil;
@@ -75,9 +74,9 @@ public class DataSplitterTest extends LuceneTestCase {
     }
 
     indexWriter.commit();
+    indexWriter.forceMerge(1);
 
-    originalIndex = SlowCompositeReaderWrapper.wrap(indexWriter.getReader());
-
+    originalIndex = getOnlyLeafReader(indexWriter.getReader());
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/java/org/apache/lucene/index/CompositeReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/CompositeReader.java b/lucene/core/src/java/org/apache/lucene/index/CompositeReader.java
index 0f6a44e..83bb92a 100644
--- a/lucene/core/src/java/org/apache/lucene/index/CompositeReader.java
+++ b/lucene/core/src/java/org/apache/lucene/index/CompositeReader.java
@@ -26,8 +26,6 @@ import org.apache.lucene.store.*;
  be used to get stored fields from the underlying LeafReaders,
  but it is not possible to directly retrieve postings. To do that, get
  the {@link LeafReaderContext} for all sub-readers via {@link #leaves()}.
- Alternatively, you can mimic an {@link LeafReader} (with a serious slowdown),
- by wrapping composite readers with {@link SlowCompositeReaderWrapper}.
  
  <p>IndexReader instances for indexes on disk are usually constructed
  with a call to one of the static <code>DirectoryReader.open()</code> methods,

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/java/org/apache/lucene/index/IndexReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexReader.java b/lucene/core/src/java/org/apache/lucene/index/IndexReader.java
index 865f816..976f548 100644
--- a/lucene/core/src/java/org/apache/lucene/index/IndexReader.java
+++ b/lucene/core/src/java/org/apache/lucene/index/IndexReader.java
@@ -56,8 +56,6 @@ import org.apache.lucene.util.IOUtils;
   be used to get stored fields from the underlying LeafReaders,
   but it is not possible to directly retrieve postings. To do that, get
   the sub-readers via {@link CompositeReader#getSequentialSubReaders}.
-  Alternatively, you can mimic an {@link LeafReader} (with a serious slowdown),
-  by wrapping composite readers with {@link SlowCompositeReaderWrapper}.
  </ul>
  
  <p>IndexReader instances for indexes on disk are usually constructed
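
The replacement pattern the removed javadoc pointed to is to gather the leaves and operate per segment; a sketch (the field name is illustrative, reader is any open IndexReader):

  for (LeafReaderContext ctx : reader.leaves()) {
    LeafReader leaf = ctx.reader();
    NumericDocValues dv = leaf.getNumericDocValues("price");  // may be null for this segment
    // work per segment; add ctx.docBase when mapping segment docIDs to top-level docIDs
  }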

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java b/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java
index 383139f..3394797 100644
--- a/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java
@@ -486,8 +486,8 @@ public class MultiDocValues {
 
     private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(OrdinalMap.class);
 
-    // cache key of whoever asked for this awful thing
-    final Object owner;
+    /** Cache key of whoever asked for this awful thing */
+    public final Object owner;
     // globalOrd -> (globalOrd - segmentOrd) where segmentOrd is the the ordinal in the first segment that contains this term
     final PackedLongValues globalOrdDeltas;
     // globalOrd -> first segment container
@@ -703,7 +703,7 @@ public class MultiDocValues {
     LongValues currentGlobalOrds;
     
     /** Creates a new MultiSortedSetDocValues over <code>values</code> */
-    MultiSortedSetDocValues(SortedSetDocValues values[], int docStarts[], OrdinalMap mapping) throws IOException {
+    public MultiSortedSetDocValues(SortedSetDocValues values[], int docStarts[], OrdinalMap mapping) throws IOException {
       assert docStarts.length == values.length + 1;
       this.values = values;
       this.docStarts = docStarts;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java b/lucene/core/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java
deleted file mode 100644
index e44c53c..0000000
--- a/lucene/core/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java
+++ /dev/null
@@ -1,271 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.index;
-
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.lucene.index.MultiDocValues.MultiSortedDocValues;
-import org.apache.lucene.index.MultiDocValues.MultiSortedSetDocValues;
-import org.apache.lucene.index.MultiDocValues.OrdinalMap;
-import org.apache.lucene.util.Bits;
-
-/**
- * This class forces a composite reader (eg a {@link
- * MultiReader} or {@link DirectoryReader}) to emulate a
- * {@link LeafReader}.  This requires implementing the postings
- * APIs on-the-fly, using the static methods in {@link
- * MultiFields}, {@link MultiDocValues}, by stepping through
- * the sub-readers to merge fields/terms, appending docs, etc.
- *
- * <p><b>NOTE</b>: this class almost always results in a
- * performance hit.  If this is important to your use case,
- * you'll get better performance by gathering the sub readers using
- * {@link IndexReader#getContext()} to get the
- * leaves and then operate per-LeafReader,
- * instead of using this class.
- */
-public final class SlowCompositeReaderWrapper extends LeafReader {
-
-  private final CompositeReader in;
-  private final Fields fields;
-  private final boolean merging;
-  
-  /** This method is sugar for getting an {@link LeafReader} from
-   * an {@link IndexReader} of any kind. If the reader is already atomic,
-   * it is returned unchanged, otherwise wrapped by this class.
-   */
-  public static LeafReader wrap(IndexReader reader) throws IOException {
-    if (reader instanceof CompositeReader) {
-      return new SlowCompositeReaderWrapper((CompositeReader) reader, false);
-    } else {
-      assert reader instanceof LeafReader;
-      return (LeafReader) reader;
-    }
-  }
-
-  SlowCompositeReaderWrapper(CompositeReader reader, boolean merging) throws IOException {
-    super();
-    in = reader;
-    fields = MultiFields.getFields(in);
-    in.registerParentReader(this);
-    this.merging = merging;
-  }
-
-  @Override
-  public String toString() {
-    return "SlowCompositeReaderWrapper(" + in + ")";
-  }
-
-  @Override
-  public void addCoreClosedListener(CoreClosedListener listener) {
-    addCoreClosedListenerAsReaderClosedListener(in, listener);
-  }
-
-  @Override
-  public void removeCoreClosedListener(CoreClosedListener listener) {
-    removeCoreClosedListenerAsReaderClosedListener(in, listener);
-  }
-
-  @Override
-  public Fields fields() {
-    ensureOpen();
-    return fields;
-  }
-
-  @Override
-  public NumericDocValues getNumericDocValues(String field) throws IOException {
-    ensureOpen();
-    return MultiDocValues.getNumericValues(in, field);
-  }
-
-  @Override
-  public Bits getDocsWithField(String field) throws IOException {
-    ensureOpen();
-    return MultiDocValues.getDocsWithField(in, field);
-  }
-
-  @Override
-  public BinaryDocValues getBinaryDocValues(String field) throws IOException {
-    ensureOpen();
-    return MultiDocValues.getBinaryValues(in, field);
-  }
-  
-  @Override
-  public SortedNumericDocValues getSortedNumericDocValues(String field) throws IOException {
-    ensureOpen();
-    return MultiDocValues.getSortedNumericValues(in, field);
-  }
-
-  @Override
-  public SortedDocValues getSortedDocValues(String field) throws IOException {
-    ensureOpen();
-    OrdinalMap map = null;
-    synchronized (cachedOrdMaps) {
-      map = cachedOrdMaps.get(field);
-      if (map == null) {
-        // uncached, or not a multi dv
-        SortedDocValues dv = MultiDocValues.getSortedValues(in, field);
-        if (dv instanceof MultiSortedDocValues) {
-          map = ((MultiSortedDocValues)dv).mapping;
-          if (map.owner == getCoreCacheKey() && merging == false) {
-            cachedOrdMaps.put(field, map);
-          }
-        }
-        return dv;
-      }
-    }
-    int size = in.leaves().size();
-    final SortedDocValues[] values = new SortedDocValues[size];
-    final int[] starts = new int[size+1];
-    for (int i = 0; i < size; i++) {
-      LeafReaderContext context = in.leaves().get(i);
-      final LeafReader reader = context.reader();
-      final FieldInfo fieldInfo = reader.getFieldInfos().fieldInfo(field);
-      if (fieldInfo != null && fieldInfo.getDocValuesType() != DocValuesType.SORTED) {
-        return null;
-      }
-      SortedDocValues v = reader.getSortedDocValues(field);
-      if (v == null) {
-        v = DocValues.emptySorted();
-      }
-      values[i] = v;
-      starts[i] = context.docBase;
-    }
-    starts[size] = maxDoc();
-    return new MultiSortedDocValues(values, starts, map);
-  }
-  
-  @Override
-  public SortedSetDocValues getSortedSetDocValues(String field) throws IOException {
-    ensureOpen();
-    OrdinalMap map = null;
-    synchronized (cachedOrdMaps) {
-      map = cachedOrdMaps.get(field);
-      if (map == null) {
-        // uncached, or not a multi dv
-        SortedSetDocValues dv = MultiDocValues.getSortedSetValues(in, field);
-        if (dv instanceof MultiSortedSetDocValues) {
-          map = ((MultiSortedSetDocValues)dv).mapping;
-          if (map.owner == getCoreCacheKey() && merging == false) {
-            cachedOrdMaps.put(field, map);
-          }
-        }
-        return dv;
-      }
-    }
-   
-    assert map != null;
-    int size = in.leaves().size();
-    final SortedSetDocValues[] values = new SortedSetDocValues[size];
-    final int[] starts = new int[size+1];
-    for (int i = 0; i < size; i++) {
-      LeafReaderContext context = in.leaves().get(i);
-      final LeafReader reader = context.reader();
-      final FieldInfo fieldInfo = reader.getFieldInfos().fieldInfo(field);
-      if(fieldInfo != null && fieldInfo.getDocValuesType() != DocValuesType.SORTED_SET){
-        return null;
-      }
-      SortedSetDocValues v = reader.getSortedSetDocValues(field);
-      if (v == null) {
-        v = DocValues.emptySortedSet();
-      }
-      values[i] = v;
-      starts[i] = context.docBase;
-    }
-    starts[size] = maxDoc();
-    return new MultiSortedSetDocValues(values, starts, map);
-  }
-  
-  // TODO: this could really be a weak map somewhere else on the coreCacheKey,
-  // but do we really need to optimize slow-wrapper any more?
-  private final Map<String,OrdinalMap> cachedOrdMaps = new HashMap<>();
-
-  @Override
-  public NumericDocValues getNormValues(String field) throws IOException {
-    ensureOpen();
-    return MultiDocValues.getNormValues(in, field);
-  }
-  
-  @Override
-  public Fields getTermVectors(int docID) throws IOException {
-    ensureOpen();
-    return in.getTermVectors(docID);
-  }
-
-  @Override
-  public int numDocs() {
-    // Don't call ensureOpen() here (it could affect performance)
-    return in.numDocs();
-  }
-
-  @Override
-  public int maxDoc() {
-    // Don't call ensureOpen() here (it could affect performance)
-    return in.maxDoc();
-  }
-
-  @Override
-  public void document(int docID, StoredFieldVisitor visitor) throws IOException {
-    ensureOpen();
-    in.document(docID, visitor);
-  }
-
-  @Override
-  public Bits getLiveDocs() {
-    ensureOpen();
-    return MultiFields.getLiveDocs(in);
-  }
-
-  @Override
-  public PointValues getPointValues() {
-    ensureOpen();
-    return null;
-  }
-
-  @Override
-  public FieldInfos getFieldInfos() {
-    ensureOpen();
-    return MultiFields.getMergedFieldInfos(in);
-  }
-
-  @Override
-  public Object getCoreCacheKey() {
-    return in.getCoreCacheKey();
-  }
-
-  @Override
-  public Object getCombinedCoreAndDeletesKey() {
-    return in.getCombinedCoreAndDeletesKey();
-  }
-
-  @Override
-  protected void doClose() throws IOException {
-    // TODO: as this is a wrapper, should we really close the delegate?
-    in.close();
-  }
-
-  @Override
-  public void checkIntegrity() throws IOException {
-    ensureOpen();
-    for (LeafReaderContext ctx : in.leaves()) {
-      ctx.reader().checkIntegrity();
-    }
-  }
-}

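The Javadoc of the class deleted above already names the replacement: gather the sub-readers via IndexReader#getContext() and operate per LeafReader. A minimal sketch of that per-leaf pattern, assuming an open reader and a hypothetical numeric doc-values field named "field":

    import java.io.IOException;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.LeafReader;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.index.NumericDocValues;

    // Sketch: walk the leaves instead of wrapping the composite reader.
    static void visitLeaves(IndexReader reader) throws IOException {
      for (LeafReaderContext ctx : reader.leaves()) {
        LeafReader leaf = ctx.reader();
        NumericDocValues dv = leaf.getNumericDocValues("field"); // null if this leaf lacks the field
        // the global docID of local doc i in this leaf is ctx.docBase + i
      }
    }
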
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/codecs/lucene54/TestLucene54DocValuesFormat.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene54/TestLucene54DocValuesFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene54/TestLucene54DocValuesFormat.java
index cede1d7..b6f178d 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/lucene54/TestLucene54DocValuesFormat.java
+++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene54/TestLucene54DocValuesFormat.java
@@ -362,7 +362,7 @@ public class TestLucene54DocValuesFormat extends BaseCompressingDocValuesFormatT
     
     // now compare again after the merge
     ir = writer.getReader();
-    LeafReader ar = getOnlySegmentReader(ir);
+    LeafReader ar = getOnlyLeafReader(ir);
     Terms terms = ar.terms("indexed");
     if (terms != null) {
       assertEquals(terms.size(), ar.getSortedSetDocValues("dv").getValueCount());
@@ -541,7 +541,7 @@ public class TestLucene54DocValuesFormat extends BaseCompressingDocValuesFormatT
       w.forceMerge(1);
       DirectoryReader r = DirectoryReader.open(w);
       w.close();
-      SegmentReader sr = getOnlySegmentReader(r);
+      LeafReader sr = getOnlyLeafReader(r);
       assertEquals(maxDoc, sr.maxDoc());
       SortedSetDocValues values = sr.getSortedSetDocValues("sset");
       assertNotNull(values);
@@ -591,7 +591,7 @@ public class TestLucene54DocValuesFormat extends BaseCompressingDocValuesFormatT
       w.forceMerge(1);
       DirectoryReader r = DirectoryReader.open(w);
       w.close();
-      SegmentReader sr = getOnlySegmentReader(r);
+      LeafReader sr = getOnlyLeafReader(r);
       assertEquals(maxDoc, sr.maxDoc());
       SortedNumericDocValues values = sr.getSortedNumericDocValues("snum");
       assertNotNull(values);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java b/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java
index 4025f58..8dc8a3c 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java
@@ -253,16 +253,14 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
       writer.close();
     }
     
-    LeafReader slow = SlowCompositeReaderWrapper.wrap(reader);
-    
-    Bits liveDocs = slow.getLiveDocs();
+    Bits liveDocs = MultiFields.getLiveDocs(reader);
     boolean[] expectedLiveDocs = new boolean[] { true, false, false, true, true, true };
     for (int i = 0; i < expectedLiveDocs.length; i++) {
       assertEquals(expectedLiveDocs[i], liveDocs.get(i));
     }
     
     long[] expectedValues = new long[] { 1, 2, 3, 17, 5, 17};
-    BinaryDocValues bdv = slow.getBinaryDocValues("val");
+    BinaryDocValues bdv = MultiDocValues.getBinaryValues(reader, "val");
     for (int i = 0; i < expectedValues.length; i++) {
       assertEquals(expectedValues[i], getValue(bdv, i));
     }
@@ -469,10 +467,9 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
     
     final DirectoryReader reader = DirectoryReader.open(dir);
     
-    LeafReader r = SlowCompositeReaderWrapper.wrap(reader);
-    BinaryDocValues bdv = r.getBinaryDocValues("bdv");
-    SortedDocValues sdv = r.getSortedDocValues("sorted");
-    for (int i = 0; i < r.maxDoc(); i++) {
+    BinaryDocValues bdv = MultiDocValues.getBinaryValues(reader, "bdv");
+    SortedDocValues sdv = MultiDocValues.getSortedValues(reader, "sorted");
+    for (int i = 0; i < reader.maxDoc(); i++) {
       assertEquals(17, getValue(bdv, i));
       BytesRef term = sdv.get(i);
       assertEquals(new BytesRef("value"), term);
@@ -499,9 +496,8 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
     writer.close();
     
     final DirectoryReader reader = DirectoryReader.open(dir);
-    final LeafReader r = SlowCompositeReaderWrapper.wrap(reader);
-    BinaryDocValues bdv = r.getBinaryDocValues("bdv");
-    for (int i = 0; i < r.maxDoc(); i++) {
+    BinaryDocValues bdv = MultiDocValues.getBinaryValues(reader, "bdv");
+    for (int i = 0; i < reader.maxDoc(); i++) {
       assertEquals(3, getValue(bdv, i));
     }
     reader.close();
@@ -598,9 +594,8 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
     writer.close();
     
     final DirectoryReader reader = DirectoryReader.open(dir);
-    final LeafReader r = SlowCompositeReaderWrapper.wrap(reader);
-    BinaryDocValues bdv = r.getBinaryDocValues("bdv");
-    for (int i = 0; i < r.maxDoc(); i++) {
+    BinaryDocValues bdv = MultiDocValues.getBinaryValues(reader, "bdv");
+    for (int i = 0; i < reader.maxDoc(); i++) {
       assertEquals(3, getValue(bdv, i));
     }
     reader.close();
@@ -1018,9 +1013,8 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
     writer.close();
     
     DirectoryReader reader = DirectoryReader.open(dir);
-    LeafReader r = SlowCompositeReaderWrapper.wrap(reader);
-    BinaryDocValues f1 = r.getBinaryDocValues("f1");
-    BinaryDocValues f2 = r.getBinaryDocValues("f2");
+    BinaryDocValues f1 = MultiDocValues.getBinaryValues(reader, "f1");
+    BinaryDocValues f2 = MultiDocValues.getBinaryValues(reader, "f2");
     assertEquals(12L, getValue(f1, 0));
     assertEquals(13L, getValue(f2, 0));
     assertEquals(17L, getValue(f1, 1));

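The replacement pattern in the hunks above recurs throughout this commit: rather than wrapping the DirectoryReader, the tests ask the MultiFields/MultiDocValues statics for a merged view directly. A condensed sketch, assuming an open DirectoryReader and the field name used in this test:

    import java.io.IOException;
    import org.apache.lucene.index.BinaryDocValues;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.MultiDocValues;
    import org.apache.lucene.index.MultiFields;
    import org.apache.lucene.util.Bits;

    static void mergedViews(DirectoryReader reader) throws IOException {
      Bits liveDocs = MultiFields.getLiveDocs(reader);                     // was: slow.getLiveDocs()
      BinaryDocValues bdv = MultiDocValues.getBinaryValues(reader, "val"); // was: slow.getBinaryDocValues("val")
    }
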
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java b/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java
index 1d0ba54..c513093 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java
@@ -66,8 +66,8 @@ public class TestCustomNorms extends LuceneTestCase {
     }
     writer.commit();
     writer.close();
-    LeafReader open = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir));
-    NumericDocValues norms = open.getNormValues(floatTestField);
+    DirectoryReader open = DirectoryReader.open(dir);
+    NumericDocValues norms = MultiDocValues.getNormValues(open, floatTestField);
     assertNotNull(norms);
     for (int i = 0; i < open.maxDoc(); i++) {
       Document document = open.document(i);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java b/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java
index 0034cee..9f3339c 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java
@@ -405,13 +405,8 @@ public class TestDemoParallelLeafReader extends LuceneTestCase {
             //TestUtil.checkIndex(dir);
 
             SegmentInfos infos = SegmentInfos.readLatestCommit(dir);
-            final LeafReader parLeafReader;
-            if (infos.size() == 1) {
-              parLeafReader = new SegmentReader(infos.info(0), IOContext.DEFAULT);
-            } else {
-              // This just means we didn't forceMerge above:
-              parLeafReader = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir));
-            }
+            assert infos.size() == 1;
+            final LeafReader parLeafReader = new SegmentReader(infos.info(0), IOContext.DEFAULT);
 
             //checkParallelReader(leaf, parLeafReader, schemaGen);
 
@@ -682,9 +677,7 @@ public class TestDemoParallelLeafReader extends LuceneTestCase {
           w.addDocument(newDoc);
         }
 
-        if (random().nextBoolean()) {
-          w.forceMerge(1);
-        }
+        w.forceMerge(1);
 
         w.close();
       }
@@ -750,9 +743,7 @@ public class TestDemoParallelLeafReader extends LuceneTestCase {
           }
         }
 
-        if (random().nextBoolean()) {
-          w.forceMerge(1);
-        }
+        w.forceMerge(1);
 
         w.close();
       }
@@ -845,9 +836,7 @@ public class TestDemoParallelLeafReader extends LuceneTestCase {
           }
         }
 
-        if (random().nextBoolean()) {
-          w.forceMerge(1);
-        }
+        w.forceMerge(1);
 
         w.close();
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReader.java b/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReader.java
index 24b2c50..8e62094 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReader.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReader.java
@@ -761,7 +761,7 @@ public class TestDirectoryReader extends LuceneTestCase {
     writer.commit();
   
     DirectoryReader r = DirectoryReader.open(dir);
-    LeafReader r1 = getOnlySegmentReader(r);
+    LeafReader r1 = getOnlyLeafReader(r);
     assertEquals(26, r1.terms("field").size());
     assertEquals(10, r1.terms("number").size());
     writer.addDocument(doc);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDocValues.java b/lucene/core/src/test/org/apache/lucene/index/TestDocValues.java
index ad4d60c..2266caf 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestDocValues.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestDocValues.java
@@ -41,7 +41,7 @@ public class TestDocValues extends LuceneTestCase {
     IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null));
     iw.addDocument(new Document());
     DirectoryReader dr = DirectoryReader.open(iw);
-    LeafReader r = getOnlySegmentReader(dr);
+    LeafReader r = getOnlyLeafReader(dr);
     
     // ok
     assertNotNull(DocValues.getBinary(r, "bogus"));
@@ -66,7 +66,7 @@ public class TestDocValues extends LuceneTestCase {
     doc.add(new StringField("foo", "bar", Field.Store.NO));
     iw.addDocument(doc);
     DirectoryReader dr = DirectoryReader.open(iw);
-    LeafReader r = getOnlySegmentReader(dr);
+    LeafReader r = getOnlyLeafReader(dr);
    
     // errors
     expectThrows(IllegalStateException.class, () -> {
@@ -103,7 +103,7 @@ public class TestDocValues extends LuceneTestCase {
     doc.add(new NumericDocValuesField("foo", 3));
     iw.addDocument(doc);
     DirectoryReader dr = DirectoryReader.open(iw);
-    LeafReader r = getOnlySegmentReader(dr);
+    LeafReader r = getOnlyLeafReader(dr);
     
     // ok
     assertNotNull(DocValues.getNumeric(r, "foo"));
@@ -136,7 +136,7 @@ public class TestDocValues extends LuceneTestCase {
     doc.add(new BinaryDocValuesField("foo", new BytesRef("bar")));
     iw.addDocument(doc);
     DirectoryReader dr = DirectoryReader.open(iw);
-    LeafReader r = getOnlySegmentReader(dr);
+    LeafReader r = getOnlyLeafReader(dr);
     
     // ok
     assertNotNull(DocValues.getBinary(r, "foo"));
@@ -171,7 +171,7 @@ public class TestDocValues extends LuceneTestCase {
     doc.add(new SortedDocValuesField("foo", new BytesRef("bar")));
     iw.addDocument(doc);
     DirectoryReader dr = DirectoryReader.open(iw);
-    LeafReader r = getOnlySegmentReader(dr);
+    LeafReader r = getOnlyLeafReader(dr);
     
     // ok
     assertNotNull(DocValues.getBinary(r, "foo"));
@@ -202,7 +202,7 @@ public class TestDocValues extends LuceneTestCase {
     doc.add(new SortedSetDocValuesField("foo", new BytesRef("bar")));
     iw.addDocument(doc);
     DirectoryReader dr = DirectoryReader.open(iw);
-    LeafReader r = getOnlySegmentReader(dr);
+    LeafReader r = getOnlyLeafReader(dr);
     
     // ok
     assertNotNull(DocValues.getSortedSet(r, "foo"));
@@ -237,7 +237,7 @@ public class TestDocValues extends LuceneTestCase {
     doc.add(new SortedNumericDocValuesField("foo", 3));
     iw.addDocument(doc);
     DirectoryReader dr = DirectoryReader.open(iw);
-    LeafReader r = getOnlySegmentReader(dr);
+    LeafReader r = getOnlyLeafReader(dr);
     
     // ok
     assertNotNull(DocValues.getSortedNumeric(r, "foo"));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java b/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java
index 44b5b75..2e0cbd9 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java
@@ -58,7 +58,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
     doc.add(newStringField("id", "1", Field.Store.YES));
     doc.add(new NumericDocValuesField("dv", 1));
     w.addDocument(doc);
-    IndexReader r1 = w.getReader();
+    DirectoryReader r1 = w.getReader();
     w.close();
 
     Directory d2 = newDirectory();
@@ -67,12 +67,12 @@ public class TestDocValuesIndexing extends LuceneTestCase {
     doc.add(newStringField("id", "2", Field.Store.YES));
     doc.add(new NumericDocValuesField("dv", 2));
     w.addDocument(doc);
-    IndexReader r2 = w.getReader();
+    DirectoryReader r2 = w.getReader();
     w.close();
 
     Directory d3 = newDirectory();
     w = new RandomIndexWriter(random(), d3);
-    w.addIndexes(SlowCodecReaderWrapper.wrap(SlowCompositeReaderWrapper.wrap(r1)), SlowCodecReaderWrapper.wrap(SlowCompositeReaderWrapper.wrap(r2)));
+    w.addIndexes(SlowCodecReaderWrapper.wrap(getOnlyLeafReader(r1)), SlowCodecReaderWrapper.wrap(getOnlyLeafReader(r2)));
     r1.close();
     d1.close();
     r2.close();
@@ -81,7 +81,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
     w.forceMerge(1);
     DirectoryReader r3 = w.getReader();
     w.close();
-    LeafReader sr = getOnlySegmentReader(r3);
+    LeafReader sr = getOnlyLeafReader(r3);
     assertEquals(2, sr.numDocs());
     NumericDocValues docValues = sr.getNumericDocValues("dv");
     assertNotNull(docValues);
@@ -109,7 +109,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
 
     DirectoryReader r = w.getReader();
     w.close();
-    assertEquals(17, DocValues.getNumeric(getOnlySegmentReader(r), "field").get(0));
+    assertEquals(17, DocValues.getNumeric(getOnlyLeafReader(r), "field").get(0));
     r.close();
     d.close();
   }
@@ -130,7 +130,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
 
     DirectoryReader r = w.getReader();
     w.close();
-    assertEquals(17, DocValues.getNumeric(getOnlySegmentReader(r), "field").get(0));
+    assertEquals(17, DocValues.getNumeric(getOnlyLeafReader(r), "field").get(0));
     r.close();
     d.close();
   }
@@ -150,7 +150,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
     });
 
     DirectoryReader r = w.getReader();
-    assertEquals(17, getOnlySegmentReader(r).getNumericDocValues("field").get(0));
+    assertEquals(17, getOnlyLeafReader(r).getNumericDocValues("field").get(0));
     r.close();
     w.close();
     d.close();
@@ -171,7 +171,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
     w.addDocument(doc);
     w.forceMerge(1);
     DirectoryReader r = w.getReader();
-    BinaryDocValues s = DocValues.getSorted(getOnlySegmentReader(r), "field");
+    BinaryDocValues s = DocValues.getSorted(getOnlyLeafReader(r), "field");
 
     BytesRef bytes1 = s.get(0);
     assertEquals(bytes.length, bytes1.length);
@@ -199,19 +199,18 @@ public class TestDocValuesIndexing extends LuceneTestCase {
       writer.addDocument(doc);
     }
     DirectoryReader r = writer.getReader();
-    LeafReader slow = SlowCompositeReaderWrapper.wrap(r);
-    FieldInfos fi = slow.getFieldInfos();
+    FieldInfos fi = MultiFields.getMergedFieldInfos(r);
     FieldInfo dvInfo = fi.fieldInfo("dv");
     assertTrue(dvInfo.getDocValuesType() != DocValuesType.NONE);
-    NumericDocValues dv = slow.getNumericDocValues("dv");
+    NumericDocValues dv = MultiDocValues.getNumericValues(r, "dv");
     for (int i = 0; i < 50; i++) {
       assertEquals(i, dv.get(i));
-      Document d = slow.document(i);
+      Document d = r.document(i);
       // cannot use d.get("dv") due to another bug!
       assertNull(d.getField("dv"));
       assertEquals(Integer.toString(i), d.get("docId"));
     }
-    slow.close();
+    r.close();
     writer.close();
     dir.close();
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestDocsAndPositions.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDocsAndPositions.java b/lucene/core/src/test/org/apache/lucene/index/TestDocsAndPositions.java
index dc49db1..2ef5824 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestDocsAndPositions.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestDocsAndPositions.java
@@ -335,7 +335,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
     doc.add(newStringField("foo", "bar", Field.Store.NO));
     writer.addDocument(doc);
     DirectoryReader reader = writer.getReader();
-    LeafReader r = getOnlySegmentReader(reader);
+    LeafReader r = getOnlyLeafReader(reader);
     PostingsEnum disi = TestUtil.docs(random(), r, "foo", new BytesRef("bar"), null, PostingsEnum.NONE);
     int docid = disi.docID();
     assertEquals(-1, docid);
@@ -360,7 +360,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
     doc.add(newTextField("foo", "bar", Field.Store.NO));
     writer.addDocument(doc);
     DirectoryReader reader = writer.getReader();
-    LeafReader r = getOnlySegmentReader(reader);
+    LeafReader r = getOnlyLeafReader(reader);
     PostingsEnum disi = r.postings(new Term("foo", "bar"), PostingsEnum.ALL);
     int docid = disi.docID();
     assertEquals(-1, docid);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java b/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java
index 489a185..a814c4c 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java
@@ -284,7 +284,7 @@ public class TestDocumentWriter extends LuceneTestCase {
 
     TestUtil.checkIndex(dir);
 
-    SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(dir));
+    LeafReader reader = getOnlyLeafReader(DirectoryReader.open(dir));
     FieldInfos fi = reader.getFieldInfos();
     // f1
     assertFalse("f1 should have no norms", fi.fieldInfo("f1").hasNorms());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java b/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java
index b79e638..62fe28a 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java
@@ -162,8 +162,8 @@ public class TestDuelingCodecs extends LuceneTestCase {
     createRandomIndex(numdocs, leftWriter, seed);
     createRandomIndex(numdocs, rightWriter, seed);
 
-    leftReader = wrapReader(leftWriter.getReader(), false);
-    rightReader = wrapReader(rightWriter.getReader(), false);
+    leftReader = wrapReader(leftWriter.getReader());
+    rightReader = wrapReader(rightWriter.getReader());
     
     // check that our readers are valid
     TestUtil.checkReader(leftReader);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestExitableDirectoryReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestExitableDirectoryReader.java b/lucene/core/src/test/org/apache/lucene/index/TestExitableDirectoryReader.java
index 36c9e0d..65cf84e 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestExitableDirectoryReader.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestExitableDirectoryReader.java
@@ -78,8 +78,8 @@ public class TestExitableDirectoryReader extends LuceneTestCase {
       }
     }
 
-    public TestReader(IndexReader reader) throws IOException {
-      super(SlowCompositeReaderWrapper.wrap(reader));
+    public TestReader(LeafReader reader) throws IOException {
+      super(reader);
     }
 
     @Override
@@ -107,6 +107,7 @@ public class TestExitableDirectoryReader extends LuceneTestCase {
     Document d3 = new Document();
     d3.add(newTextField("default", "ones two four", Field.Store.YES));
     writer.addDocument(d3);
+    writer.forceMerge(1);
 
     writer.commit();
     writer.close();
@@ -122,43 +123,39 @@ public class TestExitableDirectoryReader extends LuceneTestCase {
     // Not checking the validity of the result, all we are bothered about in this test is the timing out.
     directoryReader = DirectoryReader.open(directory);
     exitableDirectoryReader = new ExitableDirectoryReader(directoryReader, new QueryTimeoutImpl(1000));
-    reader = new TestReader(exitableDirectoryReader);
+    reader = new TestReader(getOnlyLeafReader(exitableDirectoryReader));
     searcher = new IndexSearcher(reader);
     searcher.search(query, 10);
     reader.close();
-    exitableDirectoryReader.close();
 
 
     // Set a really low timeout value (1 millisecond) and expect an Exception
     directoryReader = DirectoryReader.open(directory);
     exitableDirectoryReader = new ExitableDirectoryReader(directoryReader, new QueryTimeoutImpl(1));
-    reader = new TestReader(exitableDirectoryReader);
+    reader = new TestReader(getOnlyLeafReader(exitableDirectoryReader));
     IndexSearcher slowSearcher = new IndexSearcher(reader);
     expectThrows(ExitingReaderException.class, () -> {
       slowSearcher.search(query, 10);
     });
     reader.close();
-    exitableDirectoryReader.close();
    
     // Set maximum time out and expect the query to complete. 
     // Not checking the validity of the result, all we are bothered about in this test is the timing out.
     directoryReader = DirectoryReader.open(directory);
     exitableDirectoryReader = new ExitableDirectoryReader(directoryReader, new QueryTimeoutImpl(Long.MAX_VALUE));
-    reader = new TestReader(exitableDirectoryReader);
+    reader = new TestReader(getOnlyLeafReader(exitableDirectoryReader));
     searcher = new IndexSearcher(reader);
     searcher.search(query, 10);
     reader.close();
-    exitableDirectoryReader.close();
 
     // Set a negative time allowed and expect the query to complete (should disable timeouts)
     // Not checking the validity of the result, all we are bothered about in this test is the timing out.
     directoryReader = DirectoryReader.open(directory);
     exitableDirectoryReader = new ExitableDirectoryReader(directoryReader, new QueryTimeoutImpl(-189034L));
-    reader = new TestReader(exitableDirectoryReader);
+    reader = new TestReader(getOnlyLeafReader(exitableDirectoryReader));
     searcher = new IndexSearcher(reader);
     searcher.search(query, 10);
     reader.close();
-    exitableDirectoryReader.close();
 
     directory.close();
   }

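These hunks add writer.forceMerge(1) because getOnlyLeafReader, unlike the removed wrapper, merges nothing: it simply returns the one leaf and fails when the index holds more than one segment. An approximation of that contract (the real helper lives in LuceneTestCase; this sketch is not its actual implementation):

    import java.util.List;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.LeafReader;
    import org.apache.lucene.index.LeafReaderContext;

    // Approximation of the getOnlyLeafReader contract assumed by these tests.
    static LeafReader onlyLeaf(IndexReader r) {
      List<LeafReaderContext> leaves = r.leaves();
      if (leaves.size() != 1) {
        throw new IllegalArgumentException("expected a single-leaf reader, got " + leaves.size());
      }
      return leaves.get(0).reader();
    }
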
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestFilterLeafReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestFilterLeafReader.java b/lucene/core/src/test/org/apache/lucene/index/TestFilterLeafReader.java
index cad47a4..e9f6fe2 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestFilterLeafReader.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestFilterLeafReader.java
@@ -98,8 +98,8 @@ public class TestFilterLeafReader extends LuceneTestCase {
       }
     }
     
-    public TestReader(IndexReader reader) throws IOException {
-      super(SlowCompositeReaderWrapper.wrap(reader));
+    public TestReader(LeafReader reader) throws IOException {
+      super(reader);
     }
 
     @Override
@@ -128,7 +128,7 @@ public class TestFilterLeafReader extends LuceneTestCase {
     Document d3 = new Document();
     d3.add(newTextField("default", "two four", Field.Store.YES));
     writer.addDocument(d3);
-
+    writer.forceMerge(1);
     writer.close();
 
     Directory target = newDirectory();
@@ -137,7 +137,7 @@ public class TestFilterLeafReader extends LuceneTestCase {
     ((BaseDirectoryWrapper) target).setCrossCheckTermVectorsOnClose(false);
 
     writer = new IndexWriter(target, newIndexWriterConfig(new MockAnalyzer(random())));
-    try (LeafReader reader = new TestReader(DirectoryReader.open(directory))) {
+    try (LeafReader reader = new TestReader(getOnlyLeafReader(DirectoryReader.open(directory)))) {
       writer.addIndexes(SlowCodecReaderWrapper.wrap(reader));
     }
     writer.close();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestFlex.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestFlex.java b/lucene/core/src/test/org/apache/lucene/index/TestFlex.java
index 3d716e1..d91301f 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestFlex.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestFlex.java
@@ -70,7 +70,7 @@ public class TestFlex extends LuceneTestCase {
     w.addDocument(doc);
     w.forceMerge(1);
     DirectoryReader r = w.getReader();
-    TermsEnum terms = getOnlySegmentReader(r).fields().terms("f").iterator();
+    TermsEnum terms = getOnlyLeafReader(r).fields().terms("f").iterator();
     assertTrue(terms.next() != null);
     try {
       assertEquals(0, terms.ord());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestIndexReaderClose.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexReaderClose.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexReaderClose.java
index 6b22fd7..91dcb6e 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexReaderClose.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexReaderClose.java
@@ -37,14 +37,15 @@ public class TestIndexReaderClose extends LuceneTestCase {
   public void testCloseUnderException() throws IOException {
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random(), new MockAnalyzer(random())));
+    writer.addDocument(new Document());
     writer.commit();
     writer.close();
     final int iters = 1000 +  1 + random().nextInt(20);
     for (int j = 0; j < iters; j++) {
       DirectoryReader open = DirectoryReader.open(dir);
       final boolean throwOnClose = !rarely();
-      LeafReader wrap = SlowCompositeReaderWrapper.wrap(open);
-      FilterLeafReader reader = new FilterLeafReader(wrap) {
+      LeafReader leaf = getOnlyLeafReader(open);
+      FilterLeafReader reader = new FilterLeafReader(leaf) {
         @Override
         protected void doClose() throws IOException {
           super.doClose();
@@ -87,54 +88,10 @@ public class TestIndexReaderClose extends LuceneTestCase {
         reader.close(); // call it again
       }
       assertEquals(0, count.get());
-      wrap.close();
     }
     dir.close();
   }
 
-  public void testCoreListenerOnSlowCompositeReaderWrapper() throws IOException {
-    RandomIndexWriter w = new RandomIndexWriter(random(), newDirectory());
-    final int numDocs = TestUtil.nextInt(random(), 1, 5);
-    for (int i = 0; i < numDocs; ++i) {
-      w.addDocument(new Document());
-      if (random().nextBoolean()) {
-        w.commit();
-      }
-    }
-    w.commit();
-    w.close();
-
-    final IndexReader reader = DirectoryReader.open(w.w.getDirectory());
-    final LeafReader leafReader = SlowCompositeReaderWrapper.wrap(reader);
-    
-    final int numListeners = TestUtil.nextInt(random(), 1, 10);
-    final List<LeafReader.CoreClosedListener> listeners = new ArrayList<>();
-    AtomicInteger counter = new AtomicInteger(numListeners);
-    
-    for (int i = 0; i < numListeners; ++i) {
-      CountCoreListener listener = new CountCoreListener(counter, leafReader.getCoreCacheKey());
-      listeners.add(listener);
-      leafReader.addCoreClosedListener(listener);
-    }
-    for (int i = 0; i < 100; ++i) {
-      leafReader.addCoreClosedListener(listeners.get(random().nextInt(listeners.size())));
-    }
-    final int removed = random().nextInt(numListeners);
-    Collections.shuffle(listeners, random());
-    for (int i = 0; i < removed; ++i) {
-      leafReader.removeCoreClosedListener(listeners.get(i));
-    }
-    assertEquals(numListeners, counter.get());
-    // make sure listeners are registered on the wrapped reader and that closing any of them has the same effect
-    if (random().nextBoolean()) {
-      reader.close();
-    } else {
-      leafReader.close();
-    }
-    assertEquals(removed, counter.get());
-    w.w.getDirectory().close();
-  }
-
   public void testCoreListenerOnWrapperWithDifferentCacheKey() throws IOException {
     RandomIndexWriter w = new RandomIndexWriter(random(), newDirectory());
     final int numDocs = TestUtil.nextInt(random(), 1, 5);
@@ -144,13 +101,14 @@ public class TestIndexReaderClose extends LuceneTestCase {
         w.commit();
       }
     }
+    w.forceMerge(1);
     w.commit();
     w.close();
 
     final IndexReader reader = DirectoryReader.open(w.w.getDirectory());
     // We explicitly define a different cache key
     final Object coreCacheKey = new Object();
-    final LeafReader leafReader = new FilterLeafReader(SlowCompositeReaderWrapper.wrap(reader)) {
+    final LeafReader leafReader = new FilterLeafReader(getOnlyLeafReader(reader)) {
       @Override
       public Object getCoreCacheKey() {
         return coreCacheKey;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
index 7461618..2c3543e 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
@@ -692,7 +692,7 @@ public class TestIndexWriter extends LuceneTestCase {
     writer.addDocument(doc);  
     writer.close();
     DirectoryReader reader = DirectoryReader.open(dir);
-    LeafReader subreader = getOnlySegmentReader(reader);
+    LeafReader subreader = getOnlyLeafReader(reader);
     TermsEnum te = subreader.fields().terms("").iterator();
     assertEquals(new BytesRef("a"), te.next());
     assertEquals(new BytesRef("b"), te.next());
@@ -713,7 +713,7 @@ public class TestIndexWriter extends LuceneTestCase {
     writer.addDocument(doc);  
     writer.close();
     DirectoryReader reader = DirectoryReader.open(dir);
-    LeafReader subreader = getOnlySegmentReader(reader);
+    LeafReader subreader = getOnlyLeafReader(reader);
     TermsEnum te = subreader.fields().terms("").iterator();
     assertEquals(new BytesRef(""), te.next());
     assertEquals(new BytesRef("a"), te.next());
@@ -2549,7 +2549,7 @@ public class TestIndexWriter extends LuceneTestCase {
     w.commit();
     w.close();
     DirectoryReader r = DirectoryReader.open(d);
-    assertEquals(0, getOnlySegmentReader(r).getNormValues("foo").get(0));
+    assertEquals(0, getOnlyLeafReader(r).getNormValues("foo").get(0));
     r.close();
     d.close();
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestLazyProxSkipping.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestLazyProxSkipping.java b/lucene/core/src/test/org/apache/lucene/index/TestLazyProxSkipping.java
index a928fd2..ff79e5e 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestLazyProxSkipping.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestLazyProxSkipping.java
@@ -104,7 +104,7 @@ public class TestLazyProxSkipping extends LuceneTestCase {
         writer.forceMerge(1);
         writer.close();
 
-      SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(directory));
+      LeafReader reader = getOnlyLeafReader(DirectoryReader.open(directory));
 
       this.searcher = newSearcher(reader);
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestMultiDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestMultiDocValues.java b/lucene/core/src/test/org/apache/lucene/index/TestMultiDocValues.java
index 121e85c..5b70c38 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestMultiDocValues.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestMultiDocValues.java
@@ -56,7 +56,7 @@ public class TestMultiDocValues extends LuceneTestCase {
     DirectoryReader ir = iw.getReader();
     iw.forceMerge(1);
     DirectoryReader ir2 = iw.getReader();
-    LeafReader merged = getOnlySegmentReader(ir2);
+    LeafReader merged = getOnlyLeafReader(ir2);
     iw.close();
     
     NumericDocValues multi = MultiDocValues.getNumericValues(ir, "numbers");
@@ -91,7 +91,7 @@ public class TestMultiDocValues extends LuceneTestCase {
     DirectoryReader ir = iw.getReader();
     iw.forceMerge(1);
     DirectoryReader ir2 = iw.getReader();
-    LeafReader merged = getOnlySegmentReader(ir2);
+    LeafReader merged = getOnlyLeafReader(ir2);
     iw.close();
     
     BinaryDocValues multi = MultiDocValues.getBinaryValues(ir, "bytes");
@@ -131,7 +131,7 @@ public class TestMultiDocValues extends LuceneTestCase {
     DirectoryReader ir = iw.getReader();
     iw.forceMerge(1);
     DirectoryReader ir2 = iw.getReader();
-    LeafReader merged = getOnlySegmentReader(ir2);
+    LeafReader merged = getOnlyLeafReader(ir2);
     iw.close();
     
     SortedDocValues multi = MultiDocValues.getSortedValues(ir, "bytes");
@@ -173,7 +173,7 @@ public class TestMultiDocValues extends LuceneTestCase {
     DirectoryReader ir = iw.getReader();
     iw.forceMerge(1);
     DirectoryReader ir2 = iw.getReader();
-    LeafReader merged = getOnlySegmentReader(ir2);
+    LeafReader merged = getOnlyLeafReader(ir2);
     iw.close();
     
     SortedDocValues multi = MultiDocValues.getSortedValues(ir, "bytes");
@@ -214,7 +214,7 @@ public class TestMultiDocValues extends LuceneTestCase {
     DirectoryReader ir = iw.getReader();
     iw.forceMerge(1);
     DirectoryReader ir2 = iw.getReader();
-    LeafReader merged = getOnlySegmentReader(ir2);
+    LeafReader merged = getOnlyLeafReader(ir2);
     iw.close();
     
     SortedSetDocValues multi = MultiDocValues.getSortedSetValues(ir, "bytes");
@@ -276,7 +276,7 @@ public class TestMultiDocValues extends LuceneTestCase {
     DirectoryReader ir = iw.getReader();
     iw.forceMerge(1);
     DirectoryReader ir2 = iw.getReader();
-    LeafReader merged = getOnlySegmentReader(ir2);
+    LeafReader merged = getOnlyLeafReader(ir2);
     iw.close();
     
     SortedSetDocValues multi = MultiDocValues.getSortedSetValues(ir, "bytes");
@@ -337,7 +337,7 @@ public class TestMultiDocValues extends LuceneTestCase {
     DirectoryReader ir = iw.getReader();
     iw.forceMerge(1);
     DirectoryReader ir2 = iw.getReader();
-    LeafReader merged = getOnlySegmentReader(ir2);
+    LeafReader merged = getOnlyLeafReader(ir2);
     iw.close();
     
     SortedNumericDocValues multi = MultiDocValues.getSortedNumericValues(ir, "nums");
@@ -388,7 +388,7 @@ public class TestMultiDocValues extends LuceneTestCase {
     DirectoryReader ir = iw.getReader();
     iw.forceMerge(1);
     DirectoryReader ir2 = iw.getReader();
-    LeafReader merged = getOnlySegmentReader(ir2);
+    LeafReader merged = getOnlyLeafReader(ir2);
     iw.close();
     
     Bits multi = MultiDocValues.getDocsWithField(ir, "numbers");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java b/lucene/core/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java
index a82444b..bc14cb8 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java
@@ -80,7 +80,7 @@ public class TestMultiLevelSkipList extends LuceneTestCase {
     writer.forceMerge(1);
     writer.close();
 
-    LeafReader reader = getOnlySegmentReader(DirectoryReader.open(dir));
+    LeafReader reader = getOnlyLeafReader(DirectoryReader.open(dir));
     
     for (int i = 0; i < 2; i++) {
       counter = 0;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestNorms.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestNorms.java b/lucene/core/src/test/org/apache/lucene/index/TestNorms.java
index 562cefb..45db69a 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestNorms.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestNorms.java
@@ -111,8 +111,8 @@ public class TestNorms extends LuceneTestCase {
   public void testMaxByteNorms() throws IOException {
     Directory dir = newFSDirectory(createTempDir("TestNorms.testMaxByteNorms"));
     buildIndex(dir);
-    LeafReader open = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir));
-    NumericDocValues normValues = open.getNormValues(byteTestField);
+    DirectoryReader open = DirectoryReader.open(dir);
+    NumericDocValues normValues = MultiDocValues.getNormValues(open, byteTestField);
     assertNotNull(normValues);
     for (int i = 0; i < open.maxDoc(); i++) {
       Document document = open.document(i);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java b/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java
index 727f6ff..15ecc0f 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java
@@ -238,16 +238,14 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
       writer.close();
     }
     
-    LeafReader slow = SlowCompositeReaderWrapper.wrap(reader);
-    
-    Bits liveDocs = slow.getLiveDocs();
+    Bits liveDocs = MultiFields.getLiveDocs(reader);
     boolean[] expectedLiveDocs = new boolean[] { true, false, false, true, true, true };
     for (int i = 0; i < expectedLiveDocs.length; i++) {
       assertEquals(expectedLiveDocs[i], liveDocs.get(i));
     }
     
     long[] expectedValues = new long[] { 1, 2, 3, 17, 5, 17};
-    NumericDocValues ndv = slow.getNumericDocValues("val");
+    NumericDocValues ndv = MultiDocValues.getNumericValues(reader, "val");
     for (int i = 0; i < expectedValues.length; i++) {
       assertEquals(expectedValues[i], ndv.get(i));
     }
@@ -460,10 +458,9 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     
     final DirectoryReader reader = DirectoryReader.open(dir);
     
-    LeafReader r = SlowCompositeReaderWrapper.wrap(reader);
-    NumericDocValues ndv = r.getNumericDocValues("ndv");
-    SortedDocValues sdv = r.getSortedDocValues("sorted");
-    for (int i = 0; i < r.maxDoc(); i++) {
+    NumericDocValues ndv = MultiDocValues.getNumericValues(reader, "ndv");
+    SortedDocValues sdv = MultiDocValues.getSortedValues(reader, "sorted");
+    for (int i = 0; i < reader.maxDoc(); i++) {
       assertEquals(17, ndv.get(i));
       final BytesRef term = sdv.get(i);
       assertEquals(new BytesRef("value"), term);
@@ -491,9 +488,8 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     writer.close();
     
     final DirectoryReader reader = DirectoryReader.open(dir);
-    final LeafReader r = SlowCompositeReaderWrapper.wrap(reader);
-    NumericDocValues ndv = r.getNumericDocValues("ndv");
-    for (int i = 0; i < r.maxDoc(); i++) {
+    NumericDocValues ndv = MultiDocValues.getNumericValues(reader, "ndv");
+    for (int i = 0; i < reader.maxDoc(); i++) {
       assertEquals(3, ndv.get(i));
     }
     reader.close();
@@ -592,9 +588,8 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     writer.close();
     
     final DirectoryReader reader = DirectoryReader.open(dir);
-    final LeafReader r = SlowCompositeReaderWrapper.wrap(reader);
-    NumericDocValues ndv = r.getNumericDocValues("ndv");
-    for (int i = 0; i < r.maxDoc(); i++) {
+    NumericDocValues ndv = MultiDocValues.getNumericValues(reader, "ndv");
+    for (int i = 0; i < reader.maxDoc(); i++) {
       assertEquals(3, ndv.get(i));
     }
     reader.close();
@@ -806,7 +801,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     writer.close();
     
     reader = DirectoryReader.open(dir);
-    LeafReader ar = getOnlySegmentReader(reader);
+    LeafReader ar = getOnlyLeafReader(reader);
     assertEquals(DocValuesType.NUMERIC, ar.getFieldInfos().fieldInfo("foo").getDocValuesType());
     IndexSearcher searcher = new IndexSearcher(reader);
     TopFieldDocs td;
@@ -1103,9 +1098,8 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     writer.close();
     
     DirectoryReader reader = DirectoryReader.open(dir);
-    LeafReader r = SlowCompositeReaderWrapper.wrap(reader);
-    NumericDocValues f1 = r.getNumericDocValues("f1");
-    NumericDocValues f2 = r.getNumericDocValues("f2");
+    NumericDocValues f1 = MultiDocValues.getNumericValues(reader, "f1");
+    NumericDocValues f2 = MultiDocValues.getNumericValues(reader, "f2");
     assertEquals(12L, f1.get(0));
     assertEquals(13L, f2.get(0));
     assertEquals(17L, f1.get(1));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestOmitNorms.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestOmitNorms.java b/lucene/core/src/test/org/apache/lucene/index/TestOmitNorms.java
index 83dfd19..bc3c3e8 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestOmitNorms.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestOmitNorms.java
@@ -66,7 +66,7 @@ public class TestOmitNorms extends LuceneTestCase {
     // flush
     writer.close();
 
-    SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(ram));
+    LeafReader reader = getOnlyLeafReader(DirectoryReader.open(ram));
     FieldInfos fi = reader.getFieldInfos();
     assertTrue("OmitNorms field bit should be set.", fi.fieldInfo("f1").omitsNorms());
     assertTrue("OmitNorms field bit should be set.", fi.fieldInfo("f2").omitsNorms());
@@ -120,7 +120,7 @@ public class TestOmitNorms extends LuceneTestCase {
     // flush
     writer.close();
 
-    SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(ram));
+    LeafReader reader = getOnlyLeafReader(DirectoryReader.open(ram));
     FieldInfos fi = reader.getFieldInfos();
     assertTrue("OmitNorms field bit should be set.", fi.fieldInfo("f1").omitsNorms());
     assertTrue("OmitNorms field bit should be set.", fi.fieldInfo("f2").omitsNorms());
@@ -168,7 +168,7 @@ public class TestOmitNorms extends LuceneTestCase {
     // flush
     writer.close();
 
-    SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(ram));
+    LeafReader reader = getOnlyLeafReader(DirectoryReader.open(ram));
     FieldInfos fi = reader.getFieldInfos();
     assertTrue("OmitNorms field bit should not be set.", !fi.fieldInfo("f1").omitsNorms());
     assertTrue("OmitNorms field bit should be set.", fi.fieldInfo("f2").omitsNorms());
@@ -297,7 +297,7 @@ public class TestOmitNorms extends LuceneTestCase {
     // fully merge and validate MultiNorms against single segment.
     riw.forceMerge(1);
     DirectoryReader ir2 = riw.getReader();
-    NumericDocValues norms2 = getOnlySegmentReader(ir2).getNormValues(field);
+    NumericDocValues norms2 = getOnlyLeafReader(ir2).getNormValues(field);
 
     if (norms1 == null) {
       assertNull(norms2);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestOmitPositions.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestOmitPositions.java b/lucene/core/src/test/org/apache/lucene/index/TestOmitPositions.java
index 38c7251..f5a74b5 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestOmitPositions.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestOmitPositions.java
@@ -153,7 +153,7 @@ public class TestOmitPositions extends LuceneTestCase {
     // flush
     writer.close();
 
-    SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(ram));
+    LeafReader reader = getOnlyLeafReader(DirectoryReader.open(ram));
     FieldInfos fi = reader.getFieldInfos();
     // docs + docs = docs
     assertEquals(IndexOptions.DOCS, fi.fieldInfo("f1").getIndexOptions());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java b/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java
index 807c704..3c12a0d 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java
@@ -105,7 +105,7 @@ public class TestOmitTf extends LuceneTestCase {
     // flush
     writer.close();
 
-    SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(ram));
+    LeafReader reader = getOnlyLeafReader(DirectoryReader.open(ram));
     FieldInfos fi = reader.getFieldInfos();
     assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS, fi.fieldInfo("f1").getIndexOptions());
     assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS, fi.fieldInfo("f2").getIndexOptions());
@@ -157,7 +157,7 @@ public class TestOmitTf extends LuceneTestCase {
     // flush
     writer.close();
 
-    SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(ram));
+    LeafReader reader = getOnlyLeafReader(DirectoryReader.open(ram));
     FieldInfos fi = reader.getFieldInfos();
     assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS, fi.fieldInfo("f1").getIndexOptions());
     assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS, fi.fieldInfo("f2").getIndexOptions());
@@ -200,7 +200,7 @@ public class TestOmitTf extends LuceneTestCase {
     // flush
     writer.close();
 
-    SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(ram));
+    LeafReader reader = getOnlyLeafReader(DirectoryReader.open(ram));
     FieldInfos fi = reader.getFieldInfos();
     assertEquals("OmitTermFreqAndPositions field bit should not be set.", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, fi.fieldInfo("f1").getIndexOptions());
     assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS, fi.fieldInfo("f2").getIndexOptions());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestOrdinalMap.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestOrdinalMap.java b/lucene/core/src/test/org/apache/lucene/index/TestOrdinalMap.java
index e0fab18..1f9ff11 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestOrdinalMap.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestOrdinalMap.java
@@ -83,13 +83,12 @@ public class TestOrdinalMap extends LuceneTestCase {
     }
     iw.commit();
     DirectoryReader r = iw.getReader();
-    LeafReader ar = SlowCompositeReaderWrapper.wrap(r);
-    SortedDocValues sdv = ar.getSortedDocValues("sdv");
+    SortedDocValues sdv = MultiDocValues.getSortedValues(r, "sdv");
     if (sdv instanceof MultiSortedDocValues) {
       OrdinalMap map = ((MultiSortedDocValues) sdv).mapping;
       assertEquals(RamUsageTester.sizeOf(map, ORDINAL_MAP_ACCUMULATOR), map.ramBytesUsed());
     }
-    SortedSetDocValues ssdv = ar.getSortedSetDocValues("ssdv");
+    SortedSetDocValues ssdv = MultiDocValues.getSortedSetValues(r, "ssdv");
     if (ssdv instanceof MultiSortedSetDocValues) {
       OrdinalMap map = ((MultiSortedSetDocValues) ssdv).mapping;
       assertEquals(RamUsageTester.sizeOf(map, ORDINAL_MAP_ACCUMULATOR), map.ramBytesUsed());

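MultiDocValues provides merged doc-values views directly over a composite reader, which is what lets this test drop SlowCompositeReaderWrapper. A sketch of the access pattern, using the same field names as the test (MultiSortedDocValues and MultiSortedSetDocValues are the MultiDocValues inner types the test already imports):

    DirectoryReader r = iw.getReader();
    SortedDocValues sdv = MultiDocValues.getSortedValues(r, "sdv");         // merged across all leaves
    if (sdv instanceof MultiSortedDocValues) {
      // only a true multi-leaf view carries an OrdinalMap (per-segment ord -> global ord)
      OrdinalMap map = ((MultiSortedDocValues) sdv).mapping;
    }
    SortedSetDocValues ssdv = MultiDocValues.getSortedSetValues(r, "ssdv"); // merged sorted-set view

Note that getSortedValues may hand back the single leaf's values (not a MultiSortedDocValues) when the index has only one segment, hence the instanceof guards in the test.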
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestParallelCompositeReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestParallelCompositeReader.java b/lucene/core/src/test/org/apache/lucene/index/TestParallelCompositeReader.java
index 7078380..166c9e4d 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestParallelCompositeReader.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestParallelCompositeReader.java
@@ -18,6 +18,8 @@ package org.apache.lucene.index;
 
 
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.Random;
 
 import org.apache.lucene.analysis.MockAnalyzer;
@@ -276,32 +278,6 @@ public class TestParallelCompositeReader extends LuceneTestCase {
     dir2.close();
   }
   
-  public void testIncompatibleIndexes3() throws IOException {
-    Directory dir1 = getDir1(random());
-    Directory dir2 = getDir2(random());
-
-    CompositeReader ir1 = new MultiReader(DirectoryReader.open(dir1), SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir1))),
-        ir2 = new MultiReader(DirectoryReader.open(dir2), DirectoryReader.open(dir2));
-    CompositeReader[] readers = new CompositeReader[] {ir1, ir2};
-
-    expectThrows(IllegalArgumentException.class, () -> {
-      new ParallelCompositeReader(readers);
-    });
-
-    expectThrows(IllegalArgumentException.class, () -> {
-      new ParallelCompositeReader(random().nextBoolean(), readers, readers);
-    });
-
-    assertEquals(1, ir1.getRefCount());
-    assertEquals(1, ir2.getRefCount());
-    ir1.close();
-    ir2.close();
-    assertEquals(0, ir1.getRefCount());
-    assertEquals(0, ir2.getRefCount());
-    dir1.close();
-    dir2.close();
-  }
-  
   public void testIgnoreStoredFields() throws IOException {
     Directory dir1 = getDir1(random());
     Directory dir2 = getDir2(random());
@@ -317,7 +293,7 @@ public class TestParallelCompositeReader extends LuceneTestCase {
     assertNull(pr.document(0).get("f3"));
     assertNull(pr.document(0).get("f4"));
     // check that fields are there
-    LeafReader slow = SlowCompositeReaderWrapper.wrap(pr);
+    Fields slow = MultiFields.getFields(pr);
     assertNotNull(slow.terms("f1"));
     assertNotNull(slow.terms("f2"));
     assertNotNull(slow.terms("f3"));
@@ -333,7 +309,7 @@ public class TestParallelCompositeReader extends LuceneTestCase {
     assertNull(pr.document(0).get("f3"));
     assertNull(pr.document(0).get("f4"));
     // check that fields are there
-    slow = SlowCompositeReaderWrapper.wrap(pr);
+    slow = MultiFields.getFields(pr);
     assertNull(slow.terms("f1"));
     assertNull(slow.terms("f2"));
     assertNotNull(slow.terms("f3"));
@@ -349,7 +325,7 @@ public class TestParallelCompositeReader extends LuceneTestCase {
     assertNull(pr.document(0).get("f3"));
     assertNull(pr.document(0).get("f4"));
     // check that fields are there
-    slow = SlowCompositeReaderWrapper.wrap(pr);
+    slow = MultiFields.getFields(pr);
     assertNull(slow.terms("f1"));
     assertNull(slow.terms("f2"));
     assertNotNull(slow.terms("f3"));

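MultiFields.getFields(...) plays the same role for postings: it returns a merged Fields view over any IndexReader, so no composite-to-leaf wrapping is needed just to enumerate terms. A sketch against the ParallelCompositeReader pr from the test, with the usual imports assumed:

    Fields fields = MultiFields.getFields(pr);  // merged view of postings across all leaves
    Terms terms = fields.terms("f1");           // null when no leaf has the field
    if (terms != null) {
      TermsEnum te = terms.iterator();
      BytesRef term;
      while ((term = te.next()) != null) {
        // inspect each term of field f1
      }
    }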
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestParallelLeafReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestParallelLeafReader.java b/lucene/core/src/test/org/apache/lucene/index/TestParallelLeafReader.java
index 31aa603..f7f401f 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestParallelLeafReader.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestParallelLeafReader.java
@@ -63,8 +63,8 @@ public class TestParallelLeafReader extends LuceneTestCase {
   public void testFieldNames() throws Exception {
     Directory dir1 = getDir1(random());
     Directory dir2 = getDir2(random());
-    ParallelLeafReader pr = new ParallelLeafReader(SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir1)),
-                                                       SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir2)));
+    ParallelLeafReader pr = new ParallelLeafReader(getOnlyLeafReader(DirectoryReader.open(dir1)),
+                                                   getOnlyLeafReader(DirectoryReader.open(dir2)));
     FieldInfos fieldInfos = pr.getFieldInfos();
     assertEquals(4, fieldInfos.size());
     assertNotNull(fieldInfos.fieldInfo("f1"));
@@ -81,8 +81,8 @@ public class TestParallelLeafReader extends LuceneTestCase {
     Directory dir2 = getDir2(random());
     LeafReader ir1, ir2;
     // close subreaders, ParallelReader will not change refCounts, but close on its own close
-    ParallelLeafReader pr = new ParallelLeafReader(ir1 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir1)),
-                                                       ir2 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir2)));
+    ParallelLeafReader pr = new ParallelLeafReader(ir1 = getOnlyLeafReader(DirectoryReader.open(dir1)),
+                                                   ir2 = getOnlyLeafReader(DirectoryReader.open(dir2)));
                                                        
     // check RefCounts
     assertEquals(1, ir1.getRefCount());
@@ -97,8 +97,8 @@ public class TestParallelLeafReader extends LuceneTestCase {
   public void testRefCounts2() throws IOException {
     Directory dir1 = getDir1(random());
     Directory dir2 = getDir2(random());
-    LeafReader ir1 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir1));
-    LeafReader ir2 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir2));
+    LeafReader ir1 = getOnlyLeafReader(DirectoryReader.open(dir1));
+    LeafReader ir2 = getOnlyLeafReader(DirectoryReader.open(dir2));
     // don't close subreaders, so ParallelReader will increment refcounts
     ParallelLeafReader pr = new ParallelLeafReader(false, ir1, ir2);
     // check RefCounts
@@ -117,7 +117,7 @@ public class TestParallelLeafReader extends LuceneTestCase {
   
   public void testCloseInnerReader() throws Exception {
     Directory dir1 = getDir1(random());
-    LeafReader ir1 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir1));
+    LeafReader ir1 = getOnlyLeafReader(DirectoryReader.open(dir1));
     
     // with overlapping
     ParallelLeafReader pr = new ParallelLeafReader(true,
@@ -149,8 +149,8 @@ public class TestParallelLeafReader extends LuceneTestCase {
     w2.addDocument(d3);
     w2.close();
     
-    LeafReader ir1 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir1));
-    LeafReader ir2 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir2));
+    LeafReader ir1 = getOnlyLeafReader(DirectoryReader.open(dir1));
+    LeafReader ir2 = getOnlyLeafReader(DirectoryReader.open(dir2));
 
     // indexes don't have the same number of documents
     expectThrows(IllegalArgumentException.class, () -> {
@@ -175,8 +175,8 @@ public class TestParallelLeafReader extends LuceneTestCase {
   public void testIgnoreStoredFields() throws IOException {
     Directory dir1 = getDir1(random());
     Directory dir2 = getDir2(random());
-    LeafReader ir1 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir1));
-    LeafReader ir2 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir2));
+    LeafReader ir1 = getOnlyLeafReader(DirectoryReader.open(dir1));
+    LeafReader ir2 = getOnlyLeafReader(DirectoryReader.open(dir2));
     
     // with overlapping
     ParallelLeafReader pr = new ParallelLeafReader(false,
@@ -276,8 +276,8 @@ public class TestParallelLeafReader extends LuceneTestCase {
     dir1 = getDir1(random);
     dir2 = getDir2(random);
     ParallelLeafReader pr = new ParallelLeafReader(
-        SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir1)),
-        SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir2)));
+        getOnlyLeafReader(DirectoryReader.open(dir1)),
+        getOnlyLeafReader(DirectoryReader.open(dir2)));
     TestUtil.checkReader(pr);
     return newSearcher(pr);
   }
@@ -293,6 +293,7 @@ public class TestParallelLeafReader extends LuceneTestCase {
     d2.add(newTextField("f1", "v2", Field.Store.YES));
     d2.add(newTextField("f2", "v2", Field.Store.YES));
     w1.addDocument(d2);
+    w1.forceMerge(1);
     w1.close();
     return dir1;
   }
@@ -308,6 +309,7 @@ public class TestParallelLeafReader extends LuceneTestCase {
     d4.add(newTextField("f3", "v2", Field.Store.YES));
     d4.add(newTextField("f4", "v2", Field.Store.YES));
     w2.addDocument(d4);
+    w2.forceMerge(1);
     w2.close();
     return dir2;
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestParallelReaderEmptyIndex.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestParallelReaderEmptyIndex.java b/lucene/core/src/test/org/apache/lucene/index/TestParallelReaderEmptyIndex.java
index 373a125..61c84dc 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestParallelReaderEmptyIndex.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestParallelReaderEmptyIndex.java
@@ -50,15 +50,7 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
 
     IndexWriter iwOut = new IndexWriter(rdOut, newIndexWriterConfig(new MockAnalyzer(random())));
     
-    ParallelLeafReader apr = new ParallelLeafReader(
-        SlowCompositeReaderWrapper.wrap(DirectoryReader.open(rd1)),
-        SlowCompositeReaderWrapper.wrap(DirectoryReader.open(rd2)));
-    
-    // When unpatched, Lucene crashes here with a NoSuchElementException (caused by ParallelTermEnum)
-    iwOut.addIndexes(SlowCodecReaderWrapper.wrap(apr));
-    iwOut.forceMerge(1);
-    
-    // 2nd try with a readerless parallel reader
+    // add a readerless parallel reader
     iwOut.addIndexes(SlowCodecReaderWrapper.wrap(new ParallelLeafReader()));
     iwOut.forceMerge(1);
 
@@ -136,16 +128,18 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
     Directory rdOut = newDirectory();
 
     IndexWriter iwOut = new IndexWriter(rdOut, newIndexWriterConfig(new MockAnalyzer(random())));
-    final DirectoryReader reader1, reader2;
-    ParallelLeafReader pr = new ParallelLeafReader(
-        SlowCompositeReaderWrapper.wrap(reader1 = DirectoryReader.open(rd1)),
-        SlowCompositeReaderWrapper.wrap(reader2 = DirectoryReader.open(rd2)));
+    DirectoryReader reader1 = DirectoryReader.open(rd1);
+    DirectoryReader reader2 = DirectoryReader.open(rd2);
+    ParallelLeafReader pr = new ParallelLeafReader(false,
+                                                   getOnlyLeafReader(reader1),
+                                                   getOnlyLeafReader(reader2));
 
     // When unpatched, Lucene crashes here with an ArrayIndexOutOfBoundsException (caused by TermVectorsWriter)
     iwOut.addIndexes(SlowCodecReaderWrapper.wrap(pr));
 
-    // ParallelReader closes any IndexReader you added to it:
     pr.close();
+    reader1.close();
+    reader2.close();
     
     // assert subreaders were closed
     assertEquals(0, reader1.getRefCount());

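With closeSubReaders=false (the new ParallelLeafReader(false, ...) call), the parallel reader increments the subreaders' refcounts on construction and only decrements them on close, so the caller remains responsible for closing the original DirectoryReaders, which is exactly what the added reader1.close()/reader2.close() lines do. A condensed sketch of that ownership contract, assuming single-segment indexes so getOnlyLeafReader applies:

    DirectoryReader reader1 = DirectoryReader.open(rd1);
    DirectoryReader reader2 = DirectoryReader.open(rd2);
    ParallelLeafReader pr = new ParallelLeafReader(false,
                                                   getOnlyLeafReader(reader1),
                                                   getOnlyLeafReader(reader2));
    // searches against pr go here
    pr.close();        // drops only pr's own references
    reader1.close();   // caller still owns the subreaders
    reader2.close();
    assert reader1.getRefCount() == 0 && reader2.getRefCount() == 0;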
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7523ca11/lucene/core/src/test/org/apache/lucene/index/TestParallelTermEnum.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestParallelTermEnum.java b/lucene/core/src/test/org/apache/lucene/index/TestParallelTermEnum.java
index c51fd2d..a83c549 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestParallelTermEnum.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestParallelTermEnum.java
@@ -59,8 +59,8 @@ public class TestParallelTermEnum extends LuceneTestCase {
 
     iw2.close();
 
-    this.ir1 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(rd1));
-    this.ir2 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(rd2));
+    this.ir1 = getOnlyLeafReader(DirectoryReader.open(rd1));
+    this.ir2 = getOnlyLeafReader(DirectoryReader.open(rd2));
   }
 
   @Override


[29/50] [abbrv] lucene-solr git commit: SOLR-445: refactored KnownErr -> ToleratedUpdateError in solr-common

Posted by ho...@apache.org.
SOLR-445: refactored KnownErr -> ToleratedUpdateError in solr-common

Also added some enhancements to that class, and replaced the hack in CloudSolrClient with usage of this class


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c37d5a86
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c37d5a86
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c37d5a86

Branch: refs/heads/jira/SOLR-445
Commit: c37d5a8618c1af41a5cda45463ab1288ea73b99b
Parents: 5b405b6
Author: Chris Hostetter <ho...@apache.org>
Authored: Thu Mar 10 11:22:08 2016 -0700
Committer: Chris Hostetter <ho...@apache.org>
Committed: Thu Mar 10 11:22:08 2016 -0700

----------------------------------------------------------------------
 .../processor/TolerantUpdateProcessor.java      | 124 +++------------
 .../cloud/TestTolerantUpdateProcessorCloud.java |  25 ++--
 .../processor/TolerantUpdateProcessorTest.java  |  37 +----
 .../solr/client/solrj/impl/CloudSolrClient.java |   8 +-
 .../solr/common/ToleratedUpdateError.java       | 150 +++++++++++++++++++
 .../solr/common/TestToleratedUpdateError.java   |  89 +++++++++++
 6 files changed, 281 insertions(+), 152 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c37d5a86/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
index dc07082..30e5f80 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
@@ -29,6 +29,8 @@ import org.apache.lucene.util.CharsRefBuilder;
 import org.apache.solr.cloud.ZkController;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
+import org.apache.solr.common.ToleratedUpdateError;
+import org.apache.solr.common.ToleratedUpdateError.CmdType;
 import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.params.ShardParams;
@@ -53,7 +55,7 @@ import org.slf4j.LoggerFactory;
  * will receive a 200 response, but gets a list of errors (keyed by
  * unique key) unless <code>maxErrors</code> is reached. 
  * If <code>maxErrors</code> occur, the first exception caught will be re-thrown, 
- * Solr will respond with 5XX or 4XX (depending on the exception) and
+ * Solr will respond with 5XX or 4XX (depending on the underlying exceptions) and
  * it won't finish processing the batch. This means that the last docs
  * in the batch may not be added in this case even if they are valid. 
  * </p>
@@ -94,7 +96,7 @@ public class TolerantUpdateProcessor extends UpdateRequestProcessor {
    * Known errors that occurred in this batch, in order encountered (may not be the same as the 
    * order the commands were originally executed in due to the async distributed updates).
    */
-  private final List<KnownErr> knownErrors = new ArrayList<KnownErr>();
+  private final List<ToleratedUpdateError> knownErrors = new ArrayList<ToleratedUpdateError>();
 
   // Kludge: Because deleteByQuery updates are forwarded to every leader, we can get identical
   // errors reported by every leader for the same underlying problem.
@@ -109,13 +111,13 @@ public class TolerantUpdateProcessor extends UpdateRequestProcessor {
   //   dbq: foo:bar
   //
   // ...but i can't figure out a way to accurately identify & return duplicate 
-  // KnownErrs from duplicate identical underlying requests w/o erroneously returning identical 
-  // KnownErrs for the *same* underlying request but from diff shards.
+  // ToleratedUpdateErrors from duplicate identical underlying requests w/o erroneously returning identical 
+  // ToleratedUpdateErrors for the *same* underlying request but from diff shards.
   //
   // So as a kludge, we keep track of them for deduping against identical remote failures
   //
   // :nocommit: probably need to use this for "commit" as well?
-  private Set<KnownErr> knownDBQErrors = new HashSet<>();
+  private Set<ToleratedUpdateError> knownDBQErrors = new HashSet<>();
         
   private final FirstErrTracker firstErrTracker = new FirstErrTracker();
   private final DistribPhase distribPhase;
@@ -153,9 +155,10 @@ public class TolerantUpdateProcessor extends UpdateRequestProcessor {
       if (isLeader || distribPhase.equals(DistribPhase.NONE)) {
         // nocommit: should we skip if condition and always do this? see comment in else...
         
-        knownErrors.add(new KnownErr(CmdType.ADD,
-                                     getPrintableId(id, cmd.getReq().getSchema().getUniqueKeyField()),
-                                     t.getMessage()));
+        knownErrors.add(new ToleratedUpdateError
+                        (CmdType.ADD,
+                         getPrintableId(id, cmd.getReq().getSchema().getUniqueKeyField()),
+                         t.getMessage()));
         if (knownErrors.size() > maxErrors) {
           firstErrTracker.throwFirst();
         }
@@ -185,9 +188,9 @@ public class TolerantUpdateProcessor extends UpdateRequestProcessor {
 
       // nocommit: do we need isLeader type logic like processAdd ? does processAdd even need it?
       
-      KnownErr err = new KnownErr(cmd.isDeleteById() ? CmdType.DELID : CmdType.DELQ,
-                                  cmd.isDeleteById() ? cmd.id : cmd.query,
-                                  t.getMessage());
+      ToleratedUpdateError err = new ToleratedUpdateError(cmd.isDeleteById() ? CmdType.DELID : CmdType.DELQ,
+                                                          cmd.isDeleteById() ? cmd.id : cmd.query,
+                                                          t.getMessage());
       knownErrors.add(err);
 
       // NOTE: we're not using this to dedup before adding to knownErrors.
@@ -241,8 +244,9 @@ public class TolerantUpdateProcessor extends UpdateRequestProcessor {
         }
 
         for (int i = 0; i < remoteErrMetadata.size(); i++) {
-          KnownErr err = KnownErr.parseMetadataIfKnownErr(remoteErrMetadata.getName(i),
-                                                          remoteErrMetadata.getVal(i));
+          ToleratedUpdateError err =
+            ToleratedUpdateError.parseMetadataIfToleratedUpdateError(remoteErrMetadata.getName(i),
+                                                                     remoteErrMetadata.getVal(i));
           if (null == err) {
             // some metadata unrelated to this update processor
             continue;
@@ -262,7 +266,7 @@ public class TolerantUpdateProcessor extends UpdateRequestProcessor {
       }
     }
 
-    header.add("errors", KnownErr.formatForResponseHeader(knownErrors));
+    header.add("errors", ToleratedUpdateError.formatForResponseHeader(knownErrors));
     // include in response so client knows what effective value was (may have been server side config)
     header.add("maxErrors", maxErrors);
 
@@ -364,7 +368,7 @@ public class TolerantUpdateProcessor extends UpdateRequestProcessor {
      * Annotates the first exception (which may already have been thrown, or be thrown in the future) with 
      * the metadata from this update processor.  For use in {@link TolerantUpdateProcessor#finish}
      */
-    public void annotate(List<KnownErr> errors) {
+    public void annotate(List<ToleratedUpdateError> errors) {
 
       if (null == first) {
         return; // no exception to annotate
@@ -378,8 +382,8 @@ public class TolerantUpdateProcessor extends UpdateRequestProcessor {
         first.setMetadata(firstErrMetadata);
       }
 
-      for (KnownErr ke : errors) {
-        firstErrMetadata.add(ke.getMetadataKey(), ke.getMetadataValue());
+      for (ToleratedUpdateError te : errors) {
+        firstErrMetadata.add(te.getMetadataKey(), te.getMetadataValue());
       }
     }
     
@@ -388,91 +392,5 @@ public class TolerantUpdateProcessor extends UpdateRequestProcessor {
     public SolrException getFirst() {
       return first;
     }
-    
-  }
-
-  /**
-   * Helper class for dealing with SolrException metadata (String) keys 
-   */
-  public static final class KnownErr {
-    
-    private final static String META_PRE =  TolerantUpdateProcessor.class.getName() + "--";
-    private final static int META_PRE_LEN = META_PRE.length();
-
-    /** returns a map of simple objects suitable for putting in a SolrQueryResponse */
-    public static List<SimpleOrderedMap<String>> formatForResponseHeader(List<KnownErr> errs) {
-      List<SimpleOrderedMap<String>> result = new ArrayList<>(errs.size());
-      for (KnownErr e : errs) {
-        SimpleOrderedMap<String> entry = new SimpleOrderedMap<String>();
-        entry.add("type", e.type.toString());
-        entry.add("id", e.id);
-        entry.add("message", e.errorValue);
-        result.add(entry);
-      }
-      return result;
-    }
-    
-    /** returns a KnownErr instance if this metadataKey is one we care about, else null */
-    public static KnownErr parseMetadataIfKnownErr(String metadataKey, String metadataVal) {
-      if (! metadataKey.startsWith(META_PRE)) {
-        return null; // not a key we care about
-      }
-      final int typeEnd = metadataKey.indexOf(':', META_PRE_LEN);
-      assert 0 < typeEnd; // nocommit: better error handling
-      return new KnownErr(CmdType.valueOf(metadataKey.substring(META_PRE_LEN, typeEnd)),
-                          metadataKey.substring(typeEnd+1), metadataVal);
-    }
-
-    public final CmdType type;
-    /** may be null depending on type */
-    public final String id;
-    public final String errorValue; // nocommit: refactor: rename errMessage?
-    
-    public KnownErr(CmdType type, String id, String errorValue) {
-      this.type = type;
-      assert null != type;
-      
-      assert null != id;
-      this.id = id;
-      
-      assert null != errorValue;
-      this.errorValue = errorValue;
-    }
-    
-    public String getMetadataKey() {
-      return META_PRE + type + ":" + id;
-    }
-    public String getMetadataValue() {
-      return errorValue.toString();
-    }
-    public String toString() {
-      return getMetadataKey() + "=>" + getMetadataValue();
-    }
-    public int hashCode() {
-      int h = this.getClass().hashCode();
-      h = h * 31 + type.hashCode();
-      h = h * 31 + id.hashCode();
-      h = h * 31 + errorValue.hashCode();
-      return h;
-    }
-    public boolean equals(Object o) {
-      if (o instanceof KnownErr) {
-        KnownErr that = (KnownErr)o;
-        return that.type.equals(this.type)
-          && that.id.equals(this.id)
-          && that.errorValue.equals(this.errorValue);
-      }
-      return false;
-    }
-  }
-  
-  /**
-   * Helper class for dealing with SolrException metadata (String) keys 
-   */
-  public static enum CmdType {
-    ADD, DELID, DELQ; // nocommit: others supported types? (commit?) ..
-
-    // if we add support for things like commit, parsing/toString/hashCode logic
-    // needs to be smarter to account for 'id' being null ... "usesId" should be a prop of enum instances
   }
 }

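Concretely, the error metadata this processor attaches to the first exception is just flat string pairs built by the new ToleratedUpdateError class (full source below); a small illustration of what one ADD failure looks like, with an illustrative message string:

    ToleratedUpdateError err = new ToleratedUpdateError(CmdType.ADD, "doc1", "some error");
    err.getMetadataKey();    // "org.apache.solr.common.ToleratedUpdateError--ADD:doc1"
    err.getMetadataValue();  // "some error"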
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c37d5a86/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java
index bee23a7..93200d8 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java
@@ -39,6 +39,8 @@ import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.SolrInputField;
 import org.apache.solr.common.SolrException;
+import org.apache.solr.common.ToleratedUpdateError;
+import org.apache.solr.common.ToleratedUpdateError.CmdType;
 import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.Slice;
@@ -49,9 +51,6 @@ import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.util.RevertDefaultThreadHandlerRule;
 
-import org.apache.solr.update.processor.TolerantUpdateProcessor.KnownErr;
-import org.apache.solr.update.processor.TolerantUpdateProcessor.CmdType;
-
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -536,11 +535,13 @@ public class TestTolerantUpdateProcessorCloud extends SolrCloudTestCase {
       // verify that the Exceptions metadata can tell us what failed.
       NamedList<String> remoteErrMetadata = e.getMetadata();
       assertNotNull("no metadata in: " + e.toString(), remoteErrMetadata);
-      Set<KnownErr> actualKnownErrs = new LinkedHashSet<KnownErr>(remoteErrMetadata.size());
+      Set<ToleratedUpdateError> actualKnownErrs
+        = new LinkedHashSet<ToleratedUpdateError>(remoteErrMetadata.size());
       int actualKnownErrsCount = 0;
       for (int i = 0; i < remoteErrMetadata.size(); i++) {
-        KnownErr err = KnownErr.parseMetadataIfKnownErr(remoteErrMetadata.getName(i),
-                                                        remoteErrMetadata.getVal(i));
+        ToleratedUpdateError err =
+          ToleratedUpdateError.parseMetadataIfToleratedUpdateError(remoteErrMetadata.getName(i),
+                                                                   remoteErrMetadata.getVal(i));
         if (null == err) {
           // some metadata unrelated to this update processor
           continue;
@@ -552,7 +553,7 @@ public class TestTolerantUpdateProcessorCloud extends SolrCloudTestCase {
                    11, actualKnownErrsCount);
       assertEquals("at least one dup error in metadata: " + remoteErrMetadata.toString(),
                    actualKnownErrsCount, actualKnownErrs.size());
-      for (KnownErr err : actualKnownErrs) {
+      for (ToleratedUpdateError err : actualKnownErrs) {
         assertEquals("only expected type of error is ADD: " + err,
                      CmdType.ADD, err.type);
         assertTrue("failed err msg didn't match expected value: " + err,
@@ -609,11 +610,13 @@ public class TestTolerantUpdateProcessorCloud extends SolrCloudTestCase {
       // verify that the Exceptions metadata can tell us what failed.
       NamedList<String> remoteErrMetadata = e.getMetadata();
       assertNotNull("no metadata in: " + e.toString(), remoteErrMetadata);
-      Set<KnownErr> actualKnownErrs = new LinkedHashSet<KnownErr>(remoteErrMetadata.size());
+      Set<ToleratedUpdateError> actualKnownErrs
+        = new LinkedHashSet<ToleratedUpdateError>(remoteErrMetadata.size());
       int actualKnownErrsCount = 0;
       for (int i = 0; i < remoteErrMetadata.size(); i++) {
-        KnownErr err = KnownErr.parseMetadataIfKnownErr(remoteErrMetadata.getName(i),
-                                                        remoteErrMetadata.getVal(i));
+        ToleratedUpdateError err =
+          ToleratedUpdateError.parseMetadataIfToleratedUpdateError(remoteErrMetadata.getName(i),
+                                                                   remoteErrMetadata.getVal(i));
         if (null == err) {
           // some metadata unrelated to this update processor
           continue;
@@ -625,7 +628,7 @@ public class TestTolerantUpdateProcessorCloud extends SolrCloudTestCase {
                    11, actualKnownErrsCount);
       assertEquals("at least one dup error in metadata: " + remoteErrMetadata.toString(),
                    actualKnownErrsCount, actualKnownErrs.size());
-      for (KnownErr err : actualKnownErrs) {
+      for (ToleratedUpdateError err : actualKnownErrs) {
         assertEquals("only expected type of error is ADD: " + err,
                      CmdType.ADD, err.type);
         assertTrue("failed id had unexpected prefix: " + err,

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c37d5a86/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java
index 61dac44..7470ea0 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java
@@ -30,6 +30,8 @@ import javax.xml.xpath.XPathExpressionException;
 import org.apache.solr.client.solrj.util.ClientUtils;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.ToleratedUpdateError;
+import org.apache.solr.common.ToleratedUpdateError.CmdType;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.SimpleOrderedMap;
@@ -40,8 +42,6 @@ import org.apache.solr.request.SolrRequestHandler;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.servlet.DirectSolrConnection;
 import org.apache.solr.update.AddUpdateCommand;
-import org.apache.solr.update.processor.TolerantUpdateProcessor.KnownErr;
-import org.apache.solr.update.processor.TolerantUpdateProcessor.CmdType;
 import org.apache.solr.util.BaseTestHarness;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -333,39 +333,6 @@ public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase {
     
   }
 
-  public void testKnownErrClass() {
-
-    assertNull(KnownErr.parseMetadataIfKnownErr("some other key", "some value"));
-
-    for (KnownErr in : new KnownErr[] {
-        new KnownErr(CmdType.ADD, "doc1", "some error"),
-        new KnownErr(CmdType.DELID, "doc1", "some diff error"),
-        new KnownErr(CmdType.DELQ, "-field:yakko other_field:wakko", "some other error"),
-      }) {
-      KnownErr out = KnownErr.parseMetadataIfKnownErr(in.getMetadataKey(), in.getMetadataValue());
-      assertNotNull(out);
-      assertEquals(out.type, in.type);
-      assertEquals(out.id, in.id);
-      assertEquals(out.errorValue, in.errorValue);
-      assertEquals(out.hashCode(), in.hashCode());
-      assertEquals(out.toString(), in.toString());
-      
-      assertEquals(out, in);
-      assertEquals(in, out);
-
-    }
-    
-    assertFalse((new KnownErr(CmdType.ADD, "doc1", "some error")).equals
-                (new KnownErr(CmdType.ADD, "doc2", "some error")));
-    assertFalse((new KnownErr(CmdType.ADD, "doc1", "some error")).equals
-                (new KnownErr(CmdType.ADD, "doc1", "some errorxx")));
-    assertFalse((new KnownErr(CmdType.ADD, "doc1", "some error")).equals
-                (new KnownErr(CmdType.DELID, "doc1", "some error")));
-    
-
-    // nocommit: add randomized testing, particularly with non-trivial 'id' values
-    
-  }
 
   
   public String update(String chain, String xml) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c37d5a86/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
index 655844f..febb56f 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
@@ -54,6 +54,7 @@ import org.apache.solr.client.solrj.request.UpdateRequest;
 import org.apache.solr.client.solrj.util.ClientUtils;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
+import org.apache.solr.common.ToleratedUpdateError;
 import org.apache.solr.common.cloud.Aliases;
 import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.DocCollection;
@@ -776,13 +777,14 @@ public class CloudSolrClient extends SolrClient {
     if (null != toleratedErrors) {
       cheader.add("errors", toleratedErrors);
       if (maxToleratedErrors < toleratedErrors.size()) {
+        // cumulative errors are too high, we need to throw a client exception w/correct metadata
+        
         NamedList metadata = new NamedList<String>();
         SolrException toThrow = new SolrException(ErrorCode.BAD_REQUEST, "nocommit: need better msg");
         toThrow.setMetadata(metadata);
         for (SimpleOrderedMap<String> err : toleratedErrors) {
-          // nocommit: hack, refactor KnownErr into solr-common and re-use here...
-          metadata.add("org.apache.solr.update.processor.TolerantUpdateProcessor--" +
-                       err.get("type") + ":" + err.get("id"), err.get("message"));
+          ToleratedUpdateError te = ToleratedUpdateError.parseMap(err);
+          metadata.add(te.getMetadataKey(), te.getMetadataValue());
         }
         throw toThrow;
       }

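On the client side, the same encoding means callers can recover each tolerated failure from the exception CloudSolrClient now throws; a hedged sketch mirroring the cloud test above, where client and update stand in for an existing CloudSolrClient and UpdateRequest:

    try {
      client.request(update, collection);
    } catch (SolrException e) {
      NamedList<String> meta = e.getMetadata();
      if (meta != null) {
        for (int i = 0; i < meta.size(); i++) {
          ToleratedUpdateError err =
              ToleratedUpdateError.parseMetadataIfToleratedUpdateError(meta.getName(i), meta.getVal(i));
          if (err == null) {
            continue;  // metadata unrelated to tolerated updates
          }
          // err.type, err.id and err.errorValue describe one failed command
        }
      }
    }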
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c37d5a86/solr/solrj/src/java/org/apache/solr/common/ToleratedUpdateError.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/ToleratedUpdateError.java b/solr/solrj/src/java/org/apache/solr/common/ToleratedUpdateError.java
new file mode 100644
index 0000000..7261a21
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/common/ToleratedUpdateError.java
@@ -0,0 +1,150 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.common;
+
+import java.util.ArrayList;
+import java.util.List;
+import org.apache.solr.common.util.SimpleOrderedMap;
+
+
+/**
+ * nocommit: more javadocs, mention (but obviously no link) to TolerantUpdateProcessor
+ */
+public final class ToleratedUpdateError {
+    
+  private final static String META_PRE =  ToleratedUpdateError.class.getName() + "--";
+  private final static int META_PRE_LEN = META_PRE.length();
+  
+  /** 
+   * returns a list of maps of simple objects suitable for putting in a SolrQueryResponse header 
+   * @see #getSimpleMap
+   * @see #parseMap
+   */
+  public static List<SimpleOrderedMap<String>> formatForResponseHeader(List<ToleratedUpdateError> errs) {
+    List<SimpleOrderedMap<String>> result = new ArrayList<>(errs.size());
+    for (ToleratedUpdateError e : errs) {
+      result.add(e.getSimpleMap());
+    }
+    return result;
+  }
+  
+  /** 
+   * returns a ToleratedUpdateError instance from the data in this Map 
+   * @see #getSimpleMap
+   */
+  public static ToleratedUpdateError parseMap(SimpleOrderedMap<String> data) {
+    // nocommit: error handling and clean exception reporting if data is bogus
+    return new ToleratedUpdateError(CmdType.valueOf(data.get("type")), data.get("id"), data.get("message"));
+  }
+  
+  /** 
+   * returns a ToleratedUpdateError instance if this metadataKey is one we care about, else null 
+   * @see #getMetadataKey
+   * @see #getMetadataValue
+   */
+  public static ToleratedUpdateError parseMetadataIfToleratedUpdateError(String metadataKey,
+                                                                         String metadataVal) {
+    if (! metadataKey.startsWith(META_PRE)) {
+      return null; // not a key we care about
+    }
+    final int typeEnd = metadataKey.indexOf(':', META_PRE_LEN);
+    assert 0 < typeEnd; // nocommit: better error handling
+    return new ToleratedUpdateError(CmdType.valueOf(metadataKey.substring(META_PRE_LEN, typeEnd)),
+                                    metadataKey.substring(typeEnd+1), metadataVal);
+  }
+
+  // nocommit: make these private & provide getter methods
+  public final CmdType type;
+  public final String id; // may be null depending on type
+  public final String errorValue; // nocommit: refactor: rename message?
+  
+  public ToleratedUpdateError(CmdType type, String id, String errorValue) {
+    this.type = type;
+    assert null != type;
+    
+    assert null != id;
+    this.id = id;
+    
+    assert null != errorValue;
+    this.errorValue = errorValue;
+  }
+
+  /**
+   * returns a string suitable for use as a key in {@link SolrException#setMetadata}
+   *
+   * @see #parseMetadataIfToleratedUpdateError
+   */
+  public String getMetadataKey() {
+    return META_PRE + type + ":" + id;
+  }
+  
+  /**
+   * returns a string suitable for use as a value in {@link SolrException#setMetadata}
+   *
+   * @see #parseMetadataIfToleratedUpdateError
+   */
+  public String getMetadataValue() {
+    return errorValue.toString();
+  }
+  
+  /** 
+   * returns a map of simple objects suitable for putting in a SolrQueryResponse header 
+   * @see #formatForResponseHeader
+   * @see #parseMap
+   */
+  public SimpleOrderedMap<String> getSimpleMap() {
+    SimpleOrderedMap<String> entry = new SimpleOrderedMap<String>();
+    entry.add("type", type.toString());
+    entry.add("id", id);
+    entry.add("message", errorValue);
+    return entry;
+  }
+  
+  public String toString() {
+    return getMetadataKey() + "=>" + getMetadataValue();
+  }
+  
+  public int hashCode() {
+    int h = this.getClass().hashCode();
+    h = h * 31 + type.hashCode();
+    h = h * 31 + id.hashCode();
+    h = h * 31 + errorValue.hashCode();
+    return h;
+  }
+  
+  public boolean equals(Object o) {
+    if (o instanceof ToleratedUpdateError) {
+      ToleratedUpdateError that = (ToleratedUpdateError)o;
+      return that.type.equals(this.type)
+        && that.id.equals(this.id)
+        && that.errorValue.equals(this.errorValue);
+    }
+    return false;
+  }
+  
+  /**
+   * Helper class for dealing with SolrException metadata (String) keys 
+   */
+  public static enum CmdType {
+    ADD, DELID, DELQ; 
+
+    // if we add support for things like commit, parsing/toString/hashCode logic
+    // needs to be smarter to account for 'id' being null ... "usesId" should be a prop of enum instances
+  }
+}
+
+  

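The class supports two symmetric encodings, and both round-trip; a quick sketch using the same sample data as the test below:

    ToleratedUpdateError in =
        new ToleratedUpdateError(CmdType.DELQ, "-field:yakko other_field:wakko", "some other error");

    // 1) SolrException metadata round trip (flat key/value strings)
    ToleratedUpdateError viaMetadata =
        ToleratedUpdateError.parseMetadataIfToleratedUpdateError(in.getMetadataKey(),
                                                                 in.getMetadataValue());

    // 2) response header round trip (SimpleOrderedMap with type/id/message entries)
    ToleratedUpdateError viaMap = ToleratedUpdateError.parseMap(in.getSimpleMap());

    assert in.equals(viaMetadata) && in.equals(viaMap);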
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c37d5a86/solr/solrj/src/test/org/apache/solr/common/TestToleratedUpdateError.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/common/TestToleratedUpdateError.java b/solr/solrj/src/test/org/apache/solr/common/TestToleratedUpdateError.java
new file mode 100644
index 0000000..6759b79
--- /dev/null
+++ b/solr/solrj/src/test/org/apache/solr/common/TestToleratedUpdateError.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.common;
+
+import org.apache.solr.common.ToleratedUpdateError;
+import org.apache.solr.common.ToleratedUpdateError.CmdType;
+
+import org.apache.lucene.util.LuceneTestCase;
+
+/** Basic testing of the serialization/encapsulation code in ToleratedUpdateError */
+public class TestToleratedUpdateError extends LuceneTestCase {
+  
+  // nocommit: add randomized testing, particularly with non-trivial 'id' values
+
+  public void checkRoundTripComparisons(Coppier coppier) {
+
+    assertNull(ToleratedUpdateError.parseMetadataIfToleratedUpdateError("some other key", "some value"));
+    
+    for (ToleratedUpdateError in : new ToleratedUpdateError[] {
+        new ToleratedUpdateError(CmdType.ADD, "doc1", "some error"),
+        new ToleratedUpdateError(CmdType.DELID, "doc1", "some diff error"),
+        new ToleratedUpdateError(CmdType.DELQ, "-field:yakko other_field:wakko", "some other error"),
+      }) {
+      
+      ToleratedUpdateError out = coppier.copy(in);
+      
+      assertNotNull(out);
+      assertEquals(out.type, in.type);
+      assertEquals(out.id, in.id);
+      assertEquals(out.errorValue, in.errorValue);
+      assertEquals(out.hashCode(), in.hashCode());
+      assertEquals(out.toString(), in.toString());
+
+      assertEquals(in.getMetadataKey(), out.getMetadataKey());
+      assertEquals(in.getMetadataValue(), out.getMetadataValue());
+      
+      assertEquals(out, in);
+      assertEquals(in, out);
+
+    }
+    
+    assertFalse((new ToleratedUpdateError(CmdType.ADD, "doc1", "some error")).equals
+                (new ToleratedUpdateError(CmdType.ADD, "doc2", "some error")));
+    assertFalse((new ToleratedUpdateError(CmdType.ADD, "doc1", "some error")).equals
+                (new ToleratedUpdateError(CmdType.ADD, "doc1", "some errorxx")));
+    assertFalse((new ToleratedUpdateError(CmdType.ADD, "doc1", "some error")).equals
+                (new ToleratedUpdateError(CmdType.DELID, "doc1", "some error")));
+    
+  }
+  
+  public void testMetadataRoundTripComparisons() {
+    checkRoundTripComparisons(new Coppier() {
+      public ToleratedUpdateError copy(ToleratedUpdateError in) {
+        return ToleratedUpdateError.parseMetadataIfToleratedUpdateError
+          (in.getMetadataKey(), in.getMetadataValue());
+      }
+    });
+  }
+  
+  public void testMapRoundTripComparisons() {
+    checkRoundTripComparisons(new Coppier() {
+      public ToleratedUpdateError copy(ToleratedUpdateError in) {
+        return ToleratedUpdateError.parseMap(in.getSimpleMap());
+      }
+    });
+  }
+
+  private static abstract class Coppier {
+    public abstract ToleratedUpdateError copy(ToleratedUpdateError in);
+  }
+}
+
+
+
+


[17/50] [abbrv] lucene-solr git commit: don't use slow composite wrapper in these tests

Posted by ho...@apache.org.
don't use slow composite wrapper in these tests


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/dee8b5e4
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/dee8b5e4
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/dee8b5e4

Branch: refs/heads/jira/SOLR-445
Commit: dee8b5e40a3d37da180630c1b2839c1836f97c4d
Parents: 4015f12
Author: Mike McCandless <mi...@apache.org>
Authored: Wed Mar 9 14:10:10 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Wed Mar 9 14:11:04 2016 -0500

----------------------------------------------------------------------
 .../apache/lucene/search/join/TestBlockJoin.java  | 18 +++++++++---------
 .../lucene/queryparser/xml/TestCoreParser.java    |  2 +-
 2 files changed, 10 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dee8b5e4/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java
index 9c39299..01a8135 100644
--- a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java
+++ b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java
@@ -176,7 +176,7 @@ public class TestBlockJoin extends LuceneTestCase {
     
     IndexReader r = w.getReader();
     w.close();
-    IndexSearcher s = newSearcher(r);
+    IndexSearcher s = newSearcher(r, false);
 
     // Create a filter that defines "parent" documents in the index - in this case resumes
     BitSetProducer parentsFilter = new QueryBitSetProducer(new TermQuery(new Term("docType", "resume")));
@@ -263,7 +263,7 @@ public class TestBlockJoin extends LuceneTestCase {
 
     IndexReader r = w.getReader();
     w.close();
-    IndexSearcher s = newSearcher(r);
+    IndexSearcher s = newSearcher(r, false);
 
     // Hacky: this causes the query to need 2 rewrite
     // iterations: 
@@ -336,7 +336,7 @@ public class TestBlockJoin extends LuceneTestCase {
 
     IndexReader r = w.getReader();
     w.close();
-    IndexSearcher s = newSearcher(r);
+    IndexSearcher s = newSearcher(r, false);
 
     // Create a filter that defines "parent" documents in the index - in this case resumes
     BitSetProducer parentsFilter = new QueryBitSetProducer(new TermQuery(new Term("docType", "resume")));
@@ -640,7 +640,7 @@ public class TestBlockJoin extends LuceneTestCase {
       }
     }
 
-    final IndexSearcher s = newSearcher(r);
+    final IndexSearcher s = newSearcher(r, false);
 
     final IndexSearcher joinS = new IndexSearcher(joinR);
 
@@ -1051,7 +1051,7 @@ public class TestBlockJoin extends LuceneTestCase {
 
     IndexReader r = w.getReader();
     w.close();
-    IndexSearcher s = newSearcher(r);
+    IndexSearcher s = newSearcher(r, false);
 
     // Create a filter that defines "parent" documents in the index - in this case resumes
     BitSetProducer parentsFilter = new QueryBitSetProducer(new TermQuery(new Term("docType", "resume")));
@@ -1575,7 +1575,7 @@ public class TestBlockJoin extends LuceneTestCase {
 
     IndexReader r = w.getReader();
     w.close();
-    IndexSearcher s = newSearcher(r);
+    IndexSearcher s = newSearcher(r, false);
 
     // Create a filter that defines "parent" documents in the index - in this case resumes
     BitSetProducer parentsFilter = new QueryBitSetProducer(new TermQuery(new Term("docType", "resume")));
@@ -1613,7 +1613,7 @@ public class TestBlockJoin extends LuceneTestCase {
 
     IndexReader r = w.getReader();
     w.close();
-    IndexSearcher s = newSearcher(r);
+    IndexSearcher s = newSearcher(r, false);
 
     // Create a filter that defines "parent" documents in the index - in this case resumes
     BitSetProducer parentsFilter = new QueryBitSetProducer(new TermQuery(new Term("docType", "resume")));
@@ -1644,7 +1644,7 @@ public class TestBlockJoin extends LuceneTestCase {
     w.forceMerge(1);
 
     final IndexReader r = w.getReader();
-    final IndexSearcher s = newSearcher(r);
+    final IndexSearcher s = newSearcher(r, false);
     w.close();
 
     BitSetProducer parentFilter = new QueryBitSetProducer(new TermQuery(new Term("docType", "resume")));
@@ -1690,7 +1690,7 @@ public class TestBlockJoin extends LuceneTestCase {
     }
 
     final IndexReader r = w.getReader();
-    final IndexSearcher s = newSearcher(r);
+    final IndexSearcher s = newSearcher(r, false);
     w.close();
 
     BitSetProducer resumeFilter = new QueryBitSetProducer(new TermQuery(new Term("docType", "resume")));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dee8b5e4/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java
index 5cadec2..c8b357e 100644
--- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java
+++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java
@@ -80,7 +80,7 @@ public class TestCoreParser extends LuceneTestCase {
     d.close();
     writer.close();
     reader = DirectoryReader.open(dir);
-    searcher = newSearcher(reader);
+    searcher = newSearcher(reader, false);
 
   }
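
A note on the newSearcher(r, false) change repeated throughout this commit: the second argument is the LuceneTestCase helper's maybeWrap flag (per its signature at the time), and passing false keeps the searcher on the original reader instead of allowing the framework's random reader wrapping, which is how a slow composite wrapper could otherwise be applied. In sketch form, inside a LuceneTestCase subclass:

    IndexReader r = DirectoryReader.open(dir);
    IndexSearcher s1 = newSearcher(r);         // framework may randomly wrap r
    IndexSearcher s2 = newSearcher(r, false);  // never wraps; searches r exactly as opened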