Posted to commits@lucene.apache.org by cp...@apache.org on 2016/10/24 18:32:05 UTC

[01/50] [abbrv] lucene-solr:jira/solr-8542-v2: LUCENE-7493: FacetsCollector.search now accepts limit=0, for getting facets but not search hits

Repository: lucene-solr
Updated Branches:
  refs/heads/jira/solr-8542-v2 bfa05b830 -> 38052f356


LUCENE-7493: FacetsCollector.search now accepts limit=0, for getting facets but not search hits


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/739981b6
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/739981b6
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/739981b6

Branch: refs/heads/jira/solr-8542-v2
Commit: 739981b6c8e6ccd60279216b320d8a25d06c70e9
Parents: e836071
Author: Mike McCandless <mi...@apache.org>
Authored: Tue Oct 18 09:23:24 2016 -0400
Committer: Mike McCandless <mi...@apache.org>
Committed: Tue Oct 18 09:23:24 2016 -0400

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |  3 ++
 .../apache/lucene/facet/FacetsCollector.java    | 42 ++++++++++++--------
 .../apache/lucene/facet/TestDrillDownQuery.java | 11 +++++
 3 files changed, 40 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/739981b6/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 745d8fd..6d83c53 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -89,6 +89,9 @@ Bug Fixes
 * LUCENE-6914: Fixed DecimalDigitFilter in case of supplementary code points.
   (Hossman)
 
+* LUCENE-7493: FacetsCollector.search threw an unexpected exception if
+  you asked for zero hits but wanted facets (Mahesh via Mike McCandless)
+
 Improvements
 
 * LUCENE-7439: FuzzyQuery now matches all terms within the specified

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/739981b6/lucene/facet/src/java/org/apache/lucene/facet/FacetsCollector.java
----------------------------------------------------------------------
diff --git a/lucene/facet/src/java/org/apache/lucene/facet/FacetsCollector.java b/lucene/facet/src/java/org/apache/lucene/facet/FacetsCollector.java
index d3f2eb8..b942f7e 100644
--- a/lucene/facet/src/java/org/apache/lucene/facet/FacetsCollector.java
+++ b/lucene/facet/src/java/org/apache/lucene/facet/FacetsCollector.java
@@ -36,6 +36,7 @@ import org.apache.lucene.search.TopDocsCollector;
 import org.apache.lucene.search.TopFieldCollector;
 import org.apache.lucene.search.TopFieldDocs;
 import org.apache.lucene.search.TopScoreDocCollector;
+import org.apache.lucene.search.TotalHitCountCollector;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.BitDocIdSet;
 import org.apache.lucene.util.FixedBitSet;
@@ -251,23 +252,32 @@ public class FacetsCollector extends SimpleCollector implements Collector {
                                          + after.doc + " limit=" + limit);
     }
 
-    TopDocsCollector<?> hitsCollector;
-    if (sort != null) {
-      if (after != null && !(after instanceof FieldDoc)) {
-        // TODO: if we fix type safety of TopFieldDocs we can
-        // remove this
-        throw new IllegalArgumentException("after must be a FieldDoc; got " + after);
-      }
-      boolean fillFields = true;
-      hitsCollector = TopFieldCollector.create(sort, n,
-                                               (FieldDoc) after,
-                                               fillFields,
-                                               doDocScores,
-                                               doMaxScore);
+    TopDocs topDocs = null;
+    if (n==0) {
+      TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
+      searcher.search(q, MultiCollector.wrap(totalHitCountCollector, fc));
+      topDocs = new TopDocs(totalHitCountCollector.getTotalHits(), new ScoreDoc[0], Float.NaN);
     } else {
-      hitsCollector = TopScoreDocCollector.create(n, after);
+      TopDocsCollector<?> hitsCollector;
+      if (sort != null) {
+        if (after != null && !(after instanceof FieldDoc)) {
+          // TODO: if we fix type safety of TopFieldDocs we can
+          // remove this
+          throw new IllegalArgumentException("after must be a FieldDoc; got " + after);
+        }
+        boolean fillFields = true;
+        hitsCollector = TopFieldCollector.create(sort, n,
+                                                 (FieldDoc) after,
+                                                 fillFields,
+                                                 doDocScores,
+                                                 doMaxScore);
+      } else {
+        hitsCollector = TopScoreDocCollector.create(n, after);
+      }
+      searcher.search(q, MultiCollector.wrap(hitsCollector, fc));
+    
+      topDocs = hitsCollector.topDocs();
     }
-    searcher.search(q, MultiCollector.wrap(hitsCollector, fc));
-    return hitsCollector.topDocs();
+    return topDocs;
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/739981b6/lucene/facet/src/test/org/apache/lucene/facet/TestDrillDownQuery.java
----------------------------------------------------------------------
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/TestDrillDownQuery.java b/lucene/facet/src/test/org/apache/lucene/facet/TestDrillDownQuery.java
index f76e839..bf8d0f4 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/TestDrillDownQuery.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/TestDrillDownQuery.java
@@ -182,6 +182,17 @@ public class TestDrillDownQuery extends FacetTestCase {
     assertEquals(10, docs.totalHits);
   }
   
+  public void testZeroLimit() throws IOException {
+    IndexSearcher searcher = newSearcher(reader);
+    DrillDownQuery q = new DrillDownQuery(config);
+    q.add("b", "1");
+    int limit = 0;
+    FacetsCollector facetCollector = new FacetsCollector();
+    FacetsCollector.search(searcher, q, limit, facetCollector);
+    Facets facets = getTaxonomyFacetCounts(taxo, config, facetCollector, config.getDimConfig("b").indexFieldName);
+    assertNotNull(facets.getTopChildren(10, "b"));
+  }
+  
   public void testScoring() throws IOException {
     // verify that drill-down queries do not modify scores
     IndexSearcher searcher = newSearcher(reader);
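
As a usage note (a minimal sketch, not part of the commit): with this change a caller that only wants facet counts can pass limit=0 and skip hit collection entirely. The searcher, taxonomy reader and FacetsConfig below are assumed to be opened elsewhere.

    import java.io.IOException;

    import org.apache.lucene.facet.Facets;
    import org.apache.lucene.facet.FacetsCollector;
    import org.apache.lucene.facet.FacetsConfig;
    import org.apache.lucene.facet.taxonomy.FastTaxonomyFacetCounts;
    import org.apache.lucene.facet.taxonomy.TaxonomyReader;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;

    class FacetsOnlyExample {
      /** Returns facet counts for the query without collecting any search hits. */
      static Facets countFacetsOnly(IndexSearcher searcher, TaxonomyReader taxoReader,
                                    FacetsConfig config, Query query) throws IOException {
        FacetsCollector fc = new FacetsCollector();
        // limit (n) == 0: no ScoreDocs are gathered, only the total hit count,
        // while every match is still fed to the FacetsCollector.
        FacetsCollector.search(searcher, query, 0, fc);
        return new FastTaxonomyFacetCounts(taxoReader, config, fc);
      }
    }

From there, facets.getTopChildren(10, "b") works just as in the new testZeroLimit test above.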


[31/50] [abbrv] lucene-solr:jira/solr-8542-v2: SOLR-9546: reverted some changes

Posted by cp...@apache.org.
SOLR-9546: reverted some changes


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/49ca9cea
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/49ca9cea
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/49ca9cea

Branch: refs/heads/jira/solr-8542-v2
Commit: 49ca9cea7283ab54086fdedd09889d171c777052
Parents: ccbafdc
Author: Noble Paul <no...@apache.org>
Authored: Fri Oct 21 19:16:15 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Fri Oct 21 19:16:15 2016 +0530

----------------------------------------------------------------------
 .../apache/solr/search/mlt/CloudMLTQParser.java | 29 ++++++++++++++------
 1 file changed, 21 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/49ca9cea/solr/core/src/java/org/apache/solr/search/mlt/CloudMLTQParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/mlt/CloudMLTQParser.java b/solr/core/src/java/org/apache/solr/search/mlt/CloudMLTQParser.java
index 9ff5a3c..0f85feb 100644
--- a/solr/core/src/java/org/apache/solr/search/mlt/CloudMLTQParser.java
+++ b/solr/core/src/java/org/apache/solr/search/mlt/CloudMLTQParser.java
@@ -69,16 +69,29 @@ public class CloudMLTQParser extends QParser {
     Map<String,Float> boostFields = new HashMap<>();
     MoreLikeThis mlt = new MoreLikeThis(req.getSearcher().getIndexReader());
     
-    mlt.setMinTermFreq(localParams.getInt("mintf", MoreLikeThis.DEFAULT_MIN_TERM_FREQ));
-    mlt.setMinDocFreq(localParams.getInt("mindf", MoreLikeThis.DEFAULT_MIN_DOC_FREQ));
-    mlt.setMinWordLen(localParams.getInt("minwl", MoreLikeThis.DEFAULT_MIN_WORD_LENGTH));
-    mlt.setMaxWordLen(localParams.getInt("maxwl", MoreLikeThis.DEFAULT_MIN_WORD_LENGTH));
-    mlt.setMaxQueryTerms(localParams.getInt("maxqt",MoreLikeThis.DEFAULT_MAX_QUERY_TERMS));
-    mlt.setMaxNumTokensParsed(localParams.getInt("maxntp",MoreLikeThis.DEFAULT_MAX_NUM_TOKENS_PARSED));
-    mlt.setMaxDocFreq(localParams.getInt("maxdf", MoreLikeThis.DEFAULT_MAX_DOC_FREQ));
+    if(localParams.getInt("mintf") != null)
+      mlt.setMinTermFreq(localParams.getInt("mintf"));
+
+    mlt.setMinDocFreq(localParams.getInt("mindf", 0));
+
+    if(localParams.get("minwl") != null)
+      mlt.setMinWordLen(localParams.getInt("minwl"));
+
+    if(localParams.get("maxwl") != null)
+      mlt.setMaxWordLen(localParams.getInt("maxwl"));
+
+    if(localParams.get("maxqt") != null)
+      mlt.setMaxQueryTerms(localParams.getInt("maxqt"));
+
+    if(localParams.get("maxntp") != null)
+      mlt.setMaxNumTokensParsed(localParams.getInt("maxntp"));
+    
+    if(localParams.get("maxdf") != null) {
+      mlt.setMaxDocFreq(localParams.getInt("maxdf"));
+    }
 
     if(localParams.get("boost") != null) {
-      mlt.setBoost(localParams.getBool("boost", false));
+      mlt.setBoost(localParams.getBool("boost"));
       boostFields = SolrPluginUtils.parseFieldBoosts(qf);
     }
 


[02/50] [abbrv] lucene-solr:jira/solr-8542-v2: LUCENE-7489: Wrap only once in case GCD compression is used.

Posted by cp...@apache.org.
LUCENE-7489: Wrap only once in case GCD compression is used.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/a17e9200
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/a17e9200
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/a17e9200

Branch: refs/heads/jira/solr-8542-v2
Commit: a17e92006f087a0601d9329bf9b9c946ca72478b
Parents: 739981b
Author: Adrien Grand <jp...@gmail.com>
Authored: Tue Oct 18 16:07:52 2016 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Tue Oct 18 16:08:29 2016 +0200

----------------------------------------------------------------------
 .../lucene70/Lucene70DocValuesProducer.java     | 63 +++++++++-----------
 1 file changed, 27 insertions(+), 36 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a17e9200/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70DocValuesProducer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70DocValuesProducer.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70DocValuesProducer.java
index 755da79..637c8ee 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70DocValuesProducer.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70DocValuesProducer.java
@@ -424,47 +424,38 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
       };
     } else {
       final RandomAccessInput slice = data.randomAccessSlice(entry.valuesOffset, entry.valuesLength);
-      LongValues values = DirectReader.getInstance(slice, entry.bitsPerValue);
-      if (entry.gcd != 1) {
-        values = applyGcd(values, entry.gcd);
-      }
-      if (entry.minValue != 0) {
-        values = applyDelta(values, entry.minValue);
-      }
+      final LongValues values = DirectReader.getInstance(slice, entry.bitsPerValue);
       if (entry.table != null) {
-        values = applyTable(values, entry.table);
+        final long[] table = entry.table;
+        return new LongValues() {
+          @Override
+          public long get(long index) {
+            return table[(int) values.get(index)];
+          }
+        };
+      } else if (entry.gcd != 1) {
+        final long gcd = entry.gcd;
+        final long minValue = entry.minValue;
+        return new LongValues() {
+          @Override
+          public long get(long index) {
+            return values.get(index) * gcd + minValue;
+          }
+        };
+      } else if (entry.minValue != 0) {
+        final long minValue = entry.minValue;
+        return new LongValues() {
+          @Override
+          public long get(long index) {
+            return values.get(index) + minValue;
+          }
+        };
+      } else {
+        return values;
       }
-      return values;
     }
   }
 
-  private LongValues applyDelta(LongValues values, long delta) {
-    return new LongValues() {
-      @Override
-      public long get(long index) {
-        return delta + values.get(index);
-      }
-    };
-  }
-
-  private LongValues applyGcd(LongValues values, long gcd) {
-    return new LongValues() {
-      @Override
-      public long get(long index) {
-        return values.get(index) * gcd;
-      }
-    };
-  }
-
-  private LongValues applyTable(LongValues values, long[] table) {
-    return new LongValues() {
-      @Override
-      public long get(long index) {
-        return table[(int) values.get(index)];
-      }
-    };
-  }
-
   @Override
   public BinaryDocValues getBinary(FieldInfo field) throws IOException {
     BinaryEntry entry = binaries.get(field.name);
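
In effect the rewrite folds the GCD, delta and table corrections into a single LongValues wrapper instead of stacking up to three of them per lookup. A minimal sketch of the equivalent per-value decode (illustrative only, not the committed code; `raw` stands for the DirectReader-backed values):

    import org.apache.lucene.util.LongValues;

    class GcdDecodeSketch {
      /** Equivalent decode: at most one correction layer over the raw reader. */
      static long decode(LongValues raw, long index, long[] table, long gcd, long minValue) {
        long v = raw.get(index);
        if (table != null) {
          return table[(int) v];     // table-compressed values
        }
        // gcd == 1 and minValue == 0 reduce this to the identity, matching the
        // plain "return values" branch in the committed code.
        return v * gcd + minValue;
      }
    }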


[38/50] [abbrv] lucene-solr:jira/solr-8542-v2: SOLR-9518: Kerberos Delegation Tokens don't work without a chrooted ZK

Posted by cp...@apache.org.
SOLR-9518: Kerberos Delegation Tokens don't work without a chrooted ZK


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/9b49c72d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/9b49c72d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/9b49c72d

Branch: refs/heads/jira/solr-8542-v2
Commit: 9b49c72dbc4d27a3160b34b5e38e095ca85daa6f
Parents: 0ec1f22
Author: Noble Paul <no...@apache.org>
Authored: Mon Oct 24 13:06:40 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Mon Oct 24 13:06:40 2016 +0530

----------------------------------------------------------------------
 solr/CHANGES.txt                                         |  2 ++
 .../solr/security/DelegationTokenKerberosFilter.java     | 11 ++++++++---
 .../java/org/apache/solr/security/KerberosPlugin.java    |  9 +++++----
 3 files changed, 15 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9b49c72d/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index e1c3971..92a994f 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -200,6 +200,8 @@ Bug Fixes
 
 * SOLR-9325: solr.log is now written to $SOLR_LOGS_DIR without changing log4j.properties (janhoy)
 
+* SOLR-9518: Kerberos Delegation Tokens don't work without a chrooted ZK (Ishan Chattopadhyaya, via noble)
+
 Optimizations
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9b49c72d/solr/core/src/java/org/apache/solr/security/DelegationTokenKerberosFilter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/security/DelegationTokenKerberosFilter.java b/solr/core/src/java/org/apache/solr/security/DelegationTokenKerberosFilter.java
index ca27861..421de52 100644
--- a/solr/core/src/java/org/apache/solr/security/DelegationTokenKerberosFilter.java
+++ b/solr/core/src/java/org/apache/solr/security/DelegationTokenKerberosFilter.java
@@ -50,6 +50,11 @@ import org.apache.zookeeper.data.ACL;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+/**
+ * This is an authentication filter based on Hadoop's {@link DelegationTokenAuthenticationFilter}.
+ * The Kerberos plugin can be configured to use delegation tokens, which allow an
+ * application to reuse the authentication of an end-user or another application.
+ */
 public class DelegationTokenKerberosFilter extends DelegationTokenAuthenticationFilter {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
@@ -149,10 +154,10 @@ public class DelegationTokenKerberosFilter extends DelegationTokenAuthentication
       throw new IllegalArgumentException("zkClient required");
     }
     String zkHost = zkClient.getZkServerAddress();
-    String zkChroot = zkHost.substring(zkHost.indexOf("/"));
-    zkChroot = zkChroot.startsWith("/") ? zkChroot.substring(1) : zkChroot;
+    String zkChroot = zkHost.contains("/")? zkHost.substring(zkHost.indexOf("/")): "";
     String zkNamespace = zkChroot + SecurityAwareZkACLProvider.SECURITY_ZNODE_PATH;
-    String zkConnectionString = zkHost.substring(0, zkHost.indexOf("/"));
+    zkNamespace = zkNamespace.startsWith("/") ? zkNamespace.substring(1) : zkNamespace;
+    String zkConnectionString = zkHost.contains("/")? zkHost.substring(0, zkHost.indexOf("/")): zkHost;
     SolrZkToCuratorCredentialsACLs curatorToSolrZk = new SolrZkToCuratorCredentialsACLs(zkClient);
     final int connectionTimeoutMs = 30000; // this value is currently hard coded, see SOLR-7561.
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9b49c72d/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java b/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java
index d4a2823..3655ac9 100644
--- a/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java
+++ b/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java
@@ -142,12 +142,13 @@ public class KerberosPlugin extends AuthenticationPlugin implements HttpClientBu
           String zkHost = controller.getZkServerAddress();
           putParam(params, "token.validity", DELEGATION_TOKEN_VALIDITY, "36000");
           params.put("zk-dt-secret-manager.enable", "true");
+
+          String chrootPath = zkHost.contains("/")? zkHost.substring(zkHost.indexOf("/")): "";
+          String znodeWorkingPath = chrootPath + SecurityAwareZkACLProvider.SECURITY_ZNODE_PATH + "/zkdtsm";
           // Note - Curator complains if the znodeWorkingPath starts with /
-          String chrootPath = zkHost.substring(zkHost.indexOf("/"));
-          String relativePath = chrootPath.startsWith("/") ? chrootPath.substring(1) : chrootPath;
+          znodeWorkingPath = znodeWorkingPath.startsWith("/")? znodeWorkingPath.substring(1): znodeWorkingPath;
           putParam(params, "zk-dt-secret-manager.znodeWorkingPath",
-              DELEGATION_TOKEN_SECRET_MANAGER_ZNODE_WORKING_PATH,
-              relativePath + SecurityAwareZkACLProvider.SECURITY_ZNODE_PATH + "/zkdtsm");
+              DELEGATION_TOKEN_SECRET_MANAGER_ZNODE_WORKING_PATH, znodeWorkingPath);
           putParam(params, "signer.secret.provider.zookeeper.path",
               DELEGATION_TOKEN_SECRET_PROVIDER_ZK_PATH, "/token");
           // ensure krb5 is setup properly before running curator
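
Both hunks come down to the same parsing step: the ZooKeeper address may or may not carry a chroot suffix, e.g. "zk1:2181,zk2:2181/solr" versus "zk1:2181". A minimal sketch of that split (illustrative only; the helper name is invented for this note):

    class ZkHostSplitSketch {
      /** Splits "host1:2181,host2:2181/chroot" into {connection string, chroot}; chroot is "" when absent. */
      static String[] splitZkHost(String zkHost) {
        int slash = zkHost.indexOf('/');
        String connectionString = slash >= 0 ? zkHost.substring(0, slash) : zkHost;
        String chroot = slash >= 0 ? zkHost.substring(slash) : "";
        return new String[] { connectionString, chroot };
      }
    }

The previous code assumed a '/' was always present, so an un-chrooted address broke the delegation-token paths; hence the issue title.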


[17/50] [abbrv] lucene-solr:jira/solr-8542-v2: SOLR-7580: Move defaults in bin/solr.in.sh into bin/solr (incl. Windows)

Posted by cp...@apache.org.
SOLR-7580: Move defaults in bin/solr.in.sh into bin/solr (incl. Windows)


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/8ae3304c
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/8ae3304c
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/8ae3304c

Branch: refs/heads/jira/solr-8542-v2
Commit: 8ae3304c8631e1ba9a14aa0fc576e13b9198dfe5
Parents: 67ba19a
Author: David Smiley <ds...@apache.org>
Authored: Wed Oct 19 16:38:06 2016 -0400
Committer: David Smiley <ds...@apache.org>
Committed: Wed Oct 19 16:38:06 2016 -0400

----------------------------------------------------------------------
 solr/CHANGES.txt     | 10 +++++++--
 solr/bin/solr        | 54 +++++++++++++++++++++++++++++++---------------
 solr/bin/solr.cmd    | 55 ++++++++++++++++++++++++++++++++++-------------
 solr/bin/solr.in.cmd | 55 +++++++++++++++++------------------------------
 solr/bin/solr.in.sh  | 30 +++++++-------------------
 5 files changed, 113 insertions(+), 91 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ae3304c/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 880718c..56f3b80 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -80,10 +80,10 @@ Upgrade Notes
 ----------------------
 
 * If you use the JSON Facet API (json.facet) with method=stream, you must now set sort='index asc' to get the streaming
-behavior; otherwise it won't stream.  Reminder: "method" is a hint that doesn't change defaults of other parameters.
+  behavior; otherwise it won't stream.  Reminder: "method" is a hint that doesn't change defaults of other parameters.
 
 * If you use the JSON Facet API (json.facet) to facet on a numeric field and if you use mincount=0 or if you set the
-prefix, then you will now get an error as these options are incompatible with numeric faceting.
+  prefix, then you will now get an error as these options are incompatible with numeric faceting.
 
 * Solr's logging verbosity at the INFO level has been greatly reduced, and
   you may need to update the log configs to use the DEBUG level to get the
@@ -94,6 +94,9 @@ prefix, then you will now get an error as these options are incompatible with nu
   addition, MiniSolrCloudCluster#uploadConfigSet(File, String) has been
   deprecated in favour of #uploadConfigSet(Path, String)
 
+* The bin/solr.in.sh (bin/solr.in.cmd on Windows) is now completely commented by default. Previously, this wasn't so,
+  which had the effect of masking existing environment variables.
+
 New Features
 ----------------------
 * SOLR-5725: facet.method=enum can bypass exact counts calculation with facet.exists=true, it just returns 1 for 
@@ -298,6 +301,9 @@ Other Changes
 
 * SOLR-9634: Deprecate collection methods on MiniSolrCloudCluster (Alan Woodward)
 
+* SOLR-7580: Moved defaults within bin/solr.in.sh (and bin/solr.in.cmd on Windows) to bin/solr (and bin/solr.cmd)
+  such that the default state of these files is to set nothing. This makes Solr work better with Docker. (David Smiley)
+
 ==================  6.2.1 ==================
 
 Bug Fixes

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ae3304c/solr/bin/solr
----------------------------------------------------------------------
diff --git a/solr/bin/solr b/solr/bin/solr
index 90fffa1..df6b4d0 100755
--- a/solr/bin/solr
+++ b/solr/bin/solr
@@ -1409,15 +1409,21 @@ if [ "`echo $java_ver_out | grep -i "IBM J9"`" != "" ]; then
   JAVA_VENDOR="IBM J9"
 fi
 
+# Establish default opts if no env var set (otherwise init to empty)
+if [ -z ${GC_LOG_OPTS+x} ]; then
+  GC_LOG_OPTS=('-verbose:gc' '-XX:+PrintHeapAtGC' '-XX:+PrintGCDetails' \
+    '-XX:+PrintGCDateStamps' '-XX:+PrintGCTimeStamps' '-XX:+PrintTenuringDistribution' \
+    '-XX:+PrintGCApplicationStoppedTime')
+else
+  GC_LOG_OPTS=($GC_LOG_OPTS)
+fi
 # if verbose gc logging enabled, setup the location of the log file
 if [ "$GC_LOG_OPTS" != "" ]; then
   gc_log_flag="-Xloggc"
   if [ "$JAVA_VENDOR" == "IBM J9" ]; then
     gc_log_flag="-Xverbosegclog"
   fi
-  GC_LOG_OPTS=($GC_LOG_OPTS "$gc_log_flag:$SOLR_LOGS_DIR/solr_gc.log")
-else
-  GC_LOG_OPTS=()
+  GC_LOG_OPTS+=("$gc_log_flag:$SOLR_LOGS_DIR/solr_gc.log")
 fi
 
 # If ZK_HOST is defined, the assume SolrCloud mode
@@ -1484,6 +1490,12 @@ else
   JAVA_MEM_OPTS=("-Xms$SOLR_HEAP" "-Xmx$SOLR_HEAP")
 fi
 
+# Pick default for Java thread stack size, and then add to SOLR_OPTS
+if [ -z ${SOLR_JAVA_STACK_SIZE+x} ]; then
+  SOLR_JAVA_STACK_SIZE='-Xss256k'
+fi
+SOLR_OPTS+=($SOLR_JAVA_STACK_SIZE)
+
 if [ -z "$SOLR_TIMEZONE" ]; then
   SOLR_TIMEZONE='UTC'
 fi
@@ -1496,20 +1508,28 @@ function launch_solr() {
   
   SOLR_ADDL_ARGS="$2"
 
-  GC_TUNE=($GC_TUNE)
-  # deal with Java version specific GC and other flags
-  if [ "${JAVA_VERSION:0:3}" == "1.7" ]; then
-    # Specific Java version hacking
-    GC_TUNE+=('-XX:CMSFullGCsBeforeCompaction=1' '-XX:CMSTriggerPermRatio=80')
-    if [ "$JAVA_VENDOR" != "IBM J9" ]; then
-      JAVA_MINOR_VERSION=${JAVA_VERSION:(-2)}
-      if [[ $JAVA_MINOR_VERSION -ge 40 && $JAVA_MINOR_VERSION -le 51 ]]; then
-        GC_TUNE+=('-XX:-UseSuperWord')
-        echo -e "\nWARNING: Java version $JAVA_VERSION has known bugs with Lucene and requires the -XX:-UseSuperWord flag. Please consider upgrading your JVM.\n"
-      fi
-    fi
+  # define default GC_TUNE
+  if [ -z ${GC_TUNE+x} ]; then
+      GC_TUNE=('-XX:NewRatio=3' \
+        '-XX:SurvivorRatio=4' \
+        '-XX:TargetSurvivorRatio=90' \
+        '-XX:MaxTenuringThreshold=8' \
+        '-XX:+UseConcMarkSweepGC' \
+        '-XX:+UseParNewGC' \
+        '-XX:ConcGCThreads=4' '-XX:ParallelGCThreads=4' \
+        '-XX:+CMSScavengeBeforeRemark' \
+        '-XX:PretenureSizeThreshold=64m' \
+        '-XX:+UseCMSInitiatingOccupancyOnly' \
+        '-XX:CMSInitiatingOccupancyFraction=50' \
+        '-XX:CMSMaxAbortablePrecleanTime=6000' \
+        '-XX:+CMSParallelRemarkEnabled' \
+        '-XX:+ParallelRefProcEnabled' \
+        '-XX:-OmitStackTraceInFastThrow')
+  else
+    GC_TUNE=($GC_TUNE)
   fi
 
+
   # If SSL-related system props are set, add them to SOLR_OPTS
   if [ -n "$SOLR_SSL_OPTS" ]; then
     # If using SSL and solr.jetty.https.port not set explicitly, use the jetty.port
@@ -1540,11 +1560,11 @@ function launch_solr() {
     fi
 
     if [ "$SOLR_OPTS" != "" ]; then
-      echo -e "    SOLR_OPTS        = ${SOLR_OPTS[@]}"
+      echo -e "    SOLR_OPTS       = ${SOLR_OPTS[@]}"
     fi
 
     if [ "$SOLR_ADDL_ARGS" != "" ]; then
-      echo -e "    SOLR_ADDL_ARGS   = $SOLR_ADDL_ARGS"
+      echo -e "    SOLR_ADDL_ARGS  = $SOLR_ADDL_ARGS"
     fi
 
     if [ "$ENABLE_REMOTE_JMX_OPTS" == "true" ]; then

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ae3304c/solr/bin/solr.cmd
----------------------------------------------------------------------
diff --git a/solr/bin/solr.cmd b/solr/bin/solr.cmd
index 5376143..10ea6d6 100644
--- a/solr/bin/solr.cmd
+++ b/solr/bin/solr.cmd
@@ -62,6 +62,9 @@ IF NOT "%SOLR_HOST%"=="" (
 ) ELSE (
   set "SOLR_TOOL_HOST=localhost"
 )
+IF "%SOLR_JETTY_HOST%"=="" (
+  set SOLR_JETTY_HOST=0.0.0.0
+)
 
 REM Verify Java is available
 IF DEFINED SOLR_JAVA_HOME set "JAVA_HOME=%SOLR_JAVA_HOME%"
@@ -910,16 +913,36 @@ IF "%ENABLE_REMOTE_JMX_OPTS%"=="true" (
 
 IF NOT "%SOLR_HEAP%"=="" set SOLR_JAVA_MEM=-Xms%SOLR_HEAP% -Xmx%SOLR_HEAP%
 IF "%SOLR_JAVA_MEM%"=="" set SOLR_JAVA_MEM=-Xms512m -Xmx512m
+IF "%SOLR_JAVA_STACK_SIZE%"=="" set SOLR_JAVA_STACK_SIZE=-Xss256k
+set SOLR_OPTS=%SOLR_JAVA_STACK_SIZE% %SOLR_OPTS%
 IF "%SOLR_TIMEZONE%"=="" set SOLR_TIMEZONE=UTC
 
-IF "!JAVA_MAJOR_VERSION!"=="7" (
-  set "GC_TUNE=%GC_TUNE% -XX:CMSFullGCsBeforeCompaction=1 -XX:CMSTriggerPermRatio=80"
-  IF !JAVA_BUILD! GEQ 40 (
-    IF !JAVA_BUILD! LEQ 51 (
-      set "GC_TUNE=!GC_TUNE! -XX:-UseSuperWord"
-      @echo WARNING: Java version !JAVA_VERSION_INFO! has known bugs with Lucene and requires the -XX:-UseSuperWord flag. Please consider upgrading your JVM.
-    )
-  )
+IF "%GC_TUNE%"=="" (
+  set GC_TUNE=-XX:NewRatio=3 ^
+   -XX:SurvivorRatio=4 ^
+   -XX:TargetSurvivorRatio=90 ^
+   -XX:MaxTenuringThreshold=8 ^
+   -XX:+UseConcMarkSweepGC ^
+   -XX:+UseParNewGC ^
+   -XX:ConcGCThreads=4 -XX:ParallelGCThreads=4 ^
+   -XX:+CMSScavengeBeforeRemark ^
+   -XX:PretenureSizeThreshold=64m ^
+   -XX:+UseCMSInitiatingOccupancyOnly ^
+   -XX:CMSInitiatingOccupancyFraction=50 ^
+   -XX:CMSMaxAbortablePrecleanTime=6000 ^
+   -XX:+CMSParallelRemarkEnabled ^
+   -XX:+ParallelRefProcEnabled ^
+   -XX:-OmitStackTraceInFastThrow
+)
+
+IF "%GC_LOG_OPTS%"=="" (
+  set GC_LOG_OPTS=-verbose:gc ^
+   -XX:+PrintHeapAtGC ^
+   -XX:+PrintGCDetails ^
+   -XX:+PrintGCDateStamps ^
+   -XX:+PrintGCTimeStamps ^
+   -XX:+PrintTenuringDistribution ^
+   -XX:+PrintGCApplicationStoppedTime
 )
 
 IF "%verbose%"=="1" (
@@ -1009,15 +1032,17 @@ IF "%FG%"=="1" (
   echo %SOLR_PORT%>"%SOLR_TIP%"\bin\solr-%SOLR_PORT%.port
   "%JAVA%" %SERVEROPT% %SOLR_JAVA_MEM% %START_OPTS% %GCLOG_OPT%:"!SOLR_LOGS_DIR!/solr_gc.log" ^
     -Dlog4j.configuration="%LOG4J_CONFIG%" -DSTOP.PORT=!STOP_PORT! -DSTOP.KEY=%STOP_KEY% ^
-    -Djetty.port=%SOLR_PORT% -Dsolr.solr.home="%SOLR_HOME%" -Dsolr.install.dir="%SOLR_TIP%" ^
-    -Djetty.home="%SOLR_SERVER_DIR%" -Djava.io.tmpdir="%SOLR_SERVER_DIR%\tmp" -jar start.jar "%SOLR_JETTY_CONFIG%"
+    -Dsolr.solr.home="%SOLR_HOME%" -Dsolr.install.dir="%SOLR_TIP%" ^
+    -Djetty.host=%SOLR_JETTY_HOST% -Djetty.port=%SOLR_PORT% -Djetty.home="%SOLR_SERVER_DIR%" ^
+    -Djava.io.tmpdir="%SOLR_SERVER_DIR%\tmp" -jar start.jar "%SOLR_JETTY_CONFIG%"
 ) ELSE (
-  START /B "Solr-%SOLR_PORT%" /D "%SOLR_SERVER_DIR%" "%JAVA%" %SERVEROPT% %SOLR_JAVA_MEM% %START_OPTS% ^
-    %GCLOG_OPT%:"!SOLR_LOGS_DIR!/solr_gc.log" -Dlog4j.configuration="%LOG4J_CONFIG%" -DSTOP.PORT=!STOP_PORT! ^
+  START /B "Solr-%SOLR_PORT%" /D "%SOLR_SERVER_DIR%" ^
+    "%JAVA%" %SERVEROPT% %SOLR_JAVA_MEM% %START_OPTS% %GCLOG_OPT%:"!SOLR_LOGS_DIR!/solr_gc.log" ^
+    -Dlog4j.configuration="%LOG4J_CONFIG%" -DSTOP.PORT=!STOP_PORT! -DSTOP.KEY=%STOP_KEY% ^
     -Dsolr.log.muteconsole ^
-    -DSTOP.KEY=%STOP_KEY% -Djetty.port=%SOLR_PORT% -Dsolr.solr.home="%SOLR_HOME%" -Dsolr.install.dir="%SOLR_TIP%" ^
-    -Djetty.home="%SOLR_SERVER_DIR%" -Djava.io.tmpdir="%SOLR_SERVER_DIR%\tmp" -jar start.jar ^
-    "%SOLR_JETTY_CONFIG%" > "!SOLR_LOGS_DIR!\solr-%SOLR_PORT%-console.log"
+    -Dsolr.solr.home="%SOLR_HOME%" -Dsolr.install.dir="%SOLR_TIP%" ^
+    -Djetty.host=%SOLR_JETTY_HOST% -Djetty.port=%SOLR_PORT% -Djetty.home="%SOLR_SERVER_DIR%" ^
+    -Djava.io.tmpdir="%SOLR_SERVER_DIR%\tmp" -jar start.jar "%SOLR_JETTY_CONFIG%" > "!SOLR_LOGS_DIR!\solr-%SOLR_PORT%-console.log"
   echo %SOLR_PORT%>"%SOLR_TIP%"\bin\solr-%SOLR_PORT%.port
 
   REM now wait to see Solr come online ...

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ae3304c/solr/bin/solr.in.cmd
----------------------------------------------------------------------
diff --git a/solr/bin/solr.in.cmd b/solr/bin/solr.in.cmd
index 5a2d00f..06da233 100644
--- a/solr/bin/solr.in.cmd
+++ b/solr/bin/solr.in.cmd
@@ -16,40 +16,23 @@
 
 @echo off
 
+REM Settings here will override settings in existing env vars or in bin/solr.  The default shipped state
+REM of this file is completely commented.
+
 REM By default the script will use JAVA_HOME to determine which java
 REM to use, but you can set a specific path for Solr to use without
 REM affecting other Java applications on your server/workstation.
 REM set SOLR_JAVA_HOME=
 
 REM Increase Java Min/Max Heap as needed to support your indexing / query needs
-set SOLR_JAVA_MEM=-Xms512m -Xmx512m
+REM set SOLR_JAVA_MEM=-Xms512m -Xmx512m
 
 REM Enable verbose GC logging
-set GC_LOG_OPTS=-verbose:gc -XX:+PrintHeapAtGC -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+PrintGCTimeStamps -XX:+PrintTenuringDistribution -XX:+PrintGCApplicationStoppedTime
-
-REM Changes the logging level. Valid values: ALL, TRACE, DEBUG, INFO, WARN, ERROR, FATAL, OFF. Default is INFO
-REM This is an alternative to changing the rootLogger in log4j.properties
-REM set SOLR_LOG_LEVEL=INFO
-
-REM Location where Solr should write logs to. Absolute or relative to solr start dir
-REM set SOLR_LOGS_DIR=logs
+REM set GC_LOG_OPTS=-verbose:gc -XX:+PrintHeapAtGC -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+PrintGCTimeStamps -XX:+PrintTenuringDistribution -XX:+PrintGCApplicationStoppedTime
 
-REM These GC settings have shown to work well for a number of common Solr workloads
-set GC_TUNE=-XX:NewRatio=3 ^
- -XX:SurvivorRatio=4 ^
- -XX:TargetSurvivorRatio=90 ^
- -XX:MaxTenuringThreshold=8 ^
- -XX:+UseConcMarkSweepGC ^
- -XX:+UseParNewGC ^
- -XX:ConcGCThreads=4 -XX:ParallelGCThreads=4 ^
- -XX:+CMSScavengeBeforeRemark ^
- -XX:PretenureSizeThreshold=64m ^
- -XX:+UseCMSInitiatingOccupancyOnly ^
- -XX:CMSInitiatingOccupancyFraction=50 ^
- -XX:CMSMaxAbortablePrecleanTime=6000 ^
- -XX:+CMSParallelRemarkEnabled ^
- -XX:+ParallelRefProcEnabled ^
- -XX:-OmitStackTraceInFastThrow
+REM Various GC settings have shown to work well for a number of common Solr workloads.
+REM See solr.cmd GC_TUNE for the default list.
+REM set GC_TUNE=-XX:NewRatio=3 -XX:SurvivorRatio=4     etc.
 
 REM Set the ZooKeeper connection string if using an external ZooKeeper ensemble
 REM e.g. host1:2181,host2:2181/chroot
@@ -69,20 +52,11 @@ REM set SOLR_TIMEZONE=UTC
 REM Set to true to activate the JMX RMI connector to allow remote JMX client applications
 REM to monitor the JVM hosting Solr; set to "false" to disable that behavior
 REM (false is recommended in production environments)
-set ENABLE_REMOTE_JMX_OPTS=false
+REM set ENABLE_REMOTE_JMX_OPTS=false
 
 REM The script will use SOLR_PORT+10000 for the RMI_PORT or you can set it here
 REM set RMI_PORT=18983
 
-REM Set the host interface to listen on. Jetty will listen on all interfaces (0.0.0.0) by default.
-REM This must be an IPv4 ("a.b.c.d") or bracketed IPv6 ("[x::y]") address, not a hostname!
-set SOLR_JETTY_HOST=0.0.0.0
-
-set SOLR_OPTS=%SOLR_OPTS% -Djetty.host=%SOLR_JETTY_HOST%
-
-REM Set the thread stack size
-set SOLR_OPTS=%SOLR_OPTS% -Xss256k
-
 REM Anything you add to the SOLR_OPTS variable will be included in the java
 REM start command line as-is, in ADDITION to other options. If you specify the
 REM -a option on start script, those options will be appended as well. Examples:
@@ -94,6 +68,17 @@ REM Path to a directory for Solr to store cores and their data. By default, Solr
 REM If solr.xml is not stored in ZooKeeper, this directory needs to contain solr.xml
 REM set SOLR_HOME=
 
+REM Changes the logging level. Valid values: ALL, TRACE, DEBUG, INFO, WARN, ERROR, FATAL, OFF. Default is INFO
+REM This is an alternative to changing the rootLogger in log4j.properties
+REM set SOLR_LOG_LEVEL=INFO
+
+REM Location where Solr should write logs to. Absolute or relative to solr start dir
+REM set SOLR_LOGS_DIR=logs
+
+REM Set the host interface to listen on. Jetty will listen on all interfaces (0.0.0.0) by default.
+REM This must be an IPv4 ("a.b.c.d") or bracketed IPv6 ("[x::y]") address, not a hostname!
+REM set SOLR_JETTY_HOST=0.0.0.0
+
 REM Sets the port Solr binds to, default is 8983
 REM set SOLR_PORT=8983
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8ae3304c/solr/bin/solr.in.sh
----------------------------------------------------------------------
diff --git a/solr/bin/solr.in.sh b/solr/bin/solr.in.sh
index dd41f46..2fcaabb 100644
--- a/solr/bin/solr.in.sh
+++ b/solr/bin/solr.in.sh
@@ -13,38 +13,27 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+# Settings here will override settings in existing env vars or in bin/solr.  The default shipped state
+# of this file is completely commented.
+
 # By default the script will use JAVA_HOME to determine which java
 # to use, but you can set a specific path for Solr to use without
 # affecting other Java applications on your server/workstation.
 #SOLR_JAVA_HOME=""
 
 # Increase Java Heap as needed to support your indexing / query needs
-SOLR_HEAP="512m"
+#SOLR_HEAP="512m"
 
 # Expert: If you want finer control over memory options, specify them directly
 # Comment out SOLR_HEAP if you are using this though, that takes precedence
 #SOLR_JAVA_MEM="-Xms512m -Xmx512m"
 
 # Enable verbose GC logging
-GC_LOG_OPTS="-verbose:gc -XX:+PrintHeapAtGC -XX:+PrintGCDetails \
--XX:+PrintGCDateStamps -XX:+PrintGCTimeStamps -XX:+PrintTenuringDistribution -XX:+PrintGCApplicationStoppedTime"
+#GC_LOG_OPTS="-verbose:gc -XX:+PrintHeapAtGC -XX:+PrintGCDetails \
+#-XX:+PrintGCDateStamps -XX:+PrintGCTimeStamps -XX:+PrintTenuringDistribution -XX:+PrintGCApplicationStoppedTime"
 
 # These GC settings have shown to work well for a number of common Solr workloads
-GC_TUNE="-XX:NewRatio=3 \
--XX:SurvivorRatio=4 \
--XX:TargetSurvivorRatio=90 \
--XX:MaxTenuringThreshold=8 \
--XX:+UseConcMarkSweepGC \
--XX:+UseParNewGC \
--XX:ConcGCThreads=4 -XX:ParallelGCThreads=4 \
--XX:+CMSScavengeBeforeRemark \
--XX:PretenureSizeThreshold=64m \
--XX:+UseCMSInitiatingOccupancyOnly \
--XX:CMSInitiatingOccupancyFraction=50 \
--XX:CMSMaxAbortablePrecleanTime=6000 \
--XX:+CMSParallelRemarkEnabled \
--XX:+ParallelRefProcEnabled \
--XX:-OmitStackTraceInFastThrow"
+#GC_TUNE="-XX:NewRatio=3 -XX:SurvivorRatio=4    etc.
 
 # Set the ZooKeeper connection string if using an external ZooKeeper ensemble
 # e.g. host1:2181,host2:2181/chroot
@@ -64,14 +53,11 @@ GC_TUNE="-XX:NewRatio=3 \
 # Set to true to activate the JMX RMI connector to allow remote JMX client applications
 # to monitor the JVM hosting Solr; set to "false" to disable that behavior
 # (false is recommended in production environments)
-ENABLE_REMOTE_JMX_OPTS="false"
+#ENABLE_REMOTE_JMX_OPTS="false"
 
 # The script will use SOLR_PORT+10000 for the RMI_PORT or you can set it here
 # RMI_PORT=18983
 
-# Set the thread stack size
-SOLR_OPTS="$SOLR_OPTS -Xss256k"
-
 # Anything you add to the SOLR_OPTS variable will be included in the java
 # start command line as-is, in ADDITION to other options. If you specify the
 # -a option on start script, those options will be appended as well. Examples:


[49/50] [abbrv] lucene-solr:jira/solr-8542-v2: in TestLTROnSolrCloud replace use of deprecated MiniSolrCloudCluster.uploadConfigDir

Posted by cp...@apache.org.
in TestLTROnSolrCloud replace use of deprecated MiniSolrCloudCluster.uploadConfigDir


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/4ca9262a
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/4ca9262a
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/4ca9262a

Branch: refs/heads/jira/solr-8542-v2
Commit: 4ca9262ad7bbca09e7db0e729fe33d093a8dab95
Parents: 8805809
Author: Christine Poerschke <cp...@apache.org>
Authored: Mon Oct 24 13:10:38 2016 -0500
Committer: Christine Poerschke <cp...@apache.org>
Committed: Mon Oct 24 13:10:38 2016 -0500

----------------------------------------------------------------------
 .../ltr/src/test/org/apache/solr/ltr/TestLTROnSolrCloud.java       | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4ca9262a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTROnSolrCloud.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTROnSolrCloud.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTROnSolrCloud.java
index c9f00dd..a8f938d 100644
--- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTROnSolrCloud.java
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTROnSolrCloud.java
@@ -122,7 +122,7 @@ public class TestLTROnSolrCloud extends TestRerankBase {
     jc = JettyConfig.builder(jc).withServlets(extraServlets).build();
     solrCluster = new MiniSolrCloudCluster(numServers, tmpSolrHome.toPath(), jc);
     File configDir = tmpSolrHome.toPath().resolve("collection1/conf").toFile();
-    solrCluster.uploadConfigDir(configDir, "conf1");
+    solrCluster.uploadConfigSet(configDir.toPath(), "conf1");
 
     solrCluster.getSolrClient().setDefaultCollection(COLLECTION);
 


[39/50] [abbrv] lucene-solr:jira/solr-8542-v2: LUCENE-7462: Give doc values APIs an `advanceExact` method.

Posted by cp...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/search/similarities/BM25Similarity.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/similarities/BM25Similarity.java b/lucene/core/src/java/org/apache/lucene/search/similarities/BM25Similarity.java
index 0cb86db..ff390b3 100644
--- a/lucene/core/src/java/org/apache/lucene/search/similarities/BM25Similarity.java
+++ b/lucene/core/src/java/org/apache/lucene/search/similarities/BM25Similarity.java
@@ -244,11 +244,7 @@ public class BM25Similarity extends Similarity {
       if (norms == null) {
         norm = k1;
       } else {
-        int normsDocID = norms.docID();
-        if (normsDocID < doc) {
-          normsDocID = norms.advance(doc);
-        }
-        if (normsDocID == doc) {
+        if (norms.advanceExact(doc)) {
           norm = cache[(byte)norms.longValue() & 0xFF];
         } else {
           norm = cache[0];
@@ -310,7 +306,7 @@ public class BM25Similarity extends Similarity {
           "tfNorm, computed from:", subs);
     } else {
       byte norm;
-      if (norms.advance(doc) == doc) {
+      if (norms.advanceExact(doc)) {
         norm = (byte) norms.longValue();
       } else {
         norm = 0;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/search/similarities/SimilarityBase.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/similarities/SimilarityBase.java b/lucene/core/src/java/org/apache/lucene/search/similarities/SimilarityBase.java
index ed837c4..925dc59 100644
--- a/lucene/core/src/java/org/apache/lucene/search/similarities/SimilarityBase.java
+++ b/lucene/core/src/java/org/apache/lucene/search/similarities/SimilarityBase.java
@@ -279,11 +279,7 @@ public abstract class SimilarityBase extends Similarity {
       if (norms == null) {
         return 1F;
       }
-      int normsDocID = norms.docID();
-      if (normsDocID < doc) {
-        normsDocID = norms.advance(doc);
-      }
-      if (normsDocID == doc) {
+      if (norms.advanceExact(doc)) {
         return decodeNormValue((byte) norms.longValue());
       } else {
         return decodeNormValue((byte) 0);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/search/similarities/TFIDFSimilarity.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/similarities/TFIDFSimilarity.java b/lucene/core/src/java/org/apache/lucene/search/similarities/TFIDFSimilarity.java
index 6cd87b5..cd8acd6 100644
--- a/lucene/core/src/java/org/apache/lucene/search/similarities/TFIDFSimilarity.java
+++ b/lucene/core/src/java/org/apache/lucene/search/similarities/TFIDFSimilarity.java
@@ -599,11 +599,7 @@ public abstract class TFIDFSimilarity extends Similarity {
         return raw;
       } else {
         long normValue;
-        int normsDocID = norms.docID();
-        if (normsDocID < doc) {
-          normsDocID = norms.advance(doc);
-        }
-        if (normsDocID == doc) {
+        if (norms.advanceExact(doc)) {
           normValue = norms.longValue();
         } else {
           normValue = 0;
@@ -649,7 +645,7 @@ public abstract class TFIDFSimilarity extends Similarity {
   private Explanation explainField(int doc, Explanation freq, IDFStats stats, NumericDocValues norms) throws IOException {
     Explanation tfExplanation = Explanation.match(tf(freq.getValue()), "tf(freq="+freq.getValue()+"), with freq of:", freq);
     float norm;
-    if (norms != null && norms.advance(doc) == doc) {
+    if (norms != null && norms.advanceExact(doc)) {
       norm = decodeNormValue(norms.longValue());
     } else {
       norm = 1f;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestIndexedDISI.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestIndexedDISI.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestIndexedDISI.java
index 18b4590..64bfbd5 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestIndexedDISI.java
+++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestIndexedDISI.java
@@ -153,7 +153,7 @@ public class TestIndexedDISI extends LuceneTestCase {
 
   public void testRandom() throws IOException {
     try (Directory dir = newDirectory()) {
-      for (int i = 0; i < 100; ++i) {
+      for (int i = 0; i < 10; ++i) {
         doTestRandom(dir);
       }
     }
@@ -217,6 +217,32 @@ public class TestIndexedDISI extends LuceneTestCase {
       }
     }
 
+    for (int step : new int[] {10, 100, 1000, 10000, 100000}) {
+      try (IndexInput in = dir.openInput("foo", IOContext.DEFAULT)) {
+        IndexedDISI disi = new IndexedDISI(in, 0L, length, cardinality);
+        BitSetIterator disi2 = new BitSetIterator(set, cardinality);
+        int index = -1;
+        for (int target = 0; target < set.length(); ) {
+          target += TestUtil.nextInt(random(), 0, step);
+          int doc = disi2.docID();
+          while (doc < target) {
+            doc = disi2.nextDoc();
+            index++;
+          }
+
+          boolean exists = disi.advanceExact(target);
+          assertEquals(doc == target, exists);
+          if (exists) {
+            assertEquals(index, disi.index());
+          } else if (random().nextBoolean()) {
+            assertEquals(doc, disi.nextDoc());
+            assertEquals(index, disi.index());
+            target = doc;
+          }
+        }
+      }
+    }
+
     dir.deleteFile("foo");
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70DocValuesFormat.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70DocValuesFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70DocValuesFormat.java
index 5ad701e..8661298 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70DocValuesFormat.java
+++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70DocValuesFormat.java
@@ -104,7 +104,7 @@ public class TestLucene70DocValuesFormat extends BaseCompressingDocValuesFormatT
   public void testSortedVariableLengthBigVsStoredFields() throws Exception {
     int numIterations = atLeast(1);
     for (int i = 0; i < numIterations; i++) {
-      doTestSortedVsStoredFields(atLeast(300), 1, 32766);
+      doTestSortedVsStoredFields(atLeast(300), 1d, 1, 32766);
     }
   }
   
@@ -112,7 +112,7 @@ public class TestLucene70DocValuesFormat extends BaseCompressingDocValuesFormatT
   public void testSortedVariableLengthManyVsStoredFields() throws Exception {
     int numIterations = atLeast(1);
     for (int i = 0; i < numIterations; i++) {
-      doTestSortedVsStoredFields(TestUtil.nextInt(random(), 1024, 2049), 1, 500);
+      doTestSortedVsStoredFields(TestUtil.nextInt(random(), 1024, 2049), 1d, 1, 500);
     }
   }
   

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/facet/src/java/org/apache/lucene/facet/sortedset/SortedSetDocValuesFacetCounts.java
----------------------------------------------------------------------
diff --git a/lucene/facet/src/java/org/apache/lucene/facet/sortedset/SortedSetDocValuesFacetCounts.java b/lucene/facet/src/java/org/apache/lucene/facet/sortedset/SortedSetDocValuesFacetCounts.java
index 1219494..4fff6a6 100644
--- a/lucene/facet/src/java/org/apache/lucene/facet/sortedset/SortedSetDocValuesFacetCounts.java
+++ b/lucene/facet/src/java/org/apache/lucene/facet/sortedset/SortedSetDocValuesFacetCounts.java
@@ -199,10 +199,7 @@ public class SortedSetDocValuesFacetCounts extends Facets {
           int doc;
           while ((doc = docs.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
             //System.out.println("    doc=" + doc);
-            if (doc > segValues.docID()) {
-              segValues.advance(doc);
-            }
-            if (doc == segValues.docID()) {
+            if (segValues.advanceExact(doc)) {
               int term = (int) segValues.nextOrd();
               while (term != SortedSetDocValues.NO_MORE_ORDS) {
                 //System.out.println("      segOrd=" + segOrd + " ord=" + term + " globalOrd=" + ordinalMap.getGlobalOrd(segOrd, term));
@@ -219,10 +216,7 @@ public class SortedSetDocValuesFacetCounts extends Facets {
           int doc;
           while ((doc = docs.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
             //System.out.println("    doc=" + doc);
-            if (doc > segValues.docID()) {
-              segValues.advance(doc);
-            }
-            if (doc == segValues.docID()) {
+            if (segValues.advanceExact(doc)) {
               int term = (int) segValues.nextOrd();
               while (term != SortedSetDocValues.NO_MORE_ORDS) {
                 //System.out.println("      ord=" + term);
@@ -246,10 +240,7 @@ public class SortedSetDocValuesFacetCounts extends Facets {
         // just aggregate directly into counts:
         int doc;
         while ((doc = docs.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
-          if (doc > segValues.docID()) {
-            segValues.advance(doc);
-          }
-          if (doc == segValues.docID()) {
+          if (segValues.advanceExact(doc)) {
             int term = (int) segValues.nextOrd();
             while (term != SortedSetDocValues.NO_MORE_ORDS) {
               counts[term]++;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/join/src/java/org/apache/lucene/search/join/BlockJoinSelector.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/BlockJoinSelector.java b/lucene/join/src/java/org/apache/lucene/search/join/BlockJoinSelector.java
index a2e0c55..359b3cb 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/BlockJoinSelector.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/BlockJoinSelector.java
@@ -102,7 +102,7 @@ public class BlockJoinSelector {
     }
     return new SortedDocValues() {
 
-      private int ord;
+      private int ord = -1;
       private int docID = -1;
 
       @Override
@@ -169,6 +169,60 @@ public class BlockJoinSelector {
       }
 
       @Override
+      public boolean advanceExact(int targetParentDocID) throws IOException {
+        if (targetParentDocID < docID) {
+          throw new IllegalArgumentException("target must be after the current document: current=" + docID + " target=" + targetParentDocID);
+        }
+        int previousDocId = docID;
+        docID = targetParentDocID;
+        if (targetParentDocID == previousDocId) {
+          return ord != -1;
+        }
+        docID = targetParentDocID;
+        ord = -1;
+        if (parents.get(targetParentDocID) == false) {
+          return false;
+        }
+        int prevParentDocId = docID == 0 ? -1 : parents.prevSetBit(docID - 1);
+        int childDoc = values.docID();
+        if (childDoc <= prevParentDocId) {
+          childDoc = values.advance(prevParentDocId + 1);
+        }
+        if (childDoc >= docID) {
+          return false;
+        }
+        
+        boolean hasValue = false;
+        for (int doc = values.docID(); doc < docID; doc = values.nextDoc()) {
+          if (children.get(doc)) {
+            ord = values.ordValue();
+            hasValue = true;
+            values.nextDoc();
+            break;
+          }
+        }
+        if (hasValue == false) {
+          return false;
+        }
+
+        for (int doc = values.docID(); doc < docID; doc = values.nextDoc()) {
+          if (children.get(doc)) {
+            switch (selection) {
+              case MIN:
+                ord = Math.min(ord, values.ordValue());
+                break;
+              case MAX:
+                ord = Math.max(ord, values.ordValue());
+                break;
+              default:
+                throw new AssertionError();
+            }
+          }
+        }
+        return true;
+      }
+
+      @Override
       public int ordValue() {
         return ord;
       }
@@ -288,6 +342,54 @@ public class BlockJoinSelector {
       }
 
       @Override
+      public boolean advanceExact(int targetParentDocID) throws IOException {
+        if (targetParentDocID <= parentDocID) {
+          throw new IllegalArgumentException("target must be after the current document: current=" + parentDocID + " target=" + targetParentDocID);
+        }
+        parentDocID = targetParentDocID;
+        if (parents.get(targetParentDocID) == false) {
+          return false;
+        }
+        int prevParentDocId = parentDocID == 0 ? -1 : parents.prevSetBit(parentDocID - 1);
+        int childDoc = values.docID();
+        if (childDoc <= prevParentDocId) {
+          childDoc = values.advance(prevParentDocId + 1);
+        }
+        if (childDoc >= parentDocID) {
+          return false;
+        }
+        
+        boolean hasValue = false;
+        for (int doc = values.docID(); doc < parentDocID; doc = values.nextDoc()) {
+          if (children.get(doc)) {
+            value = values.longValue();
+            hasValue = true;
+            values.nextDoc();
+            break;
+          }
+        }
+        if (hasValue == false) {
+          return false;
+        }
+
+        for (int doc = values.docID(); doc < parentDocID; doc = values.nextDoc()) {
+          if (children.get(doc)) {
+            switch (selection) {
+              case MIN:
+                value = Math.min(value, values.longValue());
+                break;
+              case MAX:
+                value = Math.max(value, values.longValue());
+                break;
+              default:
+                throw new AssertionError();
+            }
+          }
+        }
+        return true;
+      }
+
+      @Override
       public long longValue() {
         return value;
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/join/src/java/org/apache/lucene/search/join/GenericTermsCollector.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/GenericTermsCollector.java b/lucene/join/src/java/org/apache/lucene/search/join/GenericTermsCollector.java
index 3ad0fe3..47b1b62 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/GenericTermsCollector.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/GenericTermsCollector.java
@@ -74,6 +74,13 @@ interface GenericTermsCollector extends Collector {
         }
 
         @Override
+        public boolean advanceExact(int dest) throws IOException {
+          boolean exists = target.advanceExact(dest);
+          out.println("\nadvanceExact(" + dest + ") -> exists# "+exists);
+          return exists;
+        }
+
+        @Override
         public long cost() {
           return target.cost();
         }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinSelector.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinSelector.java b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinSelector.java
index 41f994c..04cb771 100644
--- a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinSelector.java
+++ b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinSelector.java
@@ -150,6 +150,12 @@ public class TestBlockJoinSelector extends LuceneTestCase {
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      docID = target;
+      return ords[docID] != -1;
+    }
+
+    @Override
     public int ordValue() {
       assert ords[docID] != -1;
       return ords[docID];
@@ -257,6 +263,12 @@ public class TestBlockJoinSelector extends LuceneTestCase {
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      docID = target;
+      return docsWithValue.get(docID);
+    }
+
+    @Override
     public long longValue() {
       return values[docID];
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
----------------------------------------------------------------------
diff --git a/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java b/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
index ccbbf24..218d26c 100644
--- a/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
+++ b/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
@@ -970,6 +970,12 @@ public class MemoryIndex {
           }
 
           @Override
+          public boolean advanceExact(int target) throws IOException {
+            docID = target;
+            return docID == 0;
+          }
+
+          @Override
           public long cost() {
             return 1;
           }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java b/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java
index b64afc5..043141a 100644
--- a/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java
+++ b/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java
@@ -144,6 +144,11 @@ public class TestDiversifiedTopDocsCollector extends LuceneTestCase {
         }
 
         @Override
+        public boolean advanceExact(int target) throws IOException {
+          return sdv.advanceExact(target + context.docBase);
+        }
+
+        @Override
         public long cost() {
           return 0;
         }
@@ -187,6 +192,10 @@ public class TestDiversifiedTopDocsCollector extends LuceneTestCase {
           return vals.advance(target);
         }
         @Override
+        public boolean advanceExact(int target) throws IOException {
+          return vals.advanceExact(target);
+        }
+        @Override
         public long cost() {
           return vals.cost();
         }
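
The test wrapper above forwards advanceExact with target + context.docBase, i.e. it rebases leaf-local doc ids onto the top-level reader before delegating. A minimal sketch of that rebasing, using stand-in types rather than the real Lucene classes:

import java.io.IOException;

// Sketch of leaf-to-global doc id translation: leaf-local targets are rebased
// by the leaf's docBase before delegating. Types are stand-ins, not Lucene classes.
public class DocBaseSketch {
  interface GlobalValues {
    boolean advanceExact(int globalDoc) throws IOException;
    long longValue() throws IOException;
  }

  static class PerLeafValues {
    private final GlobalValues global;
    private final int docBase; // global doc id of this leaf's first document

    PerLeafValues(GlobalValues global, int docBase) {
      this.global = global;
      this.docBase = docBase;
    }

    boolean advanceExact(int leafDoc) throws IOException {
      // Rebase the leaf-local doc id, mirroring "target + context.docBase" above.
      return global.advanceExact(leafDoc + docBase);
    }

    long longValue() throws IOException {
      return global.longValue();
    }
  }
}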

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/test-framework/src/java/org/apache/lucene/index/AssertingLeafReader.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/AssertingLeafReader.java b/lucene/test-framework/src/java/org/apache/lucene/index/AssertingLeafReader.java
index 6686ec4..37c549e 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/AssertingLeafReader.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/AssertingLeafReader.java
@@ -398,6 +398,7 @@ public class AssertingLeafReader extends FilterLeafReader {
     private final NumericDocValues in;
     private final int maxDoc;
     private int lastDocID = -1;
+    private boolean exists;
     
     public AssertingNumericDocValues(NumericDocValues in, int maxDoc) {
       this.in = in;
@@ -420,6 +421,7 @@ public class AssertingLeafReader extends FilterLeafReader {
       assert docID == NO_MORE_DOCS || docID < maxDoc;
       assert docID == in.docID();
       lastDocID = docID;
+      exists = docID != NO_MORE_DOCS;
       return docID;
     }
 
@@ -432,10 +434,23 @@ public class AssertingLeafReader extends FilterLeafReader {
       assert docID >= target;
       assert docID == NO_MORE_DOCS || docID < maxDoc;
       lastDocID = docID;
+      exists = docID != NO_MORE_DOCS;
       return docID;
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      assertThread("Numeric doc values", creationThread);
+      assert target >= 0;
+      assert target >= in.docID();
+      assert target < maxDoc;
+      exists = in.advanceExact(target);
+      assert in.docID() == target;
+      lastDocID = target;
+      return exists;
+    }
+
+    @Override
     public long cost() {
       assertThread("Numeric doc values", creationThread);
       long cost = in.cost();
@@ -446,8 +461,7 @@ public class AssertingLeafReader extends FilterLeafReader {
     @Override
     public long longValue() throws IOException {
       assertThread("Numeric doc values", creationThread);
-      assert in.docID() != -1;
-      assert in.docID() != NO_MORE_DOCS;
+      assert exists;
       return in.longValue();
     }    
 
@@ -463,6 +477,7 @@ public class AssertingLeafReader extends FilterLeafReader {
     private final BinaryDocValues in;
     private final int maxDoc;
     private int lastDocID = -1;
+    private boolean exists;
     
     public AssertingBinaryDocValues(BinaryDocValues in, int maxDoc) {
       this.in = in;
@@ -485,6 +500,7 @@ public class AssertingLeafReader extends FilterLeafReader {
       assert docID == NO_MORE_DOCS || docID < maxDoc;
       assert docID == in.docID();
       lastDocID = docID;
+      exists = docID != NO_MORE_DOCS;
       return docID;
     }
 
@@ -497,10 +513,23 @@ public class AssertingLeafReader extends FilterLeafReader {
       assert docID >= target;
       assert docID == NO_MORE_DOCS || docID < maxDoc;
       lastDocID = docID;
+      exists = docID != NO_MORE_DOCS;
       return docID;
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      assertThread("Numeric doc values", creationThread);
+      assert target >= 0;
+      assert target >= in.docID();
+      assert target < maxDoc;
+      exists = in.advanceExact(target);
+      assert in.docID() == target;
+      lastDocID = target;
+      return exists;
+    }
+
+    @Override
     public long cost() {
       assertThread("Binary doc values", creationThread);
       long cost = in.cost();
@@ -511,8 +540,7 @@ public class AssertingLeafReader extends FilterLeafReader {
     @Override
     public BytesRef binaryValue() throws IOException {
       assertThread("Binary doc values", creationThread);
-      assert in.docID() != -1;
-      assert in.docID() != NO_MORE_DOCS;
+      assert exists;
       return in.binaryValue();
     }
 
@@ -529,6 +557,7 @@ public class AssertingLeafReader extends FilterLeafReader {
     private final int maxDoc;
     private final int valueCount;
     private int lastDocID = -1;
+    private boolean exists;
     
     public AssertingSortedDocValues(SortedDocValues in, int maxDoc) {
       this.in = in;
@@ -551,6 +580,7 @@ public class AssertingLeafReader extends FilterLeafReader {
       assert docID == NO_MORE_DOCS || docID < maxDoc;
       assert docID == in.docID();
       lastDocID = docID;
+      exists = docID != NO_MORE_DOCS;
       return docID;
     }
 
@@ -563,10 +593,23 @@ public class AssertingLeafReader extends FilterLeafReader {
       assert docID >= target;
       assert docID == NO_MORE_DOCS || docID < maxDoc;
       lastDocID = docID;
+      exists = docID != NO_MORE_DOCS;
       return docID;
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      assertThread("Numeric doc values", creationThread);
+      assert target >= 0;
+      assert target >= in.docID();
+      assert target < maxDoc;
+      exists = in.advanceExact(target);
+      assert in.docID() == target;
+      lastDocID = target;
+      return exists;
+    }
+
+    @Override
     public long cost() {
       assertThread("Sorted doc values", creationThread);
       long cost = in.cost();
@@ -577,6 +620,7 @@ public class AssertingLeafReader extends FilterLeafReader {
     @Override
     public int ordValue() {
       assertThread("Sorted doc values", creationThread);
+      assert exists;
       int ord = in.ordValue();
       assert ord >= -1 && ord < valueCount;
       return ord;
@@ -625,6 +669,7 @@ public class AssertingLeafReader extends FilterLeafReader {
     private final int maxDoc;
     private int lastDocID = -1;
     private int valueUpto;
+    private boolean exists;
     
     public AssertingSortedNumericDocValues(SortedNumericDocValues in, int maxDoc) {
       this.in = in;
@@ -645,6 +690,7 @@ public class AssertingLeafReader extends FilterLeafReader {
       assert docID == in.docID();
       lastDocID = docID;
       valueUpto = 0;
+      exists = docID != NO_MORE_DOCS;
       return docID;
     }
 
@@ -659,10 +705,24 @@ public class AssertingLeafReader extends FilterLeafReader {
       assert docID == NO_MORE_DOCS || docID < maxDoc;
       lastDocID = docID;
       valueUpto = 0;
+      exists = docID != NO_MORE_DOCS;
       return docID;
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      assertThread("Numeric doc values", creationThread);
+      assert target >= 0;
+      assert target >= in.docID();
+      assert target < maxDoc;
+      exists = in.advanceExact(target);
+      assert in.docID() == target;
+      lastDocID = target;
+      valueUpto = 0;
+      return exists;
+    }
+
+    @Override
     public long cost() {
       assertThread("Sorted numeric doc values", creationThread);
       long cost = in.cost();
@@ -673,6 +733,7 @@ public class AssertingLeafReader extends FilterLeafReader {
     @Override
     public long nextValue() throws IOException {
       assertThread("Sorted numeric doc values", creationThread);
+      assert exists;
       assert valueUpto < in.docValueCount(): "valueUpto=" + valueUpto + " in.docValueCount()=" + in.docValueCount();
       valueUpto++;
       return in.nextValue();
@@ -681,6 +742,7 @@ public class AssertingLeafReader extends FilterLeafReader {
     @Override
     public int docValueCount() {
       assertThread("Sorted numeric doc values", creationThread);
+      assert exists;
       assert in.docValueCount() > 0;
       return in.docValueCount();
     } 
@@ -693,7 +755,8 @@ public class AssertingLeafReader extends FilterLeafReader {
     private final int maxDoc;
     private final long valueCount;
     private int lastDocID = -1;
-    long lastOrd = NO_MORE_ORDS;
+    private long lastOrd = NO_MORE_ORDS;
+    private boolean exists;
     
     public AssertingSortedSetDocValues(SortedSetDocValues in, int maxDoc) {
       this.in = in;
@@ -717,6 +780,7 @@ public class AssertingLeafReader extends FilterLeafReader {
       assert docID == in.docID();
       lastDocID = docID;
       lastOrd = -2;
+      exists = docID != NO_MORE_DOCS;
       return docID;
     }
 
@@ -731,10 +795,24 @@ public class AssertingLeafReader extends FilterLeafReader {
       assert docID == NO_MORE_DOCS || docID < maxDoc;
       lastDocID = docID;
       lastOrd = -2;
+      exists = docID != NO_MORE_DOCS;
       return docID;
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      assertThread("Numeric doc values", creationThread);
+      assert target >= 0;
+      assert target >= in.docID();
+      assert target < maxDoc;
+      exists = in.advanceExact(target);
+      assert in.docID() == target;
+      lastDocID = target;
+      lastOrd = -2;
+      return exists;
+    }
+
+    @Override
     public long cost() {
       assertThread("Sorted set doc values", creationThread);
       long cost = in.cost();
@@ -746,6 +824,7 @@ public class AssertingLeafReader extends FilterLeafReader {
     public long nextOrd() throws IOException {
       assertThread("Sorted set doc values", creationThread);
       assert lastOrd != NO_MORE_ORDS;
+      assert exists;
       long ord = in.nextOrd();
       assert ord < valueCount;
       assert ord == NO_MORE_ORDS || ord > lastOrd;
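
A pattern repeated across the AssertingLeafReader hunks above: each wrapper records whether the last positioning call (nextDoc, advance, or advanceExact) landed on a document that has a value, and the value accessors assert that flag instead of re-deriving it from docID(). A condensed sketch of the idea with hypothetical names, not the Lucene classes themselves:

import java.io.IOException;

// Sketch only: a wrapper that mirrors the AssertingNumericDocValues idea of
// tracking an 'exists' flag so value accessors can be guarded by assertions.
public class AssertingCursorSketch {
  interface LongCursor {
    /** Positions on target; returns true if target has a value. */
    boolean advanceExact(int target) throws IOException;
    long longValue() throws IOException;
  }

  static class AssertingLongCursor implements LongCursor {
    private final LongCursor in;
    private final int maxDoc;
    private int lastDocID = -1;
    private boolean exists;

    AssertingLongCursor(LongCursor in, int maxDoc) {
      this.in = in;
      this.maxDoc = maxDoc;
    }

    @Override
    public boolean advanceExact(int target) throws IOException {
      assert target >= 0 && target < maxDoc : "target out of bounds: " + target;
      assert target >= lastDocID : "advanceExact may not go backwards";
      lastDocID = target;
      exists = in.advanceExact(target);
      return exists;
    }

    @Override
    public long longValue() throws IOException {
      // Only legal when the last positioning call reported a value for this doc.
      assert exists : "longValue() called on a doc without a value";
      return in.longValue();
    }
  }
}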

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java
index b9bf745..d55f212 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java
@@ -30,6 +30,8 @@ import java.util.Map;
 import java.util.Set;
 import java.util.TreeSet;
 import java.util.concurrent.CountDownLatch;
+import java.util.function.LongSupplier;
+import java.util.function.Supplier;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.MockAnalyzer;
@@ -556,7 +558,6 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     IndexReader ireader = DirectoryReader.open(directory); // read-only=true
     assert ireader.leaves().size() == 1;
     BinaryDocValues dv = ireader.leaves().get(0).reader().getBinaryDocValues("dv");
-    BytesRef scratch = new BytesRef();
     for(int i=0;i<2;i++) {
       Document doc2 = ireader.leaves().get(0).reader().document(i);
       String expected;
@@ -1185,20 +1186,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     dir.close();
   }
 
-  static abstract class LongProducer {
-    abstract long next();
-  }
-
-  private void doTestNumericsVsStoredFields(final long minValue, final long maxValue) throws Exception {
-    doTestNumericsVsStoredFields(new LongProducer() {
-      @Override
-      long next() {
-        return TestUtil.nextLong(random(), minValue, maxValue);
-      }
-    });
-  }
-
-  private void doTestNumericsVsStoredFields(LongProducer longs) throws Exception {
+  private void doTestNumericsVsStoredFields(double density, LongSupplier longs) throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir, conf);
@@ -1216,8 +1204,12 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     // for numbers of values <= 256, all storage layouts are tested
     assert numDocs > 256;
     for (int i = 0; i < numDocs; i++) {
+      if (random().nextDouble() > density) {
+        writer.addDocument(new Document());
+        continue;
+      }
       idField.setStringValue(Integer.toString(i));
-      long value = longs.next();
+      long value = longs.getAsLong();
       storedField.setStringValue(Long.toString(value));
       dvField.setLongValue(value);
       writer.addDocument(doc);
@@ -1241,20 +1233,28 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     
     // compare
     DirectoryReader ir = DirectoryReader.open(dir);
+    TestUtil.checkReader(ir);
     for (LeafReaderContext context : ir.leaves()) {
       LeafReader r = context.reader();
-      NumericDocValues docValues = r.getNumericDocValues("dv");
+      NumericDocValues docValues = DocValues.getNumeric(r, "dv");
+      docValues.nextDoc();
       for (int i = 0; i < r.maxDoc(); i++) {
-        long storedValue = Long.parseLong(r.document(i).get("stored"));
-        assertEquals(i, docValues.nextDoc());
-        assertEquals(storedValue, docValues.longValue());
+        String storedValue = r.document(i).get("stored");
+        if (storedValue == null) {
+          assertTrue(docValues.docID() > i);
+        } else {
+          assertEquals(i, docValues.docID());
+          assertEquals(Long.parseLong(storedValue), docValues.longValue());
+          docValues.nextDoc();
+        }
       }
+      assertEquals(DocIdSetIterator.NO_MORE_DOCS, docValues.docID());
     }
     ir.close();
     dir.close();
   }
   
-  private void doTestSortedNumericsVsStoredFields(LongProducer counts, LongProducer values) throws Exception {
+  private void doTestSortedNumericsVsStoredFields(LongSupplier counts, LongSupplier values) throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir, conf);
@@ -1268,10 +1268,10 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
       Document doc = new Document();
       doc.add(new StringField("id", Integer.toString(i), Field.Store.NO));
       
-      int valueCount = (int) counts.next();
+      int valueCount = (int) counts.getAsLong();
       long valueArray[] = new long[valueCount];
       for (int j = 0; j < valueCount; j++) {
-        long value = values.next();
+        long value = values.getAsLong();
         valueArray[j] = value;
         doc.add(new SortedNumericDocValuesField("dv", value));
       }
@@ -1300,6 +1300,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     
     // compare
     DirectoryReader ir = DirectoryReader.open(dir);
+    TestUtil.checkReader(ir);
     for (LeafReaderContext context : ir.leaves()) {
       LeafReader r = context.reader();
       SortedNumericDocValues docValues = DocValues.getSortedNumeric(r, "dv");
@@ -1326,39 +1327,74 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
   public void testBooleanNumericsVsStoredFields() throws Exception {
     int numIterations = atLeast(1);
     for (int i = 0; i < numIterations; i++) {
-      doTestNumericsVsStoredFields(0, 1);
+      doTestNumericsVsStoredFields(1, () -> random().nextInt(2));
     }
   }
-  
+
+  public void testSparseBooleanNumericsVsStoredFields() throws Exception {
+    int numIterations = atLeast(1);
+    for (int i = 0; i < numIterations; i++) {
+      doTestNumericsVsStoredFields(random().nextDouble(), () -> random().nextInt(2));
+    }
+  }
+
   public void testByteNumericsVsStoredFields() throws Exception {
     int numIterations = atLeast(1);
     for (int i = 0; i < numIterations; i++) {
-      doTestNumericsVsStoredFields(Byte.MIN_VALUE, Byte.MAX_VALUE);
+      doTestNumericsVsStoredFields(1, () -> TestUtil.nextInt(random(), Byte.MIN_VALUE, Byte.MAX_VALUE));
     }
   }
-  
+
+  public void testSparseByteNumericsVsStoredFields() throws Exception {
+    int numIterations = atLeast(1);
+    for (int i = 0; i < numIterations; i++) {
+      doTestNumericsVsStoredFields(random().nextDouble(), () -> TestUtil.nextInt(random(), Byte.MIN_VALUE, Byte.MAX_VALUE));
+    }
+  }
+
   public void testShortNumericsVsStoredFields() throws Exception {
     int numIterations = atLeast(1);
     for (int i = 0; i < numIterations; i++) {
-      doTestNumericsVsStoredFields(Short.MIN_VALUE, Short.MAX_VALUE);
+      doTestNumericsVsStoredFields(1, () -> TestUtil.nextInt(random(), Short.MIN_VALUE, Short.MAX_VALUE));
     }
   }
-  
+
+  public void testSparseShortNumericsVsStoredFields() throws Exception {
+    int numIterations = atLeast(1);
+    for (int i = 0; i < numIterations; i++) {
+      doTestNumericsVsStoredFields(random().nextDouble(), () -> TestUtil.nextInt(random(), Short.MIN_VALUE, Short.MAX_VALUE));
+    }
+  }
+
   public void testIntNumericsVsStoredFields() throws Exception {
     int numIterations = atLeast(1);
     for (int i = 0; i < numIterations; i++) {
-      doTestNumericsVsStoredFields(Integer.MIN_VALUE, Integer.MAX_VALUE);
+      doTestNumericsVsStoredFields(1, random()::nextInt);
+    }
+  }
+  
+  public void testSparseIntNumericsVsStoredFields() throws Exception {
+    int numIterations = atLeast(1);
+    for (int i = 0; i < numIterations; i++) {
+      doTestNumericsVsStoredFields(random().nextDouble(), random()::nextInt);
     }
   }
   
   public void testLongNumericsVsStoredFields() throws Exception {
     int numIterations = atLeast(1);
     for (int i = 0; i < numIterations; i++) {
-      doTestNumericsVsStoredFields(Long.MIN_VALUE, Long.MAX_VALUE);
+      doTestNumericsVsStoredFields(1, random()::nextLong);
     }
   }
   
-  private void doTestBinaryVsStoredFields(int minLength, int maxLength) throws Exception {
+  public void testSparseLongNumericsVsStoredFields() throws Exception {
+    int numIterations = atLeast(1);
+    for (int i = 0; i < numIterations; i++) {
+      doTestNumericsVsStoredFields(random().nextDouble(), random()::nextLong);
+    }
+  }
+
+  private void doTestBinaryVsStoredFields(double density, Supplier<byte[]> bytes) throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir, conf);
@@ -1373,15 +1409,12 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     // index some docs
     int numDocs = atLeast(300);
     for (int i = 0; i < numDocs; i++) {
-      idField.setStringValue(Integer.toString(i));
-      final int length;
-      if (minLength == maxLength) {
-        length = minLength; // fixed length
-      } else {
-        length = TestUtil.nextInt(random(), minLength, maxLength);
+      if (random().nextDouble() > density) {
+        writer.addDocument(new Document());
+        continue;
       }
-      byte buffer[] = new byte[length];
-      random().nextBytes(buffer);
+      idField.setStringValue(Integer.toString(i));
+      byte[] buffer = bytes.get();
       storedField.setBytesValue(buffer);
       dvField.setBytesValue(buffer);
       writer.addDocument(doc);
@@ -1399,28 +1432,44 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     
     // compare
     DirectoryReader ir = writer.getReader();
+    TestUtil.checkReader(ir);
     for (LeafReaderContext context : ir.leaves()) {
       LeafReader r = context.reader();
-      BinaryDocValues docValues = r.getBinaryDocValues("dv");
+      BinaryDocValues docValues = DocValues.getBinary(r, "dv");
+      docValues.nextDoc();
       for (int i = 0; i < r.maxDoc(); i++) {
         BytesRef binaryValue = r.document(i).getBinaryValue("stored");
-        assertEquals(i, docValues.nextDoc());
-        assertEquals(binaryValue, docValues.binaryValue());
+        if (binaryValue == null) {
+          assertTrue(docValues.docID() > i);
+        } else {
+          assertEquals(i, docValues.docID());
+          assertEquals(binaryValue, docValues.binaryValue());
+          docValues.nextDoc();
+        }
       }
+      assertEquals(DocIdSetIterator.NO_MORE_DOCS, docValues.docID());
     }
     ir.close();
     
     // compare again
     writer.forceMerge(1);
     ir = writer.getReader();
+    TestUtil.checkReader(ir);
     for (LeafReaderContext context : ir.leaves()) {
       LeafReader r = context.reader();
-      BinaryDocValues docValues = r.getBinaryDocValues("dv");
+      BinaryDocValues docValues = DocValues.getBinary(r, "dv");
+      docValues.nextDoc();
       for (int i = 0; i < r.maxDoc(); i++) {
         BytesRef binaryValue = r.document(i).getBinaryValue("stored");
-        assertEquals(i, docValues.nextDoc());
-        assertEquals(binaryValue, docValues.binaryValue());
+        if (binaryValue == null) {
+          assertTrue(docValues.docID() > i);
+        } else {
+          assertEquals(i, docValues.docID());
+          assertEquals(binaryValue, docValues.binaryValue());
+          docValues.nextDoc();
+        }
       }
+      assertEquals(DocIdSetIterator.NO_MORE_DOCS, docValues.docID());
     }
     ir.close();
     writer.close();
@@ -1428,21 +1477,46 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
   }
   
   public void testBinaryFixedLengthVsStoredFields() throws Exception {
+    doTestBinaryFixedLengthVsStoredFields(1);
+  }
+
+  public void testSparseBinaryFixedLengthVsStoredFields() throws Exception {
+    doTestBinaryFixedLengthVsStoredFields(random().nextDouble());
+  }
+
+  private void doTestBinaryFixedLengthVsStoredFields(double density) throws Exception {
     int numIterations = atLeast(1);
     for (int i = 0; i < numIterations; i++) {
       int fixedLength = TestUtil.nextInt(random(), 0, 10);
-      doTestBinaryVsStoredFields(fixedLength, fixedLength);
+      doTestBinaryVsStoredFields(density, () -> {
+        byte buffer[] = new byte[fixedLength];
+        random().nextBytes(buffer);
+        return buffer;
+      });
     }
   }
-  
+
   public void testBinaryVariableLengthVsStoredFields() throws Exception {
+    doTestBinaryVariableLengthVsStoredFields(1);
+  }
+
+  public void testSparseBinaryVariableLengthVsStoredFields() throws Exception {
+    doTestBinaryVariableLengthVsStoredFields(random().nextDouble());
+  }
+
+  public void doTestBinaryVariableLengthVsStoredFields(double density) throws Exception {
     int numIterations = atLeast(1);
     for (int i = 0; i < numIterations; i++) {
-      doTestBinaryVsStoredFields(0, 10);
+      doTestBinaryVsStoredFields(density, () -> {
+        final int length = random().nextInt(10);
+        byte buffer[] = new byte[length];
+        random().nextBytes(buffer);
+        return buffer;
+      });
     }
   }
   
-  protected void doTestSortedVsStoredFields(int numDocs, int minLength, int maxLength) throws Exception {
+  protected void doTestSortedVsStoredFields(int numDocs, double density, Supplier<byte[]> bytes) throws Exception {
     Directory dir = newFSDirectory(createTempDir("dvduel"));
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir, conf);
@@ -1456,15 +1530,12 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     
     // index some docs
     for (int i = 0; i < numDocs; i++) {
-      idField.setStringValue(Integer.toString(i));
-      final int length;
-      if (minLength == maxLength) {
-        length = minLength; // fixed length
-      } else {
-        length = TestUtil.nextInt(random(), minLength, maxLength);
+      if (random().nextDouble() > density) {
+        writer.addDocument(new Document());
+        continue;
       }
-      byte buffer[] = new byte[length];
-      random().nextBytes(buffer);
+      idField.setStringValue(Integer.toString(i));
+      byte[] buffer = bytes.get();
       storedField.setBytesValue(buffer);
       dvField.setBytesValue(buffer);
       writer.addDocument(doc);
@@ -1482,28 +1553,44 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     
     // compare
     DirectoryReader ir = writer.getReader();
+    TestUtil.checkReader(ir);
     for (LeafReaderContext context : ir.leaves()) {
       LeafReader r = context.reader();
       BinaryDocValues docValues = DocValues.getBinary(r, "dv");
+      docValues.nextDoc();
       for (int i = 0; i < r.maxDoc(); i++) {
         BytesRef binaryValue = r.document(i).getBinaryValue("stored");
-        assertEquals(i, docValues.nextDoc());
-        assertEquals(binaryValue, docValues.binaryValue());
+        if (binaryValue == null) {
+          assertTrue(docValues.docID() > i);
+        } else {
+          assertEquals(i, docValues.docID());
+          assertEquals(binaryValue, docValues.binaryValue());
+          docValues.nextDoc();
+        }
       }
+      assertEquals(DocIdSetIterator.NO_MORE_DOCS, docValues.docID());
     }
     ir.close();
     writer.forceMerge(1);
     
     // compare again
     ir = writer.getReader();
+    TestUtil.checkReader(ir);
     for (LeafReaderContext context : ir.leaves()) {
       LeafReader r = context.reader();
       BinaryDocValues docValues = DocValues.getBinary(r, "dv");
+      docValues.nextDoc();
       for (int i = 0; i < r.maxDoc(); i++) {
         BytesRef binaryValue = r.document(i).getBinaryValue("stored");
-        assertEquals(i, docValues.nextDoc());
-        assertEquals(binaryValue, docValues.binaryValue());
+        if (binaryValue == null) {
+          assertTrue(docValues.docID() > i);
+        } else {
+          assertEquals(i, docValues.docID());
+          assertEquals(binaryValue, docValues.binaryValue());
+          docValues.nextDoc();
+        }
       }
+      assertEquals(DocIdSetIterator.NO_MORE_DOCS, docValues.docID());
     }
     ir.close();
     writer.close();
@@ -1514,17 +1601,41 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     int numIterations = atLeast(1);
     for (int i = 0; i < numIterations; i++) {
       int fixedLength = TestUtil.nextInt(random(), 1, 10);
-      doTestSortedVsStoredFields(atLeast(300), fixedLength, fixedLength);
+      doTestSortedVsStoredFields(atLeast(300), 1, fixedLength, fixedLength);
     }
   }
   
-  public void testSortedVariableLengthVsStoredFields() throws Exception {
+  public void testSparseSortedFixedLengthVsStoredFields() throws Exception {
     int numIterations = atLeast(1);
     for (int i = 0; i < numIterations; i++) {
-      doTestSortedVsStoredFields(atLeast(300), 1, 10);
+      int fixedLength = TestUtil.nextInt(random(), 1, 10);
+      doTestSortedVsStoredFields(atLeast(300), random().nextDouble(), fixedLength, fixedLength);
     }
   }
   
+  public void testSortedVariableLengthVsStoredFields() throws Exception {
+    int numIterations = atLeast(1);
+    for (int i = 0; i < numIterations; i++) {
+      doTestSortedVsStoredFields(atLeast(300), 1, 1, 10);
+    }
+  }
+
+  public void testSparseSortedVariableLengthVsStoredFields() throws Exception {
+    int numIterations = atLeast(1);
+    for (int i = 0; i < numIterations; i++) {
+      doTestSortedVsStoredFields(atLeast(300), random().nextDouble(), 1, 10);
+    }
+  }
+
+  protected void doTestSortedVsStoredFields(int numDocs, double density, int minLength, int maxLength) throws Exception {
+    doTestSortedVsStoredFields(numDocs, density, () -> {
+      int length = TestUtil.nextInt(random(), minLength, maxLength);
+      byte[] buffer = new byte[length];
+      random().nextBytes(buffer);
+      return buffer;
+    });
+  }
+
   public void testSortedSetOneValue() throws IOException {
     Directory directory = newDirectory();
     RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory);
@@ -2001,6 +2112,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     
     // compare
     DirectoryReader ir = writer.getReader();
+    TestUtil.checkReader(ir);
     for (LeafReaderContext context : ir.leaves()) {
       LeafReader r = context.reader();
       SortedSetDocValues docValues = r.getSortedSetDocValues("dv");
@@ -2029,6 +2141,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     
     // compare again
     ir = writer.getReader();
+    TestUtil.checkReader(ir);
     for (LeafReaderContext context : ir.leaves()) {
       LeafReader r = context.reader();
       SortedSetDocValues docValues = r.getSortedSetDocValues("dv");
@@ -2067,18 +2180,8 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     int numIterations = atLeast(1);
     for (int i = 0; i < numIterations; i++) {
       doTestSortedNumericsVsStoredFields(
-          new LongProducer() {
-            @Override
-            long next() {
-              return 1;
-            }
-          },
-          new LongProducer() {
-            @Override
-            long next() {
-              return TestUtil.nextLong(random(), Long.MIN_VALUE, Long.MAX_VALUE);
-            }
-          }
+          () -> 1,
+          random()::nextLong
       );
     }
   }
@@ -2087,18 +2190,8 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     int numIterations = atLeast(1);
     for (int i = 0; i < numIterations; i++) {
       doTestSortedNumericsVsStoredFields(
-          new LongProducer() {
-            @Override
-            long next() {
-              return random().nextBoolean() ? 0 : 1;
-            }
-          },
-          new LongProducer() {
-            @Override
-            long next() {
-              return TestUtil.nextLong(random(), Long.MIN_VALUE, Long.MAX_VALUE);
-            }
-          }
+          () -> random().nextBoolean() ? 0 : 1,
+          random()::nextLong
       );
     }
   }
@@ -2107,18 +2200,8 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     int numIterations = atLeast(1);
     for (int i = 0; i < numIterations; i++) {
       doTestSortedNumericsVsStoredFields(
-          new LongProducer() {
-            @Override
-            long next() {
-              return TestUtil.nextLong(random(), 0, 50);
-            }
-          },
-          new LongProducer() {
-            @Override
-            long next() {
-              return TestUtil.nextLong(random(), Long.MIN_VALUE, Long.MAX_VALUE);
-            }
-          }
+          () -> TestUtil.nextLong(random(), 0, 50),
+          random()::nextLong
       );
     }
   }
@@ -2131,18 +2214,8 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     int numIterations = atLeast(1);
     for (int i = 0; i < numIterations; i++) {
       doTestSortedNumericsVsStoredFields(
-          new LongProducer() {
-            @Override
-            long next() {
-              return TestUtil.nextLong(random(), 0, 6);
-            }
-          },
-          new LongProducer() {
-            @Override
-            long next() {
-              return values[random().nextInt(values.length)];
-            }
-          }
+          () -> TestUtil.nextLong(random(), 0, 6),
+          () -> values[random().nextInt(values.length)]
       );
     }
   }
@@ -2198,22 +2271,31 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
   }
 
   public void testGCDCompression() throws Exception {
+    doTestGCDCompression(1);
+  }
+
+  public void testSparseGCDCompression() throws Exception {
+    doTestGCDCompression(random().nextDouble());
+  }
+
+  private void doTestGCDCompression(double density) throws Exception {
     int numIterations = atLeast(1);
     for (int i = 0; i < numIterations; i++) {
       final long min = - (((long) random().nextInt(1 << 30)) << 32);
       final long mul = random().nextInt() & 0xFFFFFFFFL;
-      final LongProducer longs = new LongProducer() {
-        @Override
-        long next() {
-          return min + mul * random().nextInt(1 << 20);
-        }
+      final LongSupplier longs = () -> {
+        return min + mul * random().nextInt(1 << 20);
       };
-      doTestNumericsVsStoredFields(longs);
+      doTestNumericsVsStoredFields(density, longs);
     }
   }
 
   public void testZeros() throws Exception {
-    doTestNumericsVsStoredFields(0, 0);
+    doTestNumericsVsStoredFields(1, () -> 0);
+  }
+
+  public void testSparseZeros() throws Exception {
+    doTestNumericsVsStoredFields(random().nextDouble(), () -> 0);
   }
 
   public void testZeroOrMin() throws Exception {
@@ -2221,13 +2303,10 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     // the GCD of 0 and MIN_VALUE is negative
     int numIterations = atLeast(1);
     for (int i = 0; i < numIterations; i++) {
-      final LongProducer longs = new LongProducer() {
-        @Override
-        long next() {
-          return random().nextBoolean() ? 0 : Long.MIN_VALUE;
-        }
+      final LongSupplier longs = () -> {
+        return random().nextBoolean() ? 0 : Long.MIN_VALUE;
       };
-      doTestNumericsVsStoredFields(longs);
+      doTestNumericsVsStoredFields(1, longs);
     }
   }
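
The refactor above replaces the ad-hoc LongProducer with java.util.function.LongSupplier and threads a density parameter through the *VsStoredFields helpers: each document either receives a value from the supplier or is left empty. A condensed sketch of how those two knobs interact, with plain arrays standing in for the indexed documents and all names being illustrative:

import java.util.Arrays;
import java.util.Random;
import java.util.function.LongSupplier;

// Sketch of the density + LongSupplier pattern: each doc either gets a value
// from the supplier (probability = density) or stays empty, which is what
// exercises the sparse doc-values code paths.
public class SparseValuesSketch {
  static Long[] generate(int numDocs, double density, LongSupplier longs, Random random) {
    Long[] docs = new Long[numDocs]; // null == doc without a value
    for (int i = 0; i < numDocs; i++) {
      if (random.nextDouble() > density) {
        continue; // sparse case: leave this doc without a value
      }
      docs[i] = longs.getAsLong();
    }
    return docs;
  }

  public static void main(String[] args) {
    Random random = new Random(42);
    // Dense boolean-style values, then a sparse variant of the same supplier.
    Long[] dense = generate(20, 1.0, () -> random.nextInt(2), random);
    Long[] sparse = generate(20, 0.3, () -> random.nextInt(2), random);
    System.out.println(Arrays.toString(dense));
    System.out.println(Arrays.toString(sparse));
  }
}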
   

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
index d56e6cb..7a7abc0 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
@@ -362,6 +362,12 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
                                      }
 
                                      @Override
+                                    public boolean advanceExact(int target) throws IOException {
+                                      docID = target;
+                                      return target == 0;
+                                    }
+
+                                     @Override
                                      public long cost() {
                                        return 1;
                                      }
@@ -415,6 +421,12 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
                                      }
 
                                      @Override
+                                    public boolean advanceExact(int target) throws IOException {
+                                      docID = target;
+                                      return target == 0;
+                                    }
+
+                                     @Override
                                      public long cost() {
                                        return 1;
                                      }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/test-framework/src/java/org/apache/lucene/index/BaseNormsFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseNormsFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseNormsFormatTestCase.java
index 64e99da..cd62218 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseNormsFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseNormsFormatTestCase.java
@@ -21,6 +21,7 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Random;
 import java.util.concurrent.CountDownLatch;
+import java.util.function.LongSupplier;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.MockAnalyzer;
@@ -59,9 +60,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     int iterations = atLeast(1);
     final Random r = random();
     for (int i = 0; i < iterations; i++) {
-      doTestNormsVersusDocValues(1, new LongProducer() {
+      doTestNormsVersusDocValues(1, new LongSupplier() {
         @Override
-        long next() {
+        public long getAsLong() {
           return TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
         }
       });
@@ -73,9 +74,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     int iterations = atLeast(1);
     final Random r = random();
     for (int i = 0; i < iterations; i++) {
-      doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
+      doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
         @Override
-        long next() {
+        public long getAsLong() {
           return TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
         }
       });
@@ -86,9 +87,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     int iterations = atLeast(1);
     final Random r = random();
     for (int i = 0; i < iterations; i++) {
-      doTestNormsVersusDocValues(1, new LongProducer() {
+      doTestNormsVersusDocValues(1, new LongSupplier() {
         @Override
-        long next() {
+        public long getAsLong() {
           return TestUtil.nextLong(r, Short.MIN_VALUE, Short.MAX_VALUE);
         }
       });
@@ -100,9 +101,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     int iterations = atLeast(1);
     final Random r = random();
     for (int i = 0; i < iterations; i++) {
-      doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
+      doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
         @Override
-        long next() {
+        public long getAsLong() {
           return TestUtil.nextLong(r, Short.MIN_VALUE, Short.MAX_VALUE);
         }
       });
@@ -113,9 +114,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     int iterations = atLeast(1);
     final Random r = random();
     for (int i = 0; i < iterations; i++) {
-      doTestNormsVersusDocValues(1, new LongProducer() {
+      doTestNormsVersusDocValues(1, new LongSupplier() {
         @Override
-        long next() {
+        public long getAsLong() {
           return TestUtil.nextLong(r, Long.MIN_VALUE, Long.MAX_VALUE);
         }
       });
@@ -127,9 +128,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     int iterations = atLeast(1);
     final Random r = random();
     for (int i = 0; i < iterations; i++) {
-      doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
+      doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
         @Override
-        long next() {
+        public long getAsLong() {
           return TestUtil.nextLong(r, Long.MIN_VALUE, Long.MAX_VALUE);
         }
       });
@@ -140,9 +141,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     int iterations = atLeast(1);
     final Random r = random();
     for (int i = 0; i < iterations; i++) {
-      doTestNormsVersusDocValues(1, new LongProducer() {
+      doTestNormsVersusDocValues(1, new LongSupplier() {
         @Override
-        long next() {
+        public long getAsLong() {
           int thingToDo = r.nextInt(3);
           switch (thingToDo) {
             case 0: return Long.MIN_VALUE;
@@ -159,9 +160,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     int iterations = atLeast(1);
     final Random r = random();
     for (int i = 0; i < iterations; i++) {
-      doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
+      doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
         @Override
-        long next() {
+        public long getAsLong() {
           int thingToDo = r.nextInt(3);
           switch (thingToDo) {
             case 0: return Long.MIN_VALUE;
@@ -177,9 +178,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     int iterations = atLeast(1);
     final Random r = random();
     for (int i = 0; i < iterations; i++) {
-      doTestNormsVersusDocValues(1, new LongProducer() {
+      doTestNormsVersusDocValues(1, new LongSupplier() {
         @Override
-        long next() {
+        public long getAsLong() {
           return r.nextBoolean() ? 20 : 3;
         }
       });
@@ -191,9 +192,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     int iterations = atLeast(1);
     final Random r = random();
     for (int i = 0; i < iterations; i++) {
-      doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
+      doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
         @Override
-        long next() {
+        public long getAsLong() {
           return r.nextBoolean() ? 20 : 3;
         }
       });
@@ -204,9 +205,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     int iterations = atLeast(1);
     final Random r = random();
     for (int i = 0; i < iterations; i++) {
-      doTestNormsVersusDocValues(1, new LongProducer() {
+      doTestNormsVersusDocValues(1, new LongSupplier() {
         @Override
-        long next() {
+        public long getAsLong() {
           return r.nextBoolean() ? 1000000L : -5000;
         }
       });
@@ -218,9 +219,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     int iterations = atLeast(1);
     final Random r = random();
     for (int i = 0; i < iterations; i++) {
-      doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
+      doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
         @Override
-        long next() {
+        public long getAsLong() {
           return r.nextBoolean() ? 1000000L : -5000;
         }
       });
@@ -230,9 +231,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
   public void testAllZeros() throws Exception {
     int iterations = atLeast(1);
     for (int i = 0; i < iterations; i++) {
-      doTestNormsVersusDocValues(1, new LongProducer() {
+      doTestNormsVersusDocValues(1, new LongSupplier() {
         @Override
-        long next() {
+        public long getAsLong() {
           return 0;
         }
       });
@@ -243,9 +244,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     assumeTrue("Requires sparse norms support", codecSupportsSparsity());
     int iterations = atLeast(1);
     for (int i = 0; i < iterations; i++) {
-      doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
+      doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
         @Override
-        long next() {
+        public long getAsLong() {
           return 0;
         }
       });
@@ -256,9 +257,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     int iterations = atLeast(1);
     final Random r = random();
     for (int i = 0; i < iterations; i++) {
-      doTestNormsVersusDocValues(1, new LongProducer() {
+      doTestNormsVersusDocValues(1, new LongSupplier() {
         @Override
-        long next() {
+        public long getAsLong() {
           return r.nextInt(100) == 0 ? TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE) : 0;
         }
       });
@@ -270,9 +271,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     final Random r = random();
     for (int i = 0; i < iterations; i++) {
       final long commonValue = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
-      doTestNormsVersusDocValues(1, new LongProducer() {
+      doTestNormsVersusDocValues(1, new LongSupplier() {
         @Override
-        long next() {
+        public long getAsLong() {
           return r.nextInt(100) == 0 ? TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE) : commonValue;
         }
       });
@@ -285,9 +286,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     final Random r = random();
     for (int i = 0; i < iterations; i++) {
       final long commonValue = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
-      doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
+      doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
         @Override
-        long next() {
+        public long getAsLong() {
           return r.nextInt(100) == 0 ? TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE) : commonValue;
         }
       });
@@ -300,9 +301,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     for (int i = 0; i < iterations; i++) {
       final long commonValue = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
       final long uncommonValue = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
-      doTestNormsVersusDocValues(1, new LongProducer() {
+      doTestNormsVersusDocValues(1, new LongSupplier() {
         @Override
-        long next() {
+        public long getAsLong() {
           return r.nextInt(100) == 0 ? uncommonValue : commonValue;
         }
       });
@@ -316,9 +317,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     for (int i = 0; i < iterations; i++) {
       final long commonValue = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
       final long uncommonValue = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
-      doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
+      doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
         @Override
-        long next() {
+        public long getAsLong() {
           return r.nextInt(100) == 0 ? uncommonValue : commonValue;
         }
       });
@@ -337,9 +338,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     for (int j = 0; j < numOtherValues; ++j) {
       otherValues[j] = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
     }
-    doTestNormsVersusDocValues(1, new LongProducer() {
+    doTestNormsVersusDocValues(1, new LongSupplier() {
       @Override
-      long next() {
+      public long getAsLong() {
         return r.nextInt(100) == 0 ? otherValues[r.nextInt(numOtherValues - 1)] : commonValues[r.nextInt(N - 1)];
       }
     });
@@ -358,9 +359,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     for (int j = 0; j < numOtherValues; ++j) {
       otherValues[j] = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
     }
-    doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
+    doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
       @Override
-      long next() {
+      public long getAsLong() {
         return r.nextInt(100) == 0 ? otherValues[r.nextInt(numOtherValues - 1)] : commonValues[r.nextInt(N - 1)];
       }
     });
@@ -386,9 +387,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
         for (int j = 0; j < numOtherValues; ++j) {
           otherValues[j] = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
         }
-        doTestNormsVersusDocValues(1, new LongProducer() {
+        doTestNormsVersusDocValues(1, new LongSupplier() {
           @Override
-          long next() {
+          public long getAsLong() {
             return r.nextInt(100) == 0 ? otherValues[r.nextInt(numOtherValues - 1)] : commonValues[r.nextInt(N - 1)];
           }
         });
@@ -417,9 +418,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
         for (int j = 0; j < numOtherValues; ++j) {
           otherValues[j] = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
         }
-        doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
+        doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
           @Override
-          long next() {
+          public long getAsLong() {
             return r.nextInt(100) == 0 ? otherValues[r.nextInt(numOtherValues - 1)] : commonValues[r.nextInt(N - 1)];
           }
         });
@@ -427,7 +428,7 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     }
   }
 
-  private void doTestNormsVersusDocValues(double density, LongProducer longs) throws Exception {
+  private void doTestNormsVersusDocValues(double density, LongSupplier longs) throws Exception {
     int numDocs = atLeast(500);
     final FixedBitSet docsWithField = new FixedBitSet(numDocs);
     final int numDocsWithField = Math.max(1, (int) (density * numDocs));
@@ -445,7 +446,7 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     }
     long norms[] = new long[numDocsWithField];
     for (int i = 0; i < numDocsWithField; i++) {
-      norms[i] = longs.next();
+      norms[i] = longs.getAsLong();
     }
     
     Directory dir = newDirectory();
@@ -519,10 +520,6 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     }
   }
   
-  static abstract class LongProducer {
-    abstract long next();
-  }
-  
   static class CannedNormSimilarity extends Similarity {
     final long norms[];
     int index = 0;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/solr/core/src/java/org/apache/solr/request/DocValuesFacets.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/request/DocValuesFacets.java b/solr/core/src/java/org/apache/solr/request/DocValuesFacets.java
index 09ad836..0dab34b 100644
--- a/solr/core/src/java/org/apache/solr/request/DocValuesFacets.java
+++ b/solr/core/src/java/org/apache/solr/request/DocValuesFacets.java
@@ -272,11 +272,8 @@ public class DocValuesFacets {
     final LongValues ordmap = map == null ? null : map.getGlobalOrds(subIndex);
     int doc;
     while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
-      if (doc > si.docID()) {
-        si.advance(doc);
-      }
       int term;
-      if (doc == si.docID()) {
+      if (si.advanceExact(doc)) {
         term = si.ordValue();
       } else {
         term = -1;
@@ -301,10 +298,7 @@ public class DocValuesFacets {
     
     int doc;
     while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
-      if (doc > si.docID()) {
-        si.advance(doc);
-      }
-      if (doc == si.docID()) {
+      if (si.advanceExact(doc)) {
         segCounts[1+si.ordValue()]++;
       } else {
         segCounts[0]++;
@@ -334,10 +328,7 @@ public class DocValuesFacets {
     final LongValues ordMap = map == null ? null : map.getGlobalOrds(subIndex);
     int doc;
     while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
-      if (doc > si.docID()) {
-        si.advance(doc);
-      }
-      if (doc == si.docID()) {
+      if (si.advanceExact(doc)) {
         // strange do-while to collect the missing count (first ord is NO_MORE_ORDS)
         int term = (int) si.nextOrd();
         do {
@@ -365,10 +356,7 @@ public class DocValuesFacets {
     
     int doc;
     while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
-      if (doc > si.docID()) {
-        si.advance(doc);
-      }
-      if (doc == si.docID()) {
+      if (si.advanceExact(doc)) {
         int term = (int) si.nextOrd();
         do {
           segCounts[1+term]++;
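
The Solr hunks in this and the following files all collapse the old two-step positioning idiom (advance only if behind, then compare docID()) into a single advanceExact call that both positions the cursor and reports whether the document has a value. A rough sketch of the two equivalent loop shapes, using stand-in interfaces rather than the real DocIdSetIterator and SortedDocValues:

import java.io.IOException;

// Rough sketch of the positioning idiom; interfaces are stand-ins, not the
// real Lucene DocIdSetIterator / SortedDocValues classes.
public class AdvanceExactSketch {
  static final int NO_MORE_DOCS = Integer.MAX_VALUE;

  interface DocIterator {
    int nextDoc() throws IOException;
  }

  interface OrdCursor {
    int docID();
    int advance(int target) throws IOException;          // old style: may overshoot
    boolean advanceExact(int target) throws IOException;  // new style: exact positioning
    int ordValue() throws IOException;
  }

  /** Old idiom: advance only if behind, then compare docID() to decide. */
  static void countOld(DocIterator disi, OrdCursor si, int[] counts) throws IOException {
    int doc;
    while ((doc = disi.nextDoc()) != NO_MORE_DOCS) {
      if (doc > si.docID()) {
        si.advance(doc);
      }
      if (doc == si.docID()) {
        counts[1 + si.ordValue()]++;
      } else {
        counts[0]++; // missing
      }
    }
  }

  /** New idiom: advanceExact positions on doc and reports whether it has a value. */
  static void countNew(DocIterator disi, OrdCursor si, int[] counts) throws IOException {
    int doc;
    while ((doc = disi.nextDoc()) != NO_MORE_DOCS) {
      if (si.advanceExact(doc)) {
        counts[1 + si.ordValue()]++;
      } else {
        counts[0]++; // missing
      }
    }
  }
}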

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/solr/core/src/java/org/apache/solr/request/PerSegmentSingleValuedFaceting.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/request/PerSegmentSingleValuedFaceting.java b/solr/core/src/java/org/apache/solr/request/PerSegmentSingleValuedFaceting.java
index 0c88b80..3db7b4c 100644
--- a/solr/core/src/java/org/apache/solr/request/PerSegmentSingleValuedFaceting.java
+++ b/solr/core/src/java/org/apache/solr/request/PerSegmentSingleValuedFaceting.java
@@ -293,11 +293,8 @@ class PerSegmentSingleValuedFaceting {
         // specialized version when collecting counts for all terms
         int doc;
         while ((doc = iter.nextDoc()) < DocIdSetIterator.NO_MORE_DOCS) {
-          if (doc > si.docID()) {
-            si.advance(doc);
-          }
           int t;
-          if (doc == si.docID()) {
+          if (si.advanceExact(doc)) {
             t = 1+si.ordValue();
           } else {
             t = 0;
@@ -309,11 +306,8 @@ class PerSegmentSingleValuedFaceting {
         // version that adjusts term numbers because we aren't collecting the full range
         int doc;
         while ((doc = iter.nextDoc()) < DocIdSetIterator.NO_MORE_DOCS) {
-          if (doc > si.docID()) {
-            si.advance(doc);
-          }
           int term;
-          if (doc == si.docID()) {
+          if (si.advanceExact(doc)) {
             term = si.ordValue();
           } else {
             term = -1;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index 933477b..5ac1975 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@ -795,7 +795,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
               continue;
             }
             Long val;
-            if (ndv.advance(localId) == localId) {
+            if (ndv.advanceExact(localId)) {
               val = ndv.longValue();
             } else {
               continue;
@@ -820,7 +820,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
               continue;
             }
             BytesRef value;
-            if (bdv.advance(localId) == localId) {
+            if (bdv.advanceExact(localId)) {
               value = BytesRef.deepCopyOf(bdv.binaryValue());
             } else {
               continue;
@@ -832,7 +832,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
             if (sdv == null) {
               continue;
             }
-            if (sdv.advance(localId) == localId) {
+            if (sdv.advanceExact(localId)) {
               final BytesRef bRef = sdv.binaryValue();
               // Special handling for Boolean fields since they're stored as 'T' and 'F'.
               if (schemaField.getType() instanceof BoolField) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArrayDV.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArrayDV.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArrayDV.java
index fb60945..88adf67 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArrayDV.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArrayDV.java
@@ -186,10 +186,7 @@ class FacetFieldProcessorByArrayDV extends FacetFieldProcessorByArray {
 
     int doc;
     while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
-      if (doc > singleDv.docID()) {
-        singleDv.advance(doc);
-      }
-      if (doc == singleDv.docID()) {
+      if (singleDv.advanceExact(doc)) {
         counts[ singleDv.ordValue() + 1 ]++;
       } else {
         counts[ 0 ]++;
@@ -211,10 +208,7 @@ class FacetFieldProcessorByArrayDV extends FacetFieldProcessorByArray {
 
     int doc;
     while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
-      if (doc > multiDv.docID()) {
-        multiDv.advance(doc);
-      }
-      if (doc == multiDv.docID()) {
+      if (multiDv.advanceExact(doc)) {
         for(;;) {
           int segOrd = (int)multiDv.nextOrd();
           if (segOrd < 0) break;
@@ -247,10 +241,7 @@ class FacetFieldProcessorByArrayDV extends FacetFieldProcessorByArray {
   private void collectDocs(SortedDocValues singleDv, DocIdSetIterator disi, LongValues toGlobal) throws IOException {
     int doc;
     while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
-      if (doc > singleDv.docID()) {
-        singleDv.advance(doc);
-      }
-      if (doc == singleDv.docID()) {
+      if (singleDv.advanceExact(doc)) {
         int segOrd = singleDv.ordValue();
         collect(doc, segOrd, toGlobal);
       }
@@ -260,10 +251,7 @@ class FacetFieldProcessorByArrayDV extends FacetFieldProcessorByArray {
   private void collectCounts(SortedDocValues singleDv, DocIdSetIterator disi, LongValues toGlobal) throws IOException {
     int doc;
     while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
-      if (doc > singleDv.docID()) {
-        singleDv.advance(doc);
-      }
-      if (doc == singleDv.docID()) {
+      if (singleDv.advanceExact(doc)) {
         int segOrd = singleDv.ordValue();
         int ord = (int)toGlobal.get(segOrd);
         countAcc.incrementCount(ord, 1);
@@ -274,10 +262,7 @@ class FacetFieldProcessorByArrayDV extends FacetFieldProcessorByArray {
   private void collectDocs(SortedSetDocValues multiDv, DocIdSetIterator disi, LongValues toGlobal) throws IOException {
     int doc;
     while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
-      if (doc > multiDv.docID()) {
-        multiDv.advance(doc);
-      }
-      if (doc == multiDv.docID()) {
+      if (multiDv.advanceExact(doc)) {
         for(;;) {
           int segOrd = (int)multiDv.nextOrd();
           if (segOrd < 0) break;
@@ -290,10 +275,7 @@ class FacetFieldProcessorByArrayDV extends FacetFieldProcessorByArray {
   private void collectCounts(SortedSetDocValues multiDv, DocIdSetIterator disi, LongValues toGlobal) throws IOException {
     int doc;
     while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
-      if (doc > multiDv.docID()) {
-        multiDv.advance(doc);
-      }
-      if (doc == multiDv.docID()) {
+      if (multiDv.advanceExact(doc)) {
         for(;;) {
           int segOrd = (int)multiDv.nextOrd();
           if (segOrd < 0) break;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/solr/core/src/java/org/apache/solr/uninverting/FieldCacheImpl.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/uninverting/FieldCacheImpl.java b/solr/core/src/java/org/apache/solr/uninverting/FieldCacheImpl.java
index b63e5e9..2224010 100644
--- a/solr/core/src/java/org/apache/solr/uninverting/FieldCacheImpl.java
+++ b/solr/core/src/java/org/apache/solr/uninverting/FieldCacheImpl.java
@@ -692,6 +692,12 @@ class FieldCacheImpl implements FieldCache {
         }
 
         @Override
+        public boolean advanceExact(int target) throws IOException {
+          docID = target;
+          return docsWithField.get(docID);
+        }
+
+        @Override
         public long cost() {
           return values.size();
         }
@@ -821,6 +827,12 @@ class FieldCacheImpl implements FieldCache {
         }
 
         @Override
+        public boolean advanceExact(int target) throws IOException {
+          docID = target;
+          return docToTermOrd.get(docID) != 0;
+        }
+
+        @Override
         public long cost() {
           return 0;
         }
@@ -1022,6 +1034,12 @@ class FieldCacheImpl implements FieldCache {
         }
 
         @Override
+        public boolean advanceExact(int target) throws IOException {
+          docID = target;
+          return docsWithField.get(docID);
+        }
+
+        @Override
         public long cost() {
           return 0;
         }


[44/50] [abbrv] lucene-solr:jira/solr-8542-v2: SOLR-7506: Roll over GC logs by default via bin/solr scripts

Posted by cp...@apache.org.
SOLR-7506: Roll over GC logs by default via bin/solr scripts


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/ef573746
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/ef573746
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/ef573746

Branch: refs/heads/jira/solr-8542-v2
Commit: ef5737466e4597c21c80b167f1db295c081578d4
Parents: 61e180b
Author: Jan Høydahl <ja...@apache.org>
Authored: Mon Oct 24 14:22:24 2016 +0200
Committer: Jan Høydahl <ja...@apache.org>
Committed: Mon Oct 24 14:22:24 2016 +0200

----------------------------------------------------------------------
 solr/CHANGES.txt                                           | 1 +
 solr/bin/solr                                              | 5 +++--
 solr/bin/solr.cmd                                          | 8 ++++----
 solr/core/src/java/org/apache/solr/util/SolrCLI.java       | 4 ++--
 solr/core/src/test/org/apache/solr/util/UtilsToolTest.java | 6 +++++-
 5 files changed, 15 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ef573746/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 04d4d77..e223b4d 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -234,6 +234,7 @@ Optimizations
 
 * SOLR-9506: cache IndexFingerprint for each segment (Pushkar Raste, yonik, noble)
 
+* SOLR-7506: Roll over GC logs by default via bin/solr scripts (shalin, janhoy)
 
 Other Changes
 ----------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ef573746/solr/bin/solr
----------------------------------------------------------------------
diff --git a/solr/bin/solr b/solr/bin/solr
index d2936de..9d55e0a 100755
--- a/solr/bin/solr
+++ b/solr/bin/solr
@@ -1411,13 +1411,14 @@ if [ -z ${GC_LOG_OPTS+x} ]; then
 else
   GC_LOG_OPTS=($GC_LOG_OPTS)
 fi
-# if verbose gc logging enabled, setup the location of the log file
+
+# if verbose gc logging enabled, setup the location of the log file and rotation
 if [ "$GC_LOG_OPTS" != "" ]; then
   gc_log_flag="-Xloggc"
   if [ "$JAVA_VENDOR" == "IBM J9" ]; then
     gc_log_flag="-Xverbosegclog"
   fi
-  GC_LOG_OPTS+=("$gc_log_flag:$SOLR_LOGS_DIR/solr_gc.log")
+  GC_LOG_OPTS+=("$gc_log_flag:$SOLR_LOGS_DIR/solr_gc.log" -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=9 -XX:GCLogFileSize=20M)
 fi
 
 # If ZK_HOST is defined, then assume SolrCloud mode

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ef573746/solr/bin/solr.cmd
----------------------------------------------------------------------
diff --git a/solr/bin/solr.cmd b/solr/bin/solr.cmd
index 317a789..4ab188f 100644
--- a/solr/bin/solr.cmd
+++ b/solr/bin/solr.cmd
@@ -1013,23 +1013,23 @@ IF NOT EXIST "%SOLR_SERVER_DIR%\tmp" (
 )
 
 IF "%JAVA_VENDOR%" == "IBM J9" (
-  set "GCLOG_OPT=-Xverbosegclog"
+  set GCLOG_OPT="-Xverbosegclog:!SOLR_LOGS_DIR!\solr_gc.log" -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=9 -XX:GCLogFileSize=20M
 ) else (
-  set "GCLOG_OPT=-Xloggc"
+  set GCLOG_OPT="-Xloggc:!SOLR_LOGS_DIR!\solr_gc.log" -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=9 -XX:GCLogFileSize=20M
 )
 
 IF "%FG%"=="1" (
   REM run solr in the foreground
   title "Solr-%SOLR_PORT%"
   echo %SOLR_PORT%>"%SOLR_TIP%"\bin\solr-%SOLR_PORT%.port
-  "%JAVA%" %SERVEROPT% %SOLR_JAVA_MEM% %START_OPTS% %GCLOG_OPT%:"!SOLR_LOGS_DIR!/solr_gc.log" ^
+  "%JAVA%" %SERVEROPT% %SOLR_JAVA_MEM% %START_OPTS% %GCLOG_OPT% ^
     -Dlog4j.configuration="%LOG4J_CONFIG%" -DSTOP.PORT=!STOP_PORT! -DSTOP.KEY=%STOP_KEY% ^
     -Dsolr.solr.home="%SOLR_HOME%" -Dsolr.install.dir="%SOLR_TIP%" ^
     -Djetty.host=%SOLR_JETTY_HOST% -Djetty.port=%SOLR_PORT% -Djetty.home="%SOLR_SERVER_DIR%" ^
     -Djava.io.tmpdir="%SOLR_SERVER_DIR%\tmp" -jar start.jar "%SOLR_JETTY_CONFIG%"
 ) ELSE (
   START /B "Solr-%SOLR_PORT%" /D "%SOLR_SERVER_DIR%" ^
-    "%JAVA%" %SERVEROPT% %SOLR_JAVA_MEM% %START_OPTS% %GCLOG_OPT%:"!SOLR_LOGS_DIR!/solr_gc.log" ^
+    "%JAVA%" %SERVEROPT% %SOLR_JAVA_MEM% %START_OPTS% %GCLOG_OPT% ^
     -Dlog4j.configuration="%LOG4J_CONFIG%" -DSTOP.PORT=!STOP_PORT! -DSTOP.KEY=%STOP_KEY% ^
     -Dsolr.log.muteconsole ^
     -Dsolr.solr.home="%SOLR_HOME%" -Dsolr.install.dir="%SOLR_TIP%" ^

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ef573746/solr/core/src/java/org/apache/solr/util/SolrCLI.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/SolrCLI.java b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
index 76e5ee9..ebaeda8 100644
--- a/solr/core/src/java/org/apache/solr/util/SolrCLI.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
@@ -3444,13 +3444,13 @@ public class SolrCLI {
         Files.createDirectories(archivePath);
       }
       List<Path> archived = Files.find(archivePath, 1, (f, a) 
-          -> a.isRegularFile() && String.valueOf(f.getFileName()).startsWith("solr_gc_"))
+          -> a.isRegularFile() && String.valueOf(f.getFileName()).matches("^solr_gc[_.].+"))
           .collect(Collectors.toList());
       for (Path p : archived) {
         Files.delete(p);
       }
       List<Path> files = Files.find(logsPath, 1, (f, a) 
-          -> a.isRegularFile() && String.valueOf(f.getFileName()).startsWith("solr_gc_"))
+          -> a.isRegularFile() && String.valueOf(f.getFileName()).matches("^solr_gc[_.].+"))
           .collect(Collectors.toList());
       if (files.size() > 0) {
         out("Archiving " + files.size() + " old GC log files to " + archivePath);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ef573746/solr/core/src/test/org/apache/solr/util/UtilsToolTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/util/UtilsToolTest.java b/solr/core/src/test/org/apache/solr/util/UtilsToolTest.java
index 6b2d31c..0ca65ed 100644
--- a/solr/core/src/test/org/apache/solr/util/UtilsToolTest.java
+++ b/solr/core/src/test/org/apache/solr/util/UtilsToolTest.java
@@ -55,6 +55,10 @@ public class UtilsToolTest extends SolrTestCaseJ4 {
       "solr_log_20160304", 
       "solr-8983-console.log",
       "solr_gc_log_20160102", 
+      "solr_gcnotremove", 
+      "solr_gc.log", 
+      "solr_gc.log.0", 
+      "solr_gc.log.0.current", 
       "solr_gc_log_2");
   
   @Before
@@ -136,7 +140,7 @@ public class UtilsToolTest extends SolrTestCaseJ4 {
     String[] args = {"utils", "-archive_gc_logs", "-l", dir.toString()};
     assertEquals(files.size(), fileCount());
     assertEquals(0, runTool(args));
-    assertEquals(files.size()-2, fileCount());
+    assertEquals(files.size()-5, fileCount());
     assertFalse(listFiles().contains("solr_gc_log_2"));
     assertTrue(Files.exists(dir.resolve("archived").resolve("solr_gc_log_2")));
     assertEquals(0, runTool(args));
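
The behavioural core of the SolrCLI change above is the switch from a startsWith("solr_gc_") check to the regular expression ^solr_gc[_.].+, so that rotated files such as solr_gc.log.0 are archived as well. Here is a small standalone sketch (file names taken from the UtilsToolTest list above) showing which names the new pattern accepts; it is illustrative only and not part of the commit.

  import java.util.Arrays;
  import java.util.List;

  class GcLogPatternSketch {
    public static void main(String[] args) {
      // Same pattern as the Files.find() predicate in SolrCLI above.
      String pattern = "^solr_gc[_.].+";
      List<String> names = Arrays.asList(
          "solr_gc_log_20160102",   // matches (old underscore naming)
          "solr_gc.log",            // matches (current GC log)
          "solr_gc.log.0",          // matches (rotated by -XX:+UseGCLogFileRotation)
          "solr_gc.log.0.current",  // matches (rotation in progress)
          "solr_gcnotremove",       // no match: no '_' or '.' right after "solr_gc"
          "solr-8983-console.log"); // no match: different prefix
      for (String name : names) {
        System.out.println(name + " -> " + name.matches(pattern));
      }
    }
  }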


[32/50] [abbrv] lucene-solr:jira/solr-8542-v2: SOLR-9326: Ability to create/delete/list snapshots at collection level.

Posted by cp...@apache.org.
SOLR-9326: Ability to create/delete/list snapshots at collection level.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/57ba9614
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/57ba9614
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/57ba9614

Branch: refs/heads/jira/solr-8542-v2
Commit: 57ba96145ce8233034c67ffaead22d3bd7f3460f
Parents: 49ca9ce
Author: yonik <yo...@apache.org>
Authored: Fri Oct 21 07:08:47 2016 -0400
Committer: yonik <yo...@apache.org>
Committed: Fri Oct 21 09:47:02 2016 -0400

----------------------------------------------------------------------
 solr/CHANGES.txt                                |   3 +
 .../java/org/apache/solr/cloud/BackupCmd.java   |  75 ++++-
 .../apache/solr/cloud/CreateSnapshotCmd.java    | 179 ++++++++++++
 .../apache/solr/cloud/DeleteSnapshotCmd.java    | 160 +++++++++++
 .../cloud/OverseerCollectionMessageHandler.java |   2 +
 .../snapshots/CollectionSnapshotMetaData.java   | 242 ++++++++++++++++
 .../core/snapshots/SolrSnapshotManager.java     | 180 ++++++++++++
 .../solr/handler/admin/CollectionsHandler.java  |  54 +++-
 .../solr/handler/admin/CoreAdminOperation.java  |   7 +-
 .../solr/handler/admin/CreateSnapshotOp.java    |  10 +-
 .../solr/handler/admin/DeleteSnapshotOp.java    |   4 +
 .../core/snapshots/TestSolrCloudSnapshots.java  | 285 +++++++++++++++++++
 .../solrj/request/CollectionAdminRequest.java   | 116 +++++++-
 .../solr/common/params/CollectionParams.java    |   3 +
 14 files changed, 1309 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/57ba9614/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 7228559..b4b0a33 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -149,6 +149,9 @@ New Features
 * SOLR-8370: Display configured Similarity in Schema-Browser, both global/default and per-field/field-type 
   (janhoy, Alexandre Rafalovitch)
 
+* SOLR-9326: Ability to create/delete/list snapshots at collection level.
+  (Hrishikesh Gadre via yonik)
+
 Bug Fixes
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/57ba9614/solr/core/src/java/org/apache/solr/cloud/BackupCmd.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/BackupCmd.java b/solr/core/src/java/org/apache/solr/cloud/BackupCmd.java
index 648eee8..b859d8e 100644
--- a/solr/core/src/java/org/apache/solr/cloud/BackupCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/BackupCmd.java
@@ -19,16 +19,21 @@ package org.apache.solr.cloud;
 import java.lang.invoke.MethodHandles;
 import java.net.URI;
 import java.time.Instant;
+import java.util.Collection;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Optional;
 import java.util.Properties;
 
 import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Replica.State;
 import org.apache.solr.common.cloud.Slice;
+import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZkNodeProps;
 import org.apache.solr.common.params.CoreAdminParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
@@ -36,6 +41,10 @@ import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.backup.BackupManager;
 import org.apache.solr.core.backup.repository.BackupRepository;
+import org.apache.solr.core.snapshots.CollectionSnapshotMetaData;
+import org.apache.solr.core.snapshots.CollectionSnapshotMetaData.CoreSnapshotMetaData;
+import org.apache.solr.core.snapshots.CollectionSnapshotMetaData.SnapshotStatus;
+import org.apache.solr.core.snapshots.SolrSnapshotManager;
 import org.apache.solr.handler.component.ShardHandler;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -63,6 +72,21 @@ public class BackupCmd implements OverseerCollectionMessageHandler.Cmd {
     String asyncId = message.getStr(ASYNC);
     String repo = message.getStr(CoreAdminParams.BACKUP_REPOSITORY);
 
+    String commitName = message.getStr(CoreAdminParams.COMMIT_NAME);
+    Optional<CollectionSnapshotMetaData> snapshotMeta = Optional.empty();
+    if (commitName != null) {
+      SolrZkClient zkClient = ocmh.overseer.getZkController().getZkClient();
+      snapshotMeta = SolrSnapshotManager.getCollectionLevelSnapshot(zkClient, collectionName, commitName);
+      if (!snapshotMeta.isPresent()) {
+        throw new SolrException(ErrorCode.BAD_REQUEST, "Snapshot with name " + commitName
+            + " does not exist for collection " + collectionName);
+      }
+      if (snapshotMeta.get().getStatus() != SnapshotStatus.Successful) {
+        throw new SolrException(ErrorCode.BAD_REQUEST, "Snapshot with name " + commitName + " for collection " + collectionName
+            + " has not completed successfully. The status is " + snapshotMeta.get().getStatus());
+      }
+    }
+
     Map<String, String> requestMap = new HashMap<>();
     Instant startTime = Instant.now();
 
@@ -85,8 +109,28 @@ public class BackupCmd implements OverseerCollectionMessageHandler.Cmd {
     log.info("Starting backup of collection={} with backupName={} at location={}", collectionName, backupName,
         backupPath);
 
+    Collection<String> shardsToConsider = Collections.emptySet();
+    if (snapshotMeta.isPresent()) {
+      shardsToConsider = snapshotMeta.get().getShards();
+    }
+
     for (Slice slice : ocmh.zkStateReader.getClusterState().getCollection(collectionName).getActiveSlices()) {
-      Replica replica = slice.getLeader();
+      Replica replica = null;
+
+      if (snapshotMeta.isPresent()) {
+        if (!shardsToConsider.contains(slice.getName())) {
+          log.warn("Skipping the backup for shard {} since it wasn't part of the collection {} when snapshot {} was created.",
+              slice.getName(), collectionName, snapshotMeta.get().getName());
+          continue;
+        }
+        replica = selectReplicaWithSnapshot(snapshotMeta.get(), slice);
+      } else {
+        // Note - Actually this can return a null value when there is no leader for this shard.
+        replica = slice.getLeader();
+        if (replica == null) {
+          throw new SolrException(ErrorCode.SERVER_ERROR, "No 'leader' replica available for shard " + slice.getName() + " of collection " + collectionName);
+        }
+      }
 
       String coreName = replica.getStr(CORE_NAME_PROP);
 
@@ -96,6 +140,9 @@ public class BackupCmd implements OverseerCollectionMessageHandler.Cmd {
       params.set(CoreAdminParams.BACKUP_REPOSITORY, repo);
       params.set(CoreAdminParams.BACKUP_LOCATION, backupPath.toASCIIString()); // note: index dir will be here then the "snapshot." + slice name
       params.set(CORE_NAME_PROP, coreName);
+      if (snapshotMeta.isPresent()) {
+        params.set(CoreAdminParams.COMMIT_NAME, snapshotMeta.get().getName());
+      }
 
       ocmh.sendShardRequest(replica.getNodeName(), params, shardHandler, asyncId, requestMap);
       log.debug("Sent backup request to core={} for backupName={}", coreName, backupName);
@@ -129,4 +176,30 @@ public class BackupCmd implements OverseerCollectionMessageHandler.Cmd {
 
     log.info("Completed backing up ZK data for backupName={}", backupName);
   }
+
+  private Replica selectReplicaWithSnapshot(CollectionSnapshotMetaData snapshotMeta, Slice slice) {
+    // The goal here is to choose the snapshot of the replica which was the leader at the time snapshot was created.
+    // If that is not possible, we choose any other replica for the given shard.
+    Collection<CoreSnapshotMetaData> snapshots = snapshotMeta.getReplicaSnapshotsForShard(slice.getName());
+
+    Optional<CoreSnapshotMetaData> leaderCore = snapshots.stream().filter(x -> x.isLeader()).findFirst();
+    if (leaderCore.isPresent()) {
+      log.info("Replica {} was the leader when snapshot {} was created.", leaderCore.get().getCoreName(), snapshotMeta.getName());
+      Replica r = slice.getReplica(leaderCore.get().getCoreName());
+      if ((r != null) && !r.getState().equals(State.DOWN)) {
+        return r;
+      }
+    }
+
+    Optional<Replica> r = slice.getReplicas().stream()
+                               .filter(x -> x.getState() != State.DOWN && snapshotMeta.isSnapshotExists(slice.getName(), x))
+                               .findFirst();
+
+    if (!r.isPresent()) {
+      throw new SolrException(ErrorCode.SERVER_ERROR,
+          "Unable to find any live replica with a snapshot named " + snapshotMeta.getName() + " for shard " + slice.getName());
+    }
+
+    return r.get();
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/57ba9614/solr/core/src/java/org/apache/solr/cloud/CreateSnapshotCmd.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/CreateSnapshotCmd.java b/solr/core/src/java/org/apache/solr/cloud/CreateSnapshotCmd.java
new file mode 100644
index 0000000..5de65a4
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/cloud/CreateSnapshotCmd.java
@@ -0,0 +1,179 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.cloud;
+
+import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.CORE_NAME_PROP;
+import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
+import static org.apache.solr.common.params.CommonParams.NAME;
+
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrException.ErrorCode;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.DocCollection;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
+import org.apache.solr.common.cloud.SolrZkClient;
+import org.apache.solr.common.cloud.ZkNodeProps;
+import org.apache.solr.common.cloud.Replica.State;
+import org.apache.solr.common.params.CoreAdminParams;
+import org.apache.solr.common.params.CoreAdminParams.CoreAdminAction;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.core.snapshots.CollectionSnapshotMetaData;
+import org.apache.solr.core.snapshots.CollectionSnapshotMetaData.CoreSnapshotMetaData;
+import org.apache.solr.core.snapshots.CollectionSnapshotMetaData.SnapshotStatus;
+import org.apache.solr.core.snapshots.SolrSnapshotManager;
+import org.apache.solr.handler.component.ShardHandler;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This class implements the functionality of creating a collection level snapshot.
+ */
+public class CreateSnapshotCmd implements OverseerCollectionMessageHandler.Cmd {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private final OverseerCollectionMessageHandler ocmh;
+
+  public CreateSnapshotCmd (OverseerCollectionMessageHandler ocmh) {
+    this.ocmh = ocmh;
+  }
+
+  @Override
+  public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception {
+    String collectionName =  message.getStr(COLLECTION_PROP);
+    String commitName =  message.getStr(CoreAdminParams.COMMIT_NAME);
+    String asyncId = message.getStr(ASYNC);
+    SolrZkClient zkClient = this.ocmh.overseer.getZkController().getZkClient();
+    Date creationDate = new Date();
+
+    if(SolrSnapshotManager.snapshotExists(zkClient, collectionName, commitName)) {
+      throw new SolrException(ErrorCode.BAD_REQUEST, "Snapshot with name " + commitName
+          + " already exists for collection " + collectionName);
+    }
+
+    log.info("Creating a snapshot for collection={} with commitName={}", collectionName, commitName);
+
+    // Create a node in ZK to store the collection level snapshot meta-data.
+    SolrSnapshotManager.createCollectionLevelSnapshot(zkClient, collectionName, new CollectionSnapshotMetaData(commitName));
+    log.info("Created a ZK path to store snapshot information for collection={} with commitName={}", collectionName, commitName);
+
+    Map<String, String> requestMap = new HashMap<>();
+    NamedList shardRequestResults = new NamedList();
+    Map<String, Slice> shardByCoreName = new HashMap<>();
+    ShardHandler shardHandler = ocmh.shardHandlerFactory.getShardHandler();
+
+    for (Slice slice : ocmh.zkStateReader.getClusterState().getCollection(collectionName).getSlices()) {
+      for (Replica replica : slice.getReplicas()) {
+        if (replica.getState() != State.ACTIVE) {
+          log.info("Replica {} is not active. Hence not sending the createsnapshot request", replica.getCoreName());
+          continue; // Since replica is not active - no point sending a request.
+        }
+
+        String coreName = replica.getStr(CORE_NAME_PROP);
+
+        ModifiableSolrParams params = new ModifiableSolrParams();
+        params.set(CoreAdminParams.ACTION, CoreAdminAction.CREATESNAPSHOT.toString());
+        params.set(NAME, slice.getName());
+        params.set(CORE_NAME_PROP, coreName);
+        params.set(CoreAdminParams.COMMIT_NAME, commitName);
+
+        ocmh.sendShardRequest(replica.getNodeName(), params, shardHandler, asyncId, requestMap);
+        log.debug("Sent createsnapshot request to core={} with commitName={}", coreName, commitName);
+
+        shardByCoreName.put(coreName, slice);
+      }
+    }
+
+    // At this point we want to make sure that at-least one replica for every shard
+    // is able to create the snapshot. If that is not the case, then we fail the request.
+    // This is to take care of the situation where e.g. entire shard is unavailable.
+    Set<String> failedShards = new HashSet<>();
+
+    ocmh.processResponses(shardRequestResults, shardHandler, false, null, asyncId, requestMap);
+    NamedList success = (NamedList) shardRequestResults.get("success");
+    List<CoreSnapshotMetaData> replicas = new ArrayList<>();
+    if (success != null) {
+      for ( int i = 0 ; i < success.size() ; i++) {
+        NamedList resp = (NamedList)success.getVal(i);
+
+        // Check if this core is the leader for the shard. The idea here is that during the backup
+        // operation we preferably use the snapshot of the "leader" replica since it is most likely
+        // to have latest state.
+        String coreName = (String)resp.get(CoreAdminParams.CORE);
+        Slice slice = shardByCoreName.remove(coreName);
+        boolean leader = (slice.getLeader() != null && slice.getLeader().getCoreName().equals(coreName));
+        resp.add(SolrSnapshotManager.SHARD_ID, slice.getName());
+        resp.add(SolrSnapshotManager.LEADER, leader);
+
+        CoreSnapshotMetaData c = new CoreSnapshotMetaData(resp);
+        replicas.add(c);
+        log.info("Snapshot with commitName {} is created successfully for core {}", commitName, c.getCoreName());
+      }
+    }
+
+    if (!shardByCoreName.isEmpty()) { // One or more failures.
+      log.warn("Unable to create a snapshot with name {} for following cores {}", commitName, shardByCoreName.keySet());
+
+      // Count number of failures per shard.
+      Map<String, Integer> failuresByShardId = new HashMap<>();
+      for (Map.Entry<String,Slice> entry : shardByCoreName.entrySet()) {
+        int f = 0;
+        if (failuresByShardId.get(entry.getValue().getName()) != null) {
+          f = failuresByShardId.get(entry.getValue().getName());
+        }
+        failuresByShardId.put(entry.getValue().getName(), f + 1);
+      }
+
+      // Now that we know number of failures per shard, we can figure out
+      // if at-least one replica per shard was able to create a snapshot or not.
+      DocCollection collectionStatus = ocmh.zkStateReader.getClusterState().getCollection(collectionName);
+      for (Map.Entry<String,Integer> entry : failuresByShardId.entrySet()) {
+        int replicaCount = collectionStatus.getSlice(entry.getKey()).getReplicas().size();
+        if (replicaCount <= entry.getValue()) {
+          failedShards.add(entry.getKey());
+        }
+      }
+    }
+
+    if (failedShards.isEmpty()) { // No failures.
+      CollectionSnapshotMetaData meta = new CollectionSnapshotMetaData(commitName, SnapshotStatus.Successful, creationDate, replicas);
+      SolrSnapshotManager.updateCollectionLevelSnapshot(zkClient, collectionName, meta);
+      log.info("Saved following snapshot information for collection={} with commitName={} in Zookeeper : {}", collectionName,
+          commitName, meta.toNamedList());
+    } else {
+      log.warn("Failed to create a snapshot for collection {} with commitName = {}. Snapshot could not be captured for following shards {}",
+          collectionName, commitName, failedShards);
+      // Update the ZK meta-data to include only cores with the snapshot. This will enable users to figure out
+      // which cores have the named snapshot.
+      CollectionSnapshotMetaData meta = new CollectionSnapshotMetaData(commitName, SnapshotStatus.Failed, creationDate, replicas);
+      SolrSnapshotManager.updateCollectionLevelSnapshot(zkClient, collectionName, meta);
+      log.info("Saved following snapshot information for collection={} with commitName={} in Zookeeper : {}", collectionName,
+          commitName, meta.toNamedList());
+      throw new SolrException(ErrorCode.SERVER_ERROR, "Failed to create snapshot on shards " + failedShards);
+    }
+  }
+}
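
The failure-handling comments in CreateSnapshotCmd above express a simple rule: the request fails only when some shard has no replica at all that captured the snapshot. A minimal sketch of that per-shard check in isolation is shown below; the two input maps are assumed to be precomputed the same way as in the command above, and the class is illustrative rather than part of the commit.

  import java.util.HashSet;
  import java.util.Map;
  import java.util.Set;

  class PerShardFailureCheckSketch {
    /**
     * A shard counts as failed only when every one of its replicas failed to
     * create the snapshot; this mirrors the replicaCount <= failures test above.
     *
     * @param failuresByShardId   shard id -> number of replicas that failed (assumed precomputed)
     * @param replicaCountByShard shard id -> total replicas in that shard (assumed from cluster state)
     */
    static Set<String> failedShards(Map<String, Integer> failuresByShardId,
                                    Map<String, Integer> replicaCountByShard) {
      Set<String> failed = new HashSet<>();
      for (Map.Entry<String, Integer> e : failuresByShardId.entrySet()) {
        int replicaCount = replicaCountByShard.getOrDefault(e.getKey(), 0);
        if (replicaCount <= e.getValue()) {
          failed.add(e.getKey());
        }
      }
      return failed;
    }
  }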

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/57ba9614/solr/core/src/java/org/apache/solr/cloud/DeleteSnapshotCmd.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/DeleteSnapshotCmd.java b/solr/core/src/java/org/apache/solr/cloud/DeleteSnapshotCmd.java
new file mode 100644
index 0000000..765f4b9
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/cloud/DeleteSnapshotCmd.java
@@ -0,0 +1,160 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.cloud;
+
+import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.CORE_NAME_PROP;
+import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
+import static org.apache.solr.common.params.CommonParams.NAME;
+
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrException.ErrorCode;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
+import org.apache.solr.common.cloud.SolrZkClient;
+import org.apache.solr.common.cloud.ZkNodeProps;
+import org.apache.solr.common.cloud.Replica.State;
+import org.apache.solr.common.params.CoreAdminParams;
+import org.apache.solr.common.params.CoreAdminParams.CoreAdminAction;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.core.snapshots.CollectionSnapshotMetaData;
+import org.apache.solr.core.snapshots.CollectionSnapshotMetaData.CoreSnapshotMetaData;
+import org.apache.solr.core.snapshots.CollectionSnapshotMetaData.SnapshotStatus;
+import org.apache.solr.core.snapshots.SolrSnapshotManager;
+import org.apache.solr.handler.component.ShardHandler;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This class implements the functionality of deleting a collection level snapshot.
+ */
+public class DeleteSnapshotCmd implements OverseerCollectionMessageHandler.Cmd {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private final OverseerCollectionMessageHandler ocmh;
+
+  public DeleteSnapshotCmd (OverseerCollectionMessageHandler ocmh) {
+    this.ocmh = ocmh;
+  }
+
+  @Override
+  public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception {
+    String collectionName =  message.getStr(COLLECTION_PROP);
+    String commitName =  message.getStr(CoreAdminParams.COMMIT_NAME);
+    String asyncId = message.getStr(ASYNC);
+    Map<String, String> requestMap = new HashMap<>();
+    NamedList shardRequestResults = new NamedList();
+    ShardHandler shardHandler = ocmh.shardHandlerFactory.getShardHandler();
+    SolrZkClient zkClient = ocmh.overseer.getZkController().getZkClient();
+
+    Optional<CollectionSnapshotMetaData> meta = SolrSnapshotManager.getCollectionLevelSnapshot(zkClient, collectionName, commitName);
+    if (!meta.isPresent()) { // Snapshot not found. Nothing to do.
+      return;
+    }
+
+    log.info("Deleting a snapshot for collection={} with commitName={}", collectionName, commitName);
+
+    Set<String> existingCores = new HashSet<>();
+    for (Slice s : ocmh.zkStateReader.getClusterState().getCollection(collectionName).getSlices()) {
+      for (Replica r : s.getReplicas()) {
+        existingCores.add(r.getCoreName());
+      }
+    }
+
+    Set<String> coresWithSnapshot = new HashSet<>();
+    for (CoreSnapshotMetaData m : meta.get().getReplicaSnapshots()) {
+      if (existingCores.contains(m.getCoreName())) {
+        coresWithSnapshot.add(m.getCoreName());
+      }
+    }
+
+    log.info("Existing cores with snapshot for collection={} are {}", collectionName, existingCores);
+    for (Slice slice : ocmh.zkStateReader.getClusterState().getCollection(collectionName).getSlices()) {
+      for (Replica replica : slice.getReplicas()) {
+        if (replica.getState() == State.DOWN) {
+          continue; // Since replica is down - no point sending a request.
+        }
+
+        // Note - when a snapshot is found in_progress state - it is the result of overseer
+        // failure while handling the snapshot creation. Since we don't know the exact set of
+        // replicas to contact at this point, we try on all replicas.
+        if (meta.get().getStatus() == SnapshotStatus.InProgress || coresWithSnapshot.contains(replica.getCoreName())) {
+          String coreName = replica.getStr(CORE_NAME_PROP);
+
+          ModifiableSolrParams params = new ModifiableSolrParams();
+          params.set(CoreAdminParams.ACTION, CoreAdminAction.DELETESNAPSHOT.toString());
+          params.set(NAME, slice.getName());
+          params.set(CORE_NAME_PROP, coreName);
+          params.set(CoreAdminParams.COMMIT_NAME, commitName);
+
+          log.info("Sending deletesnapshot request to core={} with commitName={}", coreName, commitName);
+          ocmh.sendShardRequest(replica.getNodeName(), params, shardHandler, asyncId, requestMap);
+        }
+      }
+    }
+
+    ocmh.processResponses(shardRequestResults, shardHandler, false, null, asyncId, requestMap);
+    NamedList success = (NamedList) shardRequestResults.get("success");
+    List<CoreSnapshotMetaData> replicas = new ArrayList<>();
+    if (success != null) {
+      for ( int i = 0 ; i < success.size() ; i++) {
+        NamedList resp = (NamedList)success.getVal(i);
+        // Unfortunately async processing logic doesn't provide the "core" name automatically.
+        String coreName = (String)resp.get("core");
+        coresWithSnapshot.remove(coreName);
+      }
+    }
+
+    if (!coresWithSnapshot.isEmpty()) { // One or more failures.
+      log.warn("Failed to delete a snapshot for collection {} with commitName = {}. Snapshot could not be deleted for following cores {}",
+          collectionName, commitName, coresWithSnapshot);
+
+      List<CoreSnapshotMetaData> replicasWithSnapshot = new ArrayList<>();
+      for (CoreSnapshotMetaData m : meta.get().getReplicaSnapshots()) {
+        if (coresWithSnapshot.contains(m.getCoreName())) {
+          replicasWithSnapshot.add(m);
+        }
+      }
+
+      // Update the ZK meta-data to include only cores with the snapshot. This will enable users to figure out
+      // which cores still contain the named snapshot.
+      CollectionSnapshotMetaData newResult = new CollectionSnapshotMetaData(meta.get().getName(), SnapshotStatus.Failed,
+          meta.get().getCreationDate(), replicasWithSnapshot);
+      SolrSnapshotManager.updateCollectionLevelSnapshot(zkClient, collectionName, newResult);
+      log.info("Saved snapshot information for collection={} with commitName={} in Zookeeper as follows", collectionName, commitName,
+          Utils.toJSON(newResult));
+      throw new SolrException(ErrorCode.SERVER_ERROR, "Failed to delete snapshot on cores " + coresWithSnapshot);
+
+    } else {
+      // Delete the ZK path so that we eliminate the references of this snapshot from collection level meta-data.
+      SolrSnapshotManager.deleteCollectionLevelSnapshot(zkClient, collectionName, commitName);
+      log.info("Deleted Zookeeper snapshot metdata for collection={} with commitName={}", collectionName, commitName);
+      log.info("Successfully deleted snapshot for collection={} with commitName={}", collectionName, commitName);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/57ba9614/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java b/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java
index 0520488..a21f18f 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java
@@ -182,6 +182,8 @@ public class OverseerCollectionMessageHandler implements OverseerMessageHandler
         .put(DELETENODE, new DeleteNodeCmd(this))
         .put(BACKUP, new BackupCmd(this))
         .put(RESTORE, new RestoreCmd(this))
+        .put(CREATESNAPSHOT, new CreateSnapshotCmd(this))
+        .put(DELETESNAPSHOT, new DeleteSnapshotCmd(this))
         .put(SPLITSHARD, new SplitShardCmd(this))
         .put(ADDROLE, new OverseerRoleCmd(this, ADDROLE, overseerPrioritizer))
         .put(REMOVEROLE, new OverseerRoleCmd(this, REMOVEROLE, overseerPrioritizer))

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/57ba9614/solr/core/src/java/org/apache/solr/core/snapshots/CollectionSnapshotMetaData.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/snapshots/CollectionSnapshotMetaData.java b/solr/core/src/java/org/apache/solr/core/snapshots/CollectionSnapshotMetaData.java
new file mode 100644
index 0000000..4170861
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/core/snapshots/CollectionSnapshotMetaData.java
@@ -0,0 +1,242 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.core.snapshots;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.params.CoreAdminParams;
+import org.apache.solr.common.util.NamedList;
+import org.noggit.JSONWriter;
+
+/**
+ * This class defines the meta-data about a collection level snapshot
+ */
+public class CollectionSnapshotMetaData implements JSONWriter.Writable {
+  public static class CoreSnapshotMetaData implements JSONWriter.Writable {
+    private final String coreName;
+    private final String indexDirPath;
+    private final long generationNumber;
+    private final boolean leader;
+    private final String shardId;
+    private final Collection<String> files;
+
+    public CoreSnapshotMetaData(String coreName, String indexDirPath, long generationNumber, String shardId, boolean leader, Collection<String> files) {
+      this.coreName = coreName;
+      this.indexDirPath = indexDirPath;
+      this.generationNumber = generationNumber;
+      this.shardId = shardId;
+      this.leader = leader;
+      this.files = files;
+    }
+
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    public CoreSnapshotMetaData(NamedList resp) {
+      this.coreName = (String)resp.get(CoreAdminParams.CORE);
+      this.indexDirPath = (String)resp.get(SolrSnapshotManager.INDEX_DIR_PATH);
+      this.generationNumber = (Long)resp.get(SolrSnapshotManager.GENERATION_NUM);
+      this.shardId = (String)resp.get(SolrSnapshotManager.SHARD_ID);
+      this.leader = (Boolean)resp.get(SolrSnapshotManager.LEADER);
+      this.files = (Collection<String>)resp.get(SolrSnapshotManager.FILE_LIST);
+    }
+
+    public String getCoreName() {
+      return coreName;
+    }
+
+    public String getIndexDirPath() {
+      return indexDirPath;
+    }
+
+    public long getGenerationNumber() {
+      return generationNumber;
+    }
+
+    public Collection<String> getFiles() {
+      return files;
+    }
+
+    public String getShardId() {
+      return shardId;
+    }
+
+    public boolean isLeader() {
+      return leader;
+    }
+
+    @Override
+    public void write(JSONWriter arg0) {
+      LinkedHashMap<String, Object> info = new LinkedHashMap<String, Object>();
+      info.put(CoreAdminParams.CORE, getCoreName());
+      info.put(SolrSnapshotManager.INDEX_DIR_PATH, getIndexDirPath());
+      info.put(SolrSnapshotManager.GENERATION_NUM, getGenerationNumber());
+      info.put(SolrSnapshotManager.SHARD_ID, getShardId());
+      info.put(SolrSnapshotManager.LEADER, isLeader());
+      info.put(SolrSnapshotManager.FILE_LIST, getFiles());
+      arg0.write(info);
+    }
+
+    @SuppressWarnings({"rawtypes", "unchecked"})
+    public NamedList toNamedList() {
+      NamedList result = new NamedList();
+      result.add(CoreAdminParams.CORE, getCoreName());
+      result.add(SolrSnapshotManager.INDEX_DIR_PATH, getIndexDirPath());
+      result.add(SolrSnapshotManager.GENERATION_NUM, getGenerationNumber());
+      result.add(SolrSnapshotManager.SHARD_ID, getShardId());
+      result.add(SolrSnapshotManager.LEADER, isLeader());
+      result.add(SolrSnapshotManager.FILE_LIST, getFiles());
+      return result;
+    }
+  }
+
+  public static enum SnapshotStatus {
+    Successful, InProgress, Failed;
+  }
+
+  private final String name;
+  private final SnapshotStatus status;
+  private final Date creationDate;
+  private final List<CoreSnapshotMetaData> replicaSnapshots;
+
+  public CollectionSnapshotMetaData(String name) {
+    this(name, SnapshotStatus.InProgress, new Date(), Collections.<CoreSnapshotMetaData>emptyList());
+  }
+
+  public CollectionSnapshotMetaData(String name, SnapshotStatus status, Date creationTime, List<CoreSnapshotMetaData> replicaSnapshots) {
+    this.name = name;
+    this.status = status;
+    this.creationDate = creationTime;
+    this.replicaSnapshots = replicaSnapshots;
+  }
+
+  @SuppressWarnings("unchecked")
+  public CollectionSnapshotMetaData(Map<String, Object> data) {
+    this.name = (String)data.get(CoreAdminParams.NAME);
+    this.status = SnapshotStatus.valueOf((String)data.get(SolrSnapshotManager.SNAPSHOT_STATUS));
+    this.creationDate = new Date((Long)data.get(SolrSnapshotManager.CREATION_DATE));
+    this.replicaSnapshots = new ArrayList<>();
+
+    List<Object> r = (List<Object>) data.get(SolrSnapshotManager.SNAPSHOT_REPLICAS);
+    for (Object x : r) {
+      Map<String, Object> info = (Map<String, Object>)x;
+      String coreName = (String)info.get(CoreAdminParams.CORE);
+      String indexDirPath = (String)info.get(SolrSnapshotManager.INDEX_DIR_PATH);
+      long generationNumber = (Long) info.get(SolrSnapshotManager.GENERATION_NUM);
+      String shardId = (String)info.get(SolrSnapshotManager.SHARD_ID);
+      boolean leader = (Boolean) info.get(SolrSnapshotManager.LEADER);
+      Collection<String> files = (Collection<String>)info.get(SolrSnapshotManager.FILE_LIST);
+      replicaSnapshots.add(new CoreSnapshotMetaData(coreName, indexDirPath, generationNumber, shardId, leader, files));
+    }
+  }
+
+  @SuppressWarnings("unchecked")
+  public CollectionSnapshotMetaData(NamedList<Object> data) {
+    this.name = (String)data.get(CoreAdminParams.NAME);
+    String statusStr = (String)data.get(SolrSnapshotManager.SNAPSHOT_STATUS);
+    this.creationDate = new Date((Long)data.get(SolrSnapshotManager.CREATION_DATE));
+    this.status = SnapshotStatus.valueOf(statusStr);
+    this.replicaSnapshots = new ArrayList<>();
+
+    NamedList<Object> r = (NamedList<Object>) data.get(SolrSnapshotManager.SNAPSHOT_REPLICAS);
+    for (Map.Entry<String,Object> x : r) {
+      NamedList<Object> info = (NamedList<Object>)x.getValue();
+      String coreName = (String)info.get(CoreAdminParams.CORE);
+      String indexDirPath = (String)info.get(SolrSnapshotManager.INDEX_DIR_PATH);
+      long generationNumber = (Long) info.get(SolrSnapshotManager.GENERATION_NUM);
+      String shardId = (String)info.get(SolrSnapshotManager.SHARD_ID);
+      boolean leader = (Boolean) info.get(SolrSnapshotManager.LEADER);
+      Collection<String> files = (Collection<String>)info.get(SolrSnapshotManager.FILE_LIST);
+      replicaSnapshots.add(new CoreSnapshotMetaData(coreName, indexDirPath, generationNumber, shardId, leader, files));
+    }
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public SnapshotStatus getStatus() {
+    return status;
+  }
+
+  public Date getCreationDate() {
+    return creationDate;
+  }
+
+  public List<CoreSnapshotMetaData> getReplicaSnapshots() {
+    return replicaSnapshots;
+  }
+
+  public List<CoreSnapshotMetaData> getReplicaSnapshotsForShard(String shardId) {
+    List<CoreSnapshotMetaData> result = new ArrayList<>();
+    for (CoreSnapshotMetaData d : replicaSnapshots) {
+      if (d.getShardId().equals(shardId)) {
+        result.add(d);
+      }
+    }
+    return result;
+  }
+
+  public boolean isSnapshotExists(String shardId, Replica r) {
+    for (CoreSnapshotMetaData d : replicaSnapshots) {
+      if (d.getShardId().equals(shardId) && d.getCoreName().equals(r.getCoreName())) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  public Collection<String> getShards() {
+    Set<String> result = new HashSet<>();
+    for (CoreSnapshotMetaData d : replicaSnapshots) {
+      result.add(d.getShardId());
+    }
+    return result;
+  }
+
+  @Override
+  public void write(JSONWriter arg0) {
+    LinkedHashMap<String, Object> result = new LinkedHashMap<>();
+    result.put(CoreAdminParams.NAME, this.name);
+    result.put(SolrSnapshotManager.SNAPSHOT_STATUS, this.status.toString());
+    result.put(SolrSnapshotManager.CREATION_DATE, this.getCreationDate().getTime());
+    result.put(SolrSnapshotManager.SNAPSHOT_REPLICAS, this.replicaSnapshots);
+    arg0.write(result);
+  }
+
+  @SuppressWarnings({"rawtypes", "unchecked"})
+  public NamedList toNamedList() {
+    NamedList result = new NamedList();
+    result.add(CoreAdminParams.NAME, this.name);
+    result.add(SolrSnapshotManager.SNAPSHOT_STATUS, this.status.toString());
+    result.add(SolrSnapshotManager.CREATION_DATE, this.getCreationDate().getTime());
+
+    NamedList replicas = new NamedList();
+    for (CoreSnapshotMetaData x : replicaSnapshots) {
+      replicas.add(x.getCoreName(), x.toNamedList());
+    }
+    result.add(SolrSnapshotManager.SNAPSHOT_REPLICAS, replicas);
+
+    return result;
+  }
+}
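
To make the shape of this meta-data concrete, here is a minimal sketch that builds a single-replica CollectionSnapshotMetaData and serializes it with toNamedList(), using only the constructors shown above; the core name, index path and file names are purely illustrative.

  import java.util.Arrays;
  import java.util.Collections;
  import java.util.Date;

  import org.apache.solr.common.util.NamedList;
  import org.apache.solr.core.snapshots.CollectionSnapshotMetaData;
  import org.apache.solr.core.snapshots.CollectionSnapshotMetaData.CoreSnapshotMetaData;
  import org.apache.solr.core.snapshots.CollectionSnapshotMetaData.SnapshotStatus;

  class SnapshotMetaDataSketch {
    public static void main(String[] args) {
      // One per-core entry; core name, index path and file names are made up for illustration.
      CoreSnapshotMetaData core = new CoreSnapshotMetaData(
          "collection1_shard1_replica1",          // core name
          "/var/solr/data/collection1/index",     // index directory path
          5L,                                     // commit generation
          "shard1",                               // shard id
          true,                                   // this core was the shard leader
          Arrays.asList("segments_5", "_0.cfs")); // files belonging to the commit

      // Collection-level record, as stored in ZooKeeper once all shards succeed.
      CollectionSnapshotMetaData meta = new CollectionSnapshotMetaData(
          "snapshot-1", SnapshotStatus.Successful, new Date(), Collections.singletonList(core));

      NamedList<?> asNamedList = meta.toNamedList();
      System.out.println(meta.getShards());           // [shard1]
      System.out.println(asNamedList.get("status"));  // Successful ("status" = SolrSnapshotManager.SNAPSHOT_STATUS)
    }
  }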

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/57ba9614/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotManager.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotManager.java b/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotManager.java
index 4257baf..354307d 100644
--- a/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotManager.java
+++ b/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotManager.java
@@ -18,9 +18,12 @@ package org.apache.solr.core.snapshots;
 
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
+import java.util.Optional;
 import java.util.Set;
 import org.apache.lucene.index.IndexCommit;
 import org.apache.lucene.index.IndexDeletionPolicy;
@@ -28,9 +31,13 @@ import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.index.NoMergePolicy;
 import org.apache.lucene.store.Directory;
+import org.apache.solr.common.cloud.SolrZkClient;
+import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.snapshots.SolrSnapshotMetaDataManager.SnapshotMetaData;
 import org.apache.solr.update.SolrIndexWriter;
+import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -40,6 +47,172 @@ import org.slf4j.LoggerFactory;
 public class SolrSnapshotManager {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
+  public static final String INDEX_DIR_PATH = "indexDirPath";
+  public static final String GENERATION_NUM = "generation";
+  public static final String SNAPSHOT_STATUS = "status";
+  public static final String CREATION_DATE = "creationDate";
+  public static final String SNAPSHOT_REPLICAS = "replicas";
+  public static final String SNAPSHOTS_INFO = "snapshots";
+  public static final String LEADER = "leader";
+  public static final String SHARD_ID = "shard_id";
+  public static final String FILE_LIST = "files";
+
+  /**
+   * This method returns if a named snapshot exists for the specified collection.
+   *
+   * @param zkClient Zookeeper client
+   * @param collectionName The name of the collection
+   * @param commitName The name of the snapshot
+   * @return true if the named snapshot exists
+   *         false Otherwise
+   * @throws KeeperException In case of Zookeeper error
+   * @throws InterruptedException In case of thread interruption.
+   */
+  public static boolean snapshotExists(SolrZkClient zkClient, String collectionName, String commitName)
+      throws KeeperException, InterruptedException {
+    String zkPath = getSnapshotMetaDataZkPath(collectionName, Optional.ofNullable(commitName));
+    return zkClient.exists(zkPath, true);
+  }
+
+  /**
+   * This method creates an entry for the named snapshot for the specified collection in Zookeeper.
+   *
+   * @param zkClient Zookeeper client
+   * @param collectionName The name of the collection
+   * @param meta The {@linkplain CollectionSnapshotMetaData} corresponding to named snapshot
+   * @throws KeeperException In case of Zookeeper error
+   * @throws InterruptedException In case of thread interruption.
+   */
+  public static void createCollectionLevelSnapshot(SolrZkClient zkClient, String collectionName,
+      CollectionSnapshotMetaData meta) throws KeeperException, InterruptedException {
+    String zkPath = getSnapshotMetaDataZkPath(collectionName, Optional.of(meta.getName()));
+    zkClient.makePath(zkPath, Utils.toJSON(meta), CreateMode.PERSISTENT, true);
+  }
+
+  /**
+   * This method updates an entry for the named snapshot for the specified collection in Zookeeper.
+   *
+   * @param zkClient Zookeeper client
+   * @param collectionName  The name of the collection
+   * @param meta The {@linkplain CollectionSnapshotMetaData} corresponding to named snapshot
+   * @throws KeeperException In case of Zookeeper error
+   * @throws InterruptedException In case of thread interruption.
+   */
+  public static void updateCollectionLevelSnapshot(SolrZkClient zkClient, String collectionName,
+      CollectionSnapshotMetaData meta) throws KeeperException, InterruptedException {
+    String zkPath = getSnapshotMetaDataZkPath(collectionName, Optional.of(meta.getName()));
+    zkClient.setData(zkPath, Utils.toJSON(meta), -1, true);
+  }
+
+  /**
+   * This method deletes an entry for the named snapshot for the specified collection in Zookeeper.
+   *
+   * @param zkClient Zookeeper client
+   * @param collectionName The name of the collection
+   * @param commitName  The name of the snapshot
+   * @throws InterruptedException In case of thread interruption.
+   * @throws KeeperException  In case of Zookeeper error
+   */
+  public static void deleteCollectionLevelSnapshot(SolrZkClient zkClient, String collectionName, String commitName)
+      throws InterruptedException, KeeperException {
+    String zkPath = getSnapshotMetaDataZkPath(collectionName, Optional.of(commitName));
+    zkClient.delete(zkPath, -1, true);
+  }
+
+  /**
+   * This method deletes all snapshots for the specified collection in Zookeeper.
+   *
+   * @param zkClient  Zookeeper client
+   * @param collectionName The name of the collection
+   * @throws InterruptedException In case of thread interruption.
+   * @throws KeeperException In case of Zookeeper error
+   */
+  public static void cleanupCollectionLevelSnapshots(SolrZkClient zkClient, String collectionName)
+      throws InterruptedException, KeeperException {
+    String zkPath = getSnapshotMetaDataZkPath(collectionName, Optional.empty());
+    try {
+      // Delete the meta-data for each snapshot.
+      Collection<String> snapshots = zkClient.getChildren(zkPath, null, true);
+      for (String snapshot : snapshots) {
+        String path = getSnapshotMetaDataZkPath(collectionName, Optional.of(snapshot));
+        try {
+          zkClient.delete(path, -1, true);
+        } catch (KeeperException ex) {
+          // Gracefully handle the case when the zk node doesn't exist
+          if ( ex.code() != KeeperException.Code.NONODE ) {
+            throw ex;
+          }
+        }
+      }
+
+      // Delete the parent node.
+      zkClient.delete(zkPath, -1, true);
+    } catch (KeeperException ex) {
+      // Gracefully handle the case when the zk node doesn't exist (e.g. if no snapshots were created for this collection).
+      if ( ex.code() != KeeperException.Code.NONODE ) {
+        throw ex;
+      }
+    }
+  }
+
+  /**
+   * This method returns the {@linkplain CollectionSnapshotMetaData} for the named snapshot for the specified collection in Zookeeper.
+   *
+   * @param zkClient  Zookeeper client
+   * @param collectionName  The name of the collection
+   * @param commitName The name of the snapshot
+   * @return (Optional) the {@linkplain CollectionSnapshotMetaData}
+   * @throws InterruptedException In case of thread interruption.
+   * @throws KeeperException In case of Zookeeper error
+   */
+  public static Optional<CollectionSnapshotMetaData> getCollectionLevelSnapshot(SolrZkClient zkClient, String collectionName, String commitName)
+      throws InterruptedException, KeeperException {
+    String zkPath = getSnapshotMetaDataZkPath(collectionName, Optional.of(commitName));
+    try {
+      Map<String, Object> data = (Map<String, Object>)Utils.fromJSON(zkClient.getData(zkPath, null, null, true));
+      return Optional.of(new CollectionSnapshotMetaData(data));
+    } catch (KeeperException ex) {
+      // Gracefully handle the case when the zk node for a specific
+      // snapshot doesn't exist (e.g. due to a concurrent delete operation).
+      if ( ex.code() == KeeperException.Code.NONODE ) {
+        return Optional.empty();
+      }
+      throw ex;
+    }
+  }
+
+  /**
+   * This method returns the {@linkplain CollectionSnapshotMetaData} for each named snapshot for the specified collection in Zookeeper.
+   *
+   * @param zkClient Zookeeper client
+   * @param collectionName The name of the collection
+   * @return the {@linkplain CollectionSnapshotMetaData} for each named snapshot
+   * @throws InterruptedException In case of thread interruption.
+   * @throws KeeperException In case of Zookeeper error
+   */
+  public static Collection<CollectionSnapshotMetaData> listSnapshots(SolrZkClient zkClient, String collectionName)
+      throws InterruptedException, KeeperException {
+    Collection<CollectionSnapshotMetaData> result = new ArrayList<>();
+    String zkPath = getSnapshotMetaDataZkPath(collectionName, Optional.empty());
+
+    try {
+      Collection<String> snapshots = zkClient.getChildren(zkPath, null, true);
+      for (String snapshot : snapshots) {
+        Optional<CollectionSnapshotMetaData> s = getCollectionLevelSnapshot(zkClient, collectionName, snapshot);
+        if (s.isPresent()) {
+          result.add(s.get());
+        }
+      }
+    } catch (KeeperException ex) {
+      // Gracefully handle the case when the zk node doesn't exist (e.g. due to a concurrent delete collection operation).
+      if ( ex.code() != KeeperException.Code.NONODE ) {
+        throw ex;
+      }
+    }
+    return result;
+  }
+
+
   /**
    * This method deletes index files of the {@linkplain IndexCommit} for the specified generation number.
    *
@@ -117,4 +290,11 @@ public class SolrSnapshotManager {
       // Note the index writer creates a new commit during the close() operation (which is harmless).
     }
   }
+
+  private static String getSnapshotMetaDataZkPath(String collectionName, Optional<String> commitName) {
+    if (commitName.isPresent()) {
+      return "/snapshots/"+collectionName+"/"+commitName.get();
+    }
+    return "/snapshots/"+collectionName;
+  }
 }
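
The helpers above are thin wrappers around SolrZkClient operations on the /snapshots/<collection>/<snapshotName> nodes built by getSnapshotMetaDataZkPath(). A minimal usage sketch, assuming an already-connected SolrZkClient named zkClient and a collection named "techproducts" (both placeholders; the enclosing method must declare KeeperException and InterruptedException):

    import java.util.Optional;
    import org.apache.solr.common.cloud.SolrZkClient;
    import org.apache.solr.core.snapshots.CollectionSnapshotMetaData;
    import org.apache.solr.core.snapshots.SolrSnapshotManager;

    // Enumerate the collection-level snapshots recorded in ZK and read their meta-data.
    String collection = "techproducts";
    for (CollectionSnapshotMetaData meta : SolrSnapshotManager.listSnapshots(zkClient, collection)) {
      // getCollectionLevelSnapshot() returns Optional.empty() if the node was deleted concurrently.
      Optional<CollectionSnapshotMetaData> snap =
          SolrSnapshotManager.getCollectionLevelSnapshot(zkClient, collection, meta.getName());
      snap.ifPresent(m -> System.out.println("snapshot: " + m.getName()));
    }
    // Remove all snapshot meta-data (and the parent node) once the collection goes away.
    SolrSnapshotManager.cleanupCollectionLevelSnapshots(zkClient, collection);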

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/57ba9614/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
index 3e134d5..e290ccb 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
@@ -77,6 +77,8 @@ import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.CloudConfig;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.backup.repository.BackupRepository;
+import org.apache.solr.core.snapshots.CollectionSnapshotMetaData;
+import org.apache.solr.core.snapshots.SolrSnapshotManager;
 import org.apache.solr.handler.RequestHandlerBase;
 import org.apache.solr.handler.component.ShardHandler;
 import org.apache.solr.request.SolrQueryRequest;
@@ -732,7 +734,7 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
         throw new SolrException(ErrorCode.SERVER_ERROR, "Failed to check the existance of " + uri + ". Is it valid?", ex);
       }
 
-      Map<String, Object> params = req.getParams().getAll(null, NAME, COLLECTION_PROP);
+      Map<String, Object> params = req.getParams().getAll(null, NAME, COLLECTION_PROP, CoreAdminParams.COMMIT_NAME);
       params.put(CoreAdminParams.BACKUP_LOCATION, location);
       return params;
     }),
@@ -778,7 +780,57 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
       copyPropertiesWithPrefix(req.getParams(), params, COLL_PROP_PREFIX);
       return params;
     }),
+    CREATESNAPSHOT_OP(CREATESNAPSHOT, (req, rsp, h) -> {
+      req.getParams().required().check(COLLECTION_PROP, CoreAdminParams.COMMIT_NAME);
+
+      String collectionName = req.getParams().get(COLLECTION_PROP);
+      String commitName = req.getParams().get(CoreAdminParams.COMMIT_NAME);
+      ClusterState clusterState = h.coreContainer.getZkController().getClusterState();
+      if (!clusterState.hasCollection(collectionName)) {
+        throw new SolrException(ErrorCode.BAD_REQUEST, "Collection '" + collectionName + "' does not exist, no action taken.");
+      }
+
+      SolrZkClient client = h.coreContainer.getZkController().getZkClient();
+      if (SolrSnapshotManager.snapshotExists(client, collectionName, commitName)) {
+        throw new SolrException(ErrorCode.BAD_REQUEST,
+            "Snapshot with name '" + commitName + "' already exists for collection '"
+                + collectionName + "', no action taken.");
+      }
+
+      Map<String, Object> params = req.getParams().getAll(null, COLLECTION_PROP, CoreAdminParams.COMMIT_NAME);
+      return params;
+    }),
+    DELETESNAPSHOT_OP(DELETESNAPSHOT, (req, rsp, h) -> {
+      req.getParams().required().check(COLLECTION_PROP, CoreAdminParams.COMMIT_NAME);
+
+      String collectionName = req.getParams().get(COLLECTION_PROP);
+      ClusterState clusterState = h.coreContainer.getZkController().getClusterState();
+      if (!clusterState.hasCollection(collectionName)) {
+        throw new SolrException(ErrorCode.BAD_REQUEST, "Collection '" + collectionName + "' does not exist, no action taken.");
+      }
+
+      Map<String, Object> params = req.getParams().getAll(null, COLLECTION_PROP, CoreAdminParams.COMMIT_NAME);
+      return params;
+    }),
+    LISTSNAPSHOTS_OP(LISTSNAPSHOTS, (req, rsp, h) -> {
+      req.getParams().required().check(COLLECTION_PROP);
+
+      String collectionName = req.getParams().get(COLLECTION_PROP);
+      ClusterState clusterState = h.coreContainer.getZkController().getClusterState();
+      if (!clusterState.hasCollection(collectionName)) {
+        throw new SolrException(ErrorCode.BAD_REQUEST, "Collection '" + collectionName + "' does not exist, no action taken.");
+      }
 
+      NamedList<Object> snapshots = new NamedList<Object>();
+      SolrZkClient client = h.coreContainer.getZkController().getZkClient();
+      Collection<CollectionSnapshotMetaData> m = SolrSnapshotManager.listSnapshots(client, collectionName);
+      for (CollectionSnapshotMetaData meta : m) {
+        snapshots.add(meta.getName(), meta.toNamedList());
+      }
+
+      rsp.add(SolrSnapshotManager.SNAPSHOTS_INFO, snapshots);
+      return null;
+    }),
     REPLACENODE_OP(REPLACENODE, (req, rsp, h) -> req.getParams().required().getAll(req.getParams().getAll(null, "parallel"), "source", "target")),
     DELETENODE_OP(DELETENODE, (req, rsp, h) -> req.getParams().required().getAll(null, "node"));
     public final CollectionOp fun;
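
For reference, the new collection-level actions go through the regular Collections API endpoint. Illustrative requests only (host, collection, snapshot and backup names are placeholders; this assumes CoreAdminParams.COMMIT_NAME resolves to the commitName request parameter, as the parameter handling above implies):

    http://localhost:8983/solr/admin/collections?action=CREATESNAPSHOT&collection=techproducts&commitName=mySnap
    http://localhost:8983/solr/admin/collections?action=LISTSNAPSHOTS&collection=techproducts
    http://localhost:8983/solr/admin/collections?action=DELETESNAPSHOT&collection=techproducts&commitName=mySnap
    http://localhost:8983/solr/admin/collections?action=BACKUP&name=mybackup&collection=techproducts&location=/path/to/backups&commitName=mySnap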

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/57ba9614/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java
index 0b17d9e..5836ed3 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java
@@ -37,6 +37,7 @@ import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.CoreDescriptor;
 import org.apache.solr.core.DirectoryFactory;
 import org.apache.solr.core.SolrCore;
+import org.apache.solr.core.snapshots.SolrSnapshotManager;
 import org.apache.solr.core.snapshots.SolrSnapshotMetaDataManager;
 import org.apache.solr.core.snapshots.SolrSnapshotMetaDataManager.SnapshotMetaData;
 import org.apache.solr.handler.admin.CoreAdminHandler.CoreAdminOp;
@@ -270,12 +271,12 @@ enum CoreAdminOperation implements CoreAdminOp {
         Optional<SnapshotMetaData> metadata = mgr.getSnapshotMetaData(name);
         if ( metadata.isPresent() ) {
           NamedList<String> props = new NamedList<>();
-          props.add("generation", String.valueOf(metadata.get().getGenerationNumber()));
-          props.add("indexDirPath", metadata.get().getIndexDirPath());
+          props.add(SolrSnapshotManager.GENERATION_NUM, String.valueOf(metadata.get().getGenerationNumber()));
+          props.add(SolrSnapshotManager.INDEX_DIR_PATH, metadata.get().getIndexDirPath());
           result.add(name, props);
         }
       }
-      it.rsp.add("snapshots", result);
+      it.rsp.add(SolrSnapshotManager.SNAPSHOTS_INFO, result);
     }
   });
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/57ba9614/solr/core/src/java/org/apache/solr/handler/admin/CreateSnapshotOp.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CreateSnapshotOp.java b/solr/core/src/java/org/apache/solr/handler/admin/CreateSnapshotOp.java
index 81f56c4..63b052b 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CreateSnapshotOp.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CreateSnapshotOp.java
@@ -23,6 +23,7 @@ import org.apache.solr.common.params.CoreAdminParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.SolrCore;
+import org.apache.solr.core.snapshots.SolrSnapshotManager;
 import org.apache.solr.core.snapshots.SolrSnapshotMetaDataManager;
 import org.apache.solr.search.SolrIndexSearcher;
 import org.apache.solr.util.RefCounted;
@@ -53,10 +54,11 @@ class CreateSnapshotOp implements CoreAdminHandler.CoreAdminOp {
       SolrSnapshotMetaDataManager mgr = core.getSnapshotMetaDataManager();
       mgr.snapshot(commitName, indexDirPath, ic.getGeneration());
 
-      it.rsp.add("core", core.getName());
-      it.rsp.add("commitName", commitName);
-      it.rsp.add("indexDirPath", indexDirPath);
-      it.rsp.add("generation", ic.getGeneration());
+      it.rsp.add(CoreAdminParams.CORE, core.getName());
+      it.rsp.add(CoreAdminParams.COMMIT_NAME, commitName);
+      it.rsp.add(SolrSnapshotManager.INDEX_DIR_PATH, indexDirPath);
+      it.rsp.add(SolrSnapshotManager.GENERATION_NUM, ic.getGeneration());
+      it.rsp.add(SolrSnapshotManager.FILE_LIST, ic.getFileNames());
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/57ba9614/solr/core/src/java/org/apache/solr/handler/admin/DeleteSnapshotOp.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/DeleteSnapshotOp.java b/solr/core/src/java/org/apache/solr/handler/admin/DeleteSnapshotOp.java
index 739837c..ee77282 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/DeleteSnapshotOp.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/DeleteSnapshotOp.java
@@ -40,6 +40,10 @@ class DeleteSnapshotOp implements CoreAdminHandler.CoreAdminOp {
 
     try {
       core.deleteNamedSnapshot(commitName);
+      // Ideally we shouldn't need this. It is added because the RPC logic in
+      // OverseerCollectionMessageHandler cannot provide the coreName as part of the result.
+      it.rsp.add(CoreAdminParams.CORE, core.getName());
+      it.rsp.add(CoreAdminParams.COMMIT_NAME, commitName);
     } finally {
       core.close();
     }
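
The extra response keys let the collection-level handler, which drives these core admin calls through the Overseer RPC, attribute each core-level result to a specific replica. A small consumer-side sketch, assuming resp is the NamedList<Object> returned for one core (resp is a placeholder name, not part of this patch):

    // Read back which core and snapshot this per-core result refers to.
    String coreName   = (String) resp.get(CoreAdminParams.CORE);
    String commitName = (String) resp.get(CoreAdminParams.COMMIT_NAME);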

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/57ba9614/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCloudSnapshots.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCloudSnapshots.java b/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCloudSnapshots.java
new file mode 100644
index 0000000..65f74ca
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCloudSnapshots.java
@@ -0,0 +1,285 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.core.snapshots;
+
+import static org.apache.solr.common.cloud.ZkStateReader.BASE_URL_PROP;
+
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+import org.apache.lucene.util.TestUtil;
+import org.apache.lucene.util.LuceneTestCase.Slow;
+import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.request.CoreAdminRequest.ListSnapshots;
+import org.apache.solr.client.solrj.response.CollectionAdminResponse;
+import org.apache.solr.client.solrj.response.RequestStatusState;
+import org.apache.solr.cloud.AbstractDistribZkTestBase;
+import org.apache.solr.cloud.SolrCloudTestCase;
+import org.apache.solr.common.cloud.DocCollection;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Replica.State;
+import org.apache.solr.common.cloud.Slice;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.core.snapshots.CollectionSnapshotMetaData.CoreSnapshotMetaData;
+import org.apache.solr.core.snapshots.SolrSnapshotMetaDataManager.SnapshotMetaData;
+import org.apache.solr.handler.BackupRestoreUtils;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SolrTestCaseJ4.SuppressSSL // Currently unknown why SSL does not work with this test
+@Slow
+public class TestSolrCloudSnapshots extends SolrCloudTestCase {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static long docsSeed; // see indexDocs()
+  private static final int NUM_SHARDS = 2;
+  private static final int NUM_REPLICAS = 2;
+  private static final int NUM_NODES = NUM_REPLICAS * NUM_SHARDS;
+
+  @BeforeClass
+  public static void setupClass() throws Exception {
+    useFactory("solr.StandardDirectoryFactory");
+    configureCluster(NUM_NODES)// nodes
+        .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf"))
+        .configure();
+
+    docsSeed = random().nextLong();
+  }
+
+  @AfterClass
+  public static void teardownClass() throws Exception {
+    System.clearProperty("test.build.data");
+    System.clearProperty("test.cache.data");
+  }
+
+  @Test
+  public void testSnapshots() throws Exception {
+    CloudSolrClient solrClient = cluster.getSolrClient();
+    String collectionName = "SolrCloudSnapshots";
+    CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName, "conf1", NUM_SHARDS, NUM_REPLICAS);
+    create.process(solrClient);
+
+    int nDocs = BackupRestoreUtils.indexDocs(cluster.getSolrClient(), collectionName, docsSeed);
+    BackupRestoreUtils.verifyDocs(nDocs, solrClient, collectionName);
+
+    String commitName = TestUtil.randomSimpleString(random(), 1, 5);
+
+    // Verify if snapshot creation works with replica failures.
+    boolean replicaFailures = usually();
+    Optional<String> stoppedCoreName = Optional.empty();
+    if (replicaFailures) {
+      // Here the assumption is that Solr will spread the replicas uniformly across nodes.
+      // If this is not true for some reason, then we will need to add some logic to find a
+      // node with a single replica.
+      this.cluster.getRandomJetty(random()).stop();
+
+      // Sleep a bit for allowing ZK watch to fire.
+      Thread.sleep(5000);
+
+      // Figure out if at least one replica is "down".
+      DocCollection collState = solrClient.getZkStateReader().getClusterState().getCollection(collectionName);
+      for (Slice s : collState.getSlices()) {
+        for (Replica replica : s.getReplicas()) {
+          if (replica.getState() == State.DOWN) {
+            stoppedCoreName = Optional.of(replica.getCoreName());
+          }
+        }
+      }
+    }
+
+    int expectedCoresWithSnapshot = stoppedCoreName.isPresent() ? (NUM_SHARDS * NUM_REPLICAS) - 1 : (NUM_SHARDS * NUM_REPLICAS);
+
+    CollectionAdminRequest.CreateSnapshot createSnap = new CollectionAdminRequest.CreateSnapshot(collectionName, commitName);
+    createSnap.process(solrClient);
+
+    Collection<CollectionSnapshotMetaData> collectionSnaps = listCollectionSnapshots(solrClient, collectionName);
+    assertEquals(1, collectionSnaps.size());
+    CollectionSnapshotMetaData meta = collectionSnaps.iterator().next();
+    assertEquals(commitName, meta.getName());
+    assertEquals(CollectionSnapshotMetaData.SnapshotStatus.Successful, meta.getStatus());
+    assertEquals(expectedCoresWithSnapshot, meta.getReplicaSnapshots().size());
+    Map<String, CoreSnapshotMetaData> snapshotByCoreName = meta.getReplicaSnapshots().stream()
+        .collect(Collectors.toMap(CoreSnapshotMetaData::getCoreName, Function.identity()));
+
+    DocCollection collectionState = solrClient.getZkStateReader().getClusterState().getCollection(collectionName);
+    assertEquals(2, collectionState.getActiveSlices().size());
+    for ( Slice shard : collectionState.getActiveSlices() ) {
+      assertEquals(2, shard.getReplicas().size());
+      for (Replica replica : shard.getReplicas()) {
+        if (stoppedCoreName.isPresent() && stoppedCoreName.get().equals(replica.getCoreName())) {
+          continue; // We know that the snapshot is not created for this replica.
+        }
+
+        String replicaBaseUrl = replica.getStr(BASE_URL_PROP);
+        String coreName = replica.getStr(ZkStateReader.CORE_NAME_PROP);
+
+        assertTrue(snapshotByCoreName.containsKey(coreName));
+        CoreSnapshotMetaData coreSnapshot = snapshotByCoreName.get(coreName);
+
+        try (SolrClient adminClient = getHttpSolrClient(replicaBaseUrl)) {
+          Collection<SnapshotMetaData> snapshots = listCoreSnapshots(adminClient, coreName);
+          Optional<SnapshotMetaData> metaData = snapshots.stream().filter(x -> commitName.equals(x.getName())).findFirst();
+          assertTrue("Snapshot not created for core " + coreName, metaData.isPresent());
+          assertEquals(coreSnapshot.getIndexDirPath(), metaData.get().getIndexDirPath());
+          assertEquals(coreSnapshot.getGenerationNumber(), metaData.get().getGenerationNumber());
+        }
+      }
+    }
+
+    // Delete all documents.
+    {
+      solrClient.deleteByQuery(collectionName, "*:*");
+      solrClient.commit(collectionName);
+      BackupRestoreUtils.verifyDocs(0, solrClient, collectionName);
+    }
+
+    String backupLocation = createTempDir().toFile().getAbsolutePath();
+    String backupName = "mytestbackup";
+    String restoreCollectionName = collectionName + "_restored";
+
+    //Create a backup using the earlier created snapshot.
+    {
+      CollectionAdminRequest.Backup backup = CollectionAdminRequest.backupCollection(collectionName, backupName)
+          .setLocation(backupLocation).setCommitName(commitName);
+      if (random().nextBoolean()) {
+        assertEquals(0, backup.process(solrClient).getStatus());
+      } else {
+        assertEquals(RequestStatusState.COMPLETED, backup.processAndWait(solrClient, 30));//async
+      }
+    }
+
+    // Restore backup.
+    {
+      CollectionAdminRequest.Restore restore = CollectionAdminRequest.restoreCollection(restoreCollectionName, backupName)
+          .setLocation(backupLocation);
+      if (random().nextBoolean()) {
+        assertEquals(0, restore.process(solrClient).getStatus());
+      } else {
+        assertEquals(RequestStatusState.COMPLETED, restore.processAndWait(solrClient, 30));//async
+      }
+      AbstractDistribZkTestBase.waitForRecoveriesToFinish(
+          restoreCollectionName, cluster.getSolrClient().getZkStateReader(), log.isDebugEnabled(), true, 30);
+      BackupRestoreUtils.verifyDocs(nDocs, solrClient, restoreCollectionName);
+    }
+
+    // Verify if the snapshot deletion works correctly when one or more replicas containing the snapshot are
+    // deleted
+    boolean replicaDeletion = rarely();
+    if (replicaDeletion) {
+      CoreSnapshotMetaData replicaToDelete = null;
+      for (String shardId : meta.getShards()) {
+        List<CoreSnapshotMetaData> replicas = meta.getReplicaSnapshotsForShard(shardId);
+        if (replicas.size() > 1) {
+          int r_index = random().nextInt(replicas.size());
+          replicaToDelete = replicas.get(r_index);
+        }
+      }
+
+      if (replicaToDelete != null) {
+        collectionState = solrClient.getZkStateReader().getClusterState().getCollection(collectionName);
+        for (Slice s : collectionState.getSlices()) {
+          for (Replica r : s.getReplicas()) {
+            if (r.getCoreName().equals(replicaToDelete.getCoreName())) {
+              log.info("Deleting replica {}", r);
+              CollectionAdminRequest.DeleteReplica delReplica = CollectionAdminRequest.deleteReplica(collectionName,
+                  replicaToDelete.getShardId(), r.getName());
+              delReplica.process(solrClient);
+              // The replica deletion will cleanup the snapshot meta-data.
+              snapshotByCoreName.remove(r.getCoreName());
+              break;
+            }
+          }
+        }
+      }
+    }
+
+    // Delete snapshot
+    CollectionAdminRequest.DeleteSnapshot deleteSnap = new CollectionAdminRequest.DeleteSnapshot(collectionName, commitName);
+    deleteSnap.process(solrClient);
+
+    // Wait for a while so that the clusterstate.json updates are propagated to the client side.
+    Thread.sleep(2000);
+    collectionState = solrClient.getZkStateReader().getClusterState().getCollection(collectionName);
+
+    for ( Slice shard : collectionState.getActiveSlices() ) {
+      for (Replica replica : shard.getReplicas()) {
+        if (stoppedCoreName.isPresent() && stoppedCoreName.get().equals(replica.getCoreName())) {
+          continue; // We know that the snapshot was not created for this replica.
+        }
+
+        String replicaBaseUrl = replica.getStr(BASE_URL_PROP);
+        String coreName = replica.getStr(ZkStateReader.CORE_NAME_PROP);
+
+        try (SolrClient adminClient = getHttpSolrClient(replicaBaseUrl)) {
+          Collection<SnapshotMetaData> snapshots = listCoreSnapshots(adminClient, coreName);
+          Optional<SnapshotMetaData> metaData = snapshots.stream().filter(x -> commitName.equals(x.getName())).findFirst();
+          assertFalse("Snapshot not deleted for core " + coreName, metaData.isPresent());
+          // Remove the entry for core if the snapshot is deleted successfully.
+          snapshotByCoreName.remove(coreName);
+        }
+      }
+    }
+
+    // Verify all core-level snapshots are deleted.
+    assertTrue("The cores remaining " + snapshotByCoreName, snapshotByCoreName.isEmpty());
+    assertTrue(listCollectionSnapshots(solrClient, collectionName).isEmpty());
+  }
+
+  private Collection<CollectionSnapshotMetaData> listCollectionSnapshots(SolrClient adminClient, String collectionName) throws Exception {
+    CollectionAdminRequest.ListSnapshots listSnapshots = new CollectionAdminRequest.ListSnapshots(collectionName);
+    CollectionAdminResponse resp = listSnapshots.process(adminClient);
+
+    assertTrue( resp.getResponse().get(SolrSnapshotManager.SNAPSHOTS_INFO) instanceof NamedList );
+    NamedList apiResult = (NamedList) resp.getResponse().get(SolrSnapshotManager.SNAPSHOTS_INFO);
+
+    Collection<CollectionSnapshotMetaData> result = new ArrayList<>();
+    for (int i = 0; i < apiResult.size(); i++) {
+      result.add(new CollectionSnapshotMetaData((NamedList<Object>)apiResult.getVal(i)));
+    }
+
+    return result;
+  }
+
+  private Collection<SnapshotMetaData> listCoreSnapshots(SolrClient adminClient, String coreName) throws Exception {
+    ListSnapshots req = new ListSnapshots();
+    req.setCoreName(coreName);
+    NamedList resp = adminClient.request(req);
+    assertTrue( resp.get(SolrSnapshotManager.SNAPSHOTS_INFO) instanceof NamedList );
+    NamedList apiResult = (NamedList) resp.get(SolrSnapshotManager.SNAPSHOTS_INFO);
+
+    List<SnapshotMetaData> result = new ArrayList<>(apiResult.size());
+    for(int i = 0 ; i < apiResult.size(); i++) {
+      String commitName = apiResult.getName(i);
+      String indexDirPath = (String)((NamedList)apiResult.get(commitName)).get(SolrSnapshotManager.INDEX_DIR_PATH);
+      long genNumber = Long.valueOf((String)((NamedList)apiResult.get(commitName)).get(SolrSnapshotManager.GENERATION_NUM));
+      result.add(new SnapshotMetaData(commitName, indexDirPath, genNumber));
+    }
+    return result;
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/57ba9614/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
index c1f8261..72406ef 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
@@ -706,8 +706,9 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
   // BACKUP request
   public static class Backup extends AsyncCollectionSpecificAdminRequest {
     protected final String name;
-    protected Optional<String> repositoryName;
+    protected Optional<String> repositoryName = Optional.empty();
     protected String location;
+    protected Optional<String> commitName = Optional.empty();
 
     public Backup(String collection, String name) {
       super(CollectionAction.BACKUP, collection);
@@ -746,6 +747,15 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
       return this;
     }
 
+    public Optional<String> getCommitName() {
+      return commitName;
+    }
+
+    public Backup setCommitName(String commitName) {
+      this.commitName = Optional.ofNullable(commitName);
+      return this;
+    }
+
     @Override
     public SolrParams getParams() {
       ModifiableSolrParams params = (ModifiableSolrParams) super.getParams();
@@ -755,6 +765,9 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
       if (repositoryName.isPresent()) {
         params.set(CoreAdminParams.BACKUP_REPOSITORY, repositoryName.get());
       }
+      if (commitName.isPresent()) {
+        params.set(CoreAdminParams.COMMIT_NAME, commitName.get());
+      }
       return params;
     }
 
@@ -767,7 +780,7 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
   // RESTORE request
   public static class Restore extends AsyncCollectionSpecificAdminRequest {
     protected final String backupName;
-    protected Optional<String> repositoryName;
+    protected Optional<String> repositoryName = Optional.empty();
     protected String location;
 
     // in common with collection creation:
@@ -860,6 +873,105 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
 
   }
 
+  // Note: This method is added because the solrj module does not use the Google
+  // Guava library. Also, the changes committed for SOLR-8765 result in a wrong
+  // error message when the "collection" parameter is specified as null,
+  // because the setCollectionName method is deprecated.
+  static <T> T checkNotNull(String param, T value) {
+    if (value == null) {
+      throw new NullPointerException("Please specify a value for parameter " + param);
+    }
+    return value;
+  }
+
+  @SuppressWarnings("serial")
+  public static class CreateSnapshot extends AsyncCollectionSpecificAdminRequest {
+    protected final String commitName;
+
+    public CreateSnapshot(String collection, String commitName) {
+      super(CollectionAction.CREATESNAPSHOT, checkNotNull(CoreAdminParams.COLLECTION ,collection));
+      this.commitName = checkNotNull(CoreAdminParams.COMMIT_NAME, commitName);
+    }
+
+    public String getCollectionName() {
+      return collection;
+    }
+
+    public String getCommitName() {
+      return commitName;
+    }
+
+    @Override
+    public AsyncCollectionSpecificAdminRequest setCollectionName (String collection) {
+      this.collection = checkNotNull(CoreAdminParams.COLLECTION ,collection);
+      return this;
+    }
+
+    @Override
+    public SolrParams getParams() {
+      ModifiableSolrParams params = (ModifiableSolrParams) super.getParams();
+      params.set(CoreAdminParams.COLLECTION, collection);
+      params.set(CoreAdminParams.COMMIT_NAME, commitName);
+      return params;
+    }
+  }
+
+  @SuppressWarnings("serial")
+  public static class DeleteSnapshot extends AsyncCollectionSpecificAdminRequest {
+    protected final String commitName;
+
+    public DeleteSnapshot (String collection, String commitName) {
+      super(CollectionAction.DELETESNAPSHOT, checkNotNull(CoreAdminParams.COLLECTION ,collection));
+      this.commitName = checkNotNull(CoreAdminParams.COMMIT_NAME, commitName);
+    }
+
+    public String getCollectionName() {
+      return collection;
+    }
+
+    public String getCommitName() {
+      return commitName;
+    }
+
+    @Override
+    public AsyncCollectionSpecificAdminRequest setCollectionName (String collection) {
+      this.collection = checkNotNull(CoreAdminParams.COLLECTION ,collection);
+      return this;
+    }
+
+    @Override
+    public SolrParams getParams() {
+      ModifiableSolrParams params = (ModifiableSolrParams) super.getParams();
+      params.set(CoreAdminParams.COLLECTION, collection);
+      params.set(CoreAdminParams.COMMIT_NAME, commitName);
+      return params;
+    }
+  }
+
+  @SuppressWarnings("serial")
+  public static class ListSnapshots extends AsyncCollectionSpecificAdminRequest {
+    public ListSnapshots (String collection) {
+      super(CollectionAction.LISTSNAPSHOTS, checkNotNull(CoreAdminParams.COLLECTION ,collection));
+    }
+
+    public String getCollectionName() {
+      return collection;
+    }
+
+    @Override
+    public AsyncCollectionSpecificAdminRequest setCollectionName (String collection) {
+      this.collection = checkNotNull(CoreAdminParams.COLLECTION ,collection);
+      return this;
+    }
+
+    @Override
+    public SolrParams getParams() {
+      ModifiableSolrParams params = (ModifiableSolrParams) super.getParams();
+      params.set(CoreAdminParams.COLLECTION, collection);
+      return params;
+    }
+  }
+
   /**
    * Returns a SolrRequest to create a new shard in a collection
    */
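
Putting the new SolrJ request types together, a condensed sketch based on the flow exercised by TestSolrCloudSnapshots (solrClient is an initialized CloudSolrClient; collection, snapshot and backup names plus the location are placeholders; process() may throw SolrServerException/IOException):

    // Create a named snapshot of the current commit on every replica.
    new CollectionAdminRequest.CreateSnapshot("techproducts", "mySnap").process(solrClient);

    // List the snapshots recorded for the collection ("snapshots" is SolrSnapshotManager.SNAPSHOTS_INFO).
    CollectionAdminResponse listRsp =
        new CollectionAdminRequest.ListSnapshots("techproducts").process(solrClient);
    NamedList<?> snapshots = (NamedList<?>) listRsp.getResponse().get("snapshots");

    // Back up from the snapshot instead of the latest commit, then delete the snapshot.
    CollectionAdminRequest.backupCollection("techproducts", "mybackup")
        .setLocation("/path/to/backups")
        .setCommitName("mySnap")
        .process(solrClient);
    new CollectionAdminRequest.DeleteSnapshot("techproducts", "mySnap").process(solrClient);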

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/57ba9614/solr/solrj/src/java/org/apache/solr/common/params/CollectionParams.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/CollectionParams.java b/solr/solrj/src/java/org/apache/solr/common/params/CollectionParams.java
index f10f089..f1e5a52 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/CollectionParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/CollectionParams.java
@@ -91,6 +91,9 @@ public interface CollectionParams {
     MIGRATESTATEFORMAT(true, LockLevel.CLUSTER),
     BACKUP(true, LockLevel.COLLECTION),
     RESTORE(true, LockLevel.COLLECTION),
+    CREATESNAPSHOT(true, LockLevel.COLLECTION),
+    DELETESNAPSHOT(true, LockLevel.COLLECTION),
+    LISTSNAPSHOTS(false, LockLevel.NONE),
     //only for testing. it just waits for specified time
     // these are not exposed via collection API commands
     // but the overseer is aware of these tasks


[41/50] [abbrv] lucene-solr:jira/solr-8542-v2: SOLR-9506: cache IndexFingerprint for each segment

Posted by cp...@apache.org.
SOLR-9506: cache IndexFingerprint for each segment


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/184b0f22
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/184b0f22
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/184b0f22

Branch: refs/heads/jira/solr-8542-v2
Commit: 184b0f221559eaed5f273b1907e8af07bc95fec9
Parents: 9b49c72
Author: Noble Paul <no...@apache.org>
Authored: Mon Oct 24 16:45:42 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Mon Oct 24 16:45:42 2016 +0530

----------------------------------------------------------------------
 solr/CHANGES.txt                                |   3 +
 .../src/java/org/apache/solr/core/SolrCore.java |  40 +++++++
 .../apache/solr/search/SolrIndexSearcher.java   |  61 ++++++++---
 .../apache/solr/update/IndexFingerprint.java    |  78 ++++++++------
 .../cloud/LeaderFailureAfterFreshStartTest.java |  32 ------
 .../solr/cloud/PeerSyncReplicationTest.java     |   2 +-
 .../org/apache/solr/update/PeerSyncTest.java    |   4 +-
 ...PeerSyncWithIndexFingerprintCachingTest.java | 108 +++++++++++++++++++
 .../solr/cloud/AbstractDistribZkTestBase.java   |  31 ++++++
 9 files changed, 279 insertions(+), 80 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/184b0f22/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 92a994f..f455002 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -229,6 +229,9 @@ Optimizations
 
 * SOLR-9546: Eliminate unnecessary boxing/unboxing going on in SolrParams (Pushkar Raste, noble)
 
+* SOLR-9506: cache IndexFingerprint for each segment (Pushkar Raste, yonik, noble)
+
+
 Other Changes
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/184b0f22/solr/core/src/java/org/apache/solr/core/SolrCore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index 7ba15af..a2dc1c4 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -52,12 +52,14 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.locks.ReentrantLock;
 
+import com.google.common.collect.MapMaker;
 import org.apache.commons.io.FileUtils;
 import org.apache.lucene.analysis.util.ResourceLoader;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexDeletionPolicy;
 import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
@@ -127,6 +129,7 @@ import org.apache.solr.search.stats.LocalStatsCache;
 import org.apache.solr.search.stats.StatsCache;
 import org.apache.solr.update.DefaultSolrCoreState;
 import org.apache.solr.update.DirectUpdateHandler2;
+import org.apache.solr.update.IndexFingerprint;
 import org.apache.solr.update.SolrCoreState;
 import org.apache.solr.update.SolrCoreState.IndexWriterCloser;
 import org.apache.solr.update.SolrIndexWriter;
@@ -201,6 +204,8 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
 
   public Date getStartTimeStamp() { return startTime; }
 
+  private final Map<Object, IndexFingerprint> perSegmentFingerprintCache = new MapMaker().weakKeys().makeMap();
+
   public long getStartNanoTime() {
     return startNanoTime;
   }
@@ -1589,6 +1594,41 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
   }
 
   /**
+   * Computes the fingerprint of a segment and caches it only if all the versions in the segment are included in the fingerprint.
+   * We can't use computeIfAbsent because caching is conditional (as described above).
+   * There is a chance that two threads compute the fingerprint of the same segment; that is acceptable and cheaper than locking the entire map.
+   *
+   * @param searcher   searcher that includes the specified LeafReaderContext
+   * @param ctx        LeafReaderContext of the segment whose fingerprint is computed
+   * @param maxVersion maximum version number to consider for fingerprint computation
+   * @return IndexFingerprint of the segment
+   * @throws IOException in case of a low-level I/O error
+   */
+  public IndexFingerprint getIndexFingerprint(SolrIndexSearcher searcher, LeafReaderContext ctx, long maxVersion)
+      throws IOException {
+    IndexFingerprint f = null;
+    f = perSegmentFingerprintCache.get(ctx.reader().getCoreCacheKey());
+    // Recompute when the fingerprint is not cached, or
+    // when we want a fingerprint only up to a version less than the maxVersionEncountered in the segment, or
+    // when documents were deleted from the segment after its fingerprint was cached.
+    //
+    if (f == null || (f.getMaxInHash() > maxVersion) || (f.getNumDocs() != ctx.reader().numDocs())) {
+      log.debug("IndexFingerprint cache miss for searcher:{} reader:{} readerHash:{} maxVersion:{}", searcher, ctx.reader(), ctx.reader().hashCode(), maxVersion);
+      f = IndexFingerprint.getFingerprint(searcher, ctx, maxVersion);
+      // cache fingerprint for the segment only if all the versions in the segment are included in the fingerprint
+      if (f.getMaxVersionEncountered() == f.getMaxInHash()) {
+        log.info("Caching fingerprint for searcher:{} leafReaderContext:{} mavVersion:{}", searcher, ctx, maxVersion);
+        perSegmentFingerprintCache.put(ctx.reader().getCoreCacheKey(), f);
+      }
+
+    } else {
+      log.debug("IndexFingerprint cache hit for searcher:{} reader:{} readerHash:{} maxVersion:{}", searcher, ctx.reader(), ctx.reader().hashCode(), maxVersion);
+    }
+    log.debug("Cache Size: {}, Segments Size:{}", perSegmentFingerprintCache.size(), searcher.getTopReaderContext().leaves().size());
+    return f;
+  }
+
+  /**
   * Returns the current registered searcher with its reference count incremented, or null if none are registered.
   */
   public RefCounted<SolrIndexSearcher> getRegisteredSearcher() {
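
As the javadoc notes, computeIfAbsent() does not fit here because the computed fingerprint is only stored when it covers every version in the segment. The shape of that conditional caching, reduced to a generic sketch (cache, key, Value, stillValid, computeExpensively and isComplete are placeholders, not Solr API):

    Value v = cache.get(key);
    if (v == null || !stillValid(v)) {
      v = computeExpensively(key);   // two threads may race here; duplicate work is accepted
      if (isComplete(v)) {           // cache only results that will remain valid
        cache.put(key, v);
      }
    }
    return v;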

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/184b0f22/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index 933477b..d9364ca 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@ -33,7 +33,7 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
+
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
@@ -42,22 +42,50 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.DocumentStoredFieldVisitor;
 import org.apache.lucene.document.LazyDocument;
 import org.apache.lucene.index.*;
-import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.StoredFieldVisitor.Status;
-import org.apache.lucene.search.*;
+import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanClause.Occur;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.CollectionStatistics;
+import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.ConstantScoreQuery;
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.EarlyTerminatingSortingCollector;
+import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.FieldDoc;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.LeafCollector;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.MultiCollector;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.SimpleCollector;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TermStatistics;
+import org.apache.lucene.search.TimeLimitingCollector;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.TopDocsCollector;
+import org.apache.lucene.search.TopFieldCollector;
+import org.apache.lucene.search.TopFieldDocs;
+import org.apache.lucene.search.TopScoreDocCollector;
+import org.apache.lucene.search.TotalHitCountCollector;
+import org.apache.lucene.search.Weight;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.FixedBitSet;
 import org.apache.solr.common.SolrDocumentBase;
-import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.core.DirectoryFactory.DirContext;
 import org.apache.solr.core.DirectoryFactory;
+import org.apache.solr.core.DirectoryFactory.DirContext;
 import org.apache.solr.core.SolrConfig;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrInfoMBean;
@@ -152,8 +180,6 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
   private final String path;
   private boolean releaseDirectory;
 
-  private final Map<Long, IndexFingerprint> maxVersionFingerprintCache = new ConcurrentHashMap<>();
-
   private final NamedList<Object> readerStats;
 
   private static DirectoryReader getReader(SolrCore core, SolrIndexConfig config, DirectoryFactory directoryFactory,
@@ -2416,19 +2442,24 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
     final SolrIndexSearcher searcher = this;
     final AtomicReference<IOException> exception = new AtomicReference<>();
     try {
-      return maxVersionFingerprintCache.computeIfAbsent(maxVersion, key -> {
-        try {
-          return IndexFingerprint.getFingerprint(searcher, key);
-        } catch (IOException e) {
-          exception.set(e);
-          return null;
-        }
-      });
+      return searcher.getTopReaderContext().leaves().stream()
+          .map(ctx -> {
+            try {
+              return searcher.getCore().getIndexFingerprint(searcher, ctx, maxVersion);
+            } catch (IOException e) {
+              exception.set(e);
+              return null;
+            }
+          })
+          .filter(java.util.Objects::nonNull)
+          .reduce(new IndexFingerprint(maxVersion), IndexFingerprint::reduce);
+
     } finally {
       if (exception.get() != null) throw exception.get();
     }
   }
 
+
   /////////////////////////////////////////////////////////////////////
   // SolrInfoMBean stuff: Statistics and Module Info
   /////////////////////////////////////////////////////////////////////

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/184b0f22/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java b/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java
index 877ef03..0b7e655 100644
--- a/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java
+++ b/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java
@@ -52,6 +52,14 @@ public class IndexFingerprint implements MapSerializable {
   private long numDocs;
   private long maxDoc;
 
+  public IndexFingerprint() {
+    // default constructor
+  }
+  
+  public IndexFingerprint (long maxVersionSpecified)  {
+    this.maxVersionSpecified = maxVersionSpecified;
+  }
+  
   public long getMaxVersionSpecified() {
     return maxVersionSpecified;
   }
@@ -82,53 +90,62 @@ public class IndexFingerprint implements MapSerializable {
 
   /** Opens a new realtime searcher and returns it's (possibly cached) fingerprint */
   public static IndexFingerprint getFingerprint(SolrCore core, long maxVersion) throws IOException {
+    RTimer timer = new RTimer();
     core.getUpdateHandler().getUpdateLog().openRealtimeSearcher();
     RefCounted<SolrIndexSearcher> newestSearcher = core.getUpdateHandler().getUpdateLog().uhandler.core.getRealtimeSearcher();
     try {
-      return newestSearcher.get().getIndexFingerprint(maxVersion);
+      IndexFingerprint f = newestSearcher.get().getIndexFingerprint(maxVersion);
+      final double duration = timer.stop();
+      log.info("IndexFingerprint millis:{} result:{}",duration, f);
+      return f;
     } finally {
       if (newestSearcher != null) {
         newestSearcher.decref();
       }
     }
   }
-
-  /** Calculates an index fingerprint */
-  public static IndexFingerprint getFingerprint(SolrIndexSearcher searcher, long maxVersion) throws IOException {
-    RTimer timer = new RTimer();
-
+  
+  public static IndexFingerprint getFingerprint(SolrIndexSearcher searcher, LeafReaderContext ctx, Long maxVersion)
+      throws IOException {
     SchemaField versionField = VersionInfo.getAndCheckVersionField(searcher.getSchema());
-
-    IndexFingerprint f = new IndexFingerprint();
-    f.maxVersionSpecified = maxVersion;
-    f.maxDoc = searcher.maxDoc();
-
-    // TODO: this could be parallelized, or even cached per-segment if performance becomes an issue
     ValueSource vs = versionField.getType().getValueSource(versionField, null);
     Map funcContext = ValueSource.newContext(searcher);
     vs.createWeight(funcContext, searcher);
-    for (LeafReaderContext ctx : searcher.getTopReaderContext().leaves()) {
-      int maxDoc = ctx.reader().maxDoc();
-      f.numDocs += ctx.reader().numDocs();
-      Bits liveDocs = ctx.reader().getLiveDocs();
-      FunctionValues fv = vs.getValues(funcContext, ctx);
-      for (int doc = 0; doc < maxDoc; doc++) {
-        if (liveDocs != null && !liveDocs.get(doc)) continue;
-        long v = fv.longVal(doc);
-        f.maxVersionEncountered = Math.max(v, f.maxVersionEncountered);
-        if (v <= f.maxVersionSpecified) {
-          f.maxInHash = Math.max(v, f.maxInHash);
-          f.versionsHash += Hash.fmix64(v);
-          f.numVersions++;
-        }
+    
+    IndexFingerprint f = new IndexFingerprint();
+    f.maxVersionSpecified = maxVersion;
+    f.maxDoc = ctx.reader().maxDoc();
+    f.numDocs = ctx.reader().numDocs();
+    
+    int maxDoc = ctx.reader().maxDoc();
+    Bits liveDocs = ctx.reader().getLiveDocs();
+    FunctionValues fv = vs.getValues(funcContext, ctx);
+    for (int doc = 0; doc < maxDoc; doc++) {
+      if (liveDocs != null && !liveDocs.get(doc)) continue;
+      long v = fv.longVal(doc);
+      f.maxVersionEncountered = Math.max(v, f.maxVersionEncountered);
+      if (v <= f.maxVersionSpecified) {
+        f.maxInHash = Math.max(v, f.maxInHash);
+        f.versionsHash += Hash.fmix64(v);
+        f.numVersions++;
       }
     }
-
-    final double duration = timer.stop();
-    log.info("IndexFingerprint millis:" + duration + " result:" + f);
-
+    
     return f;
   }
+  
+  
+  public static IndexFingerprint reduce(IndexFingerprint acc, IndexFingerprint f2) {
+    // acc should have maxVersionSpecified already set in it using IndexFingerprint(long maxVersionSpecified) constructor
+    acc.maxDoc = Math.max(acc.maxDoc, f2.maxDoc);
+    acc.numDocs += f2.numDocs;
+    acc.maxVersionEncountered = Math.max(acc.maxVersionEncountered, f2.maxVersionEncountered);
+    acc.maxInHash = Math.max(acc.maxInHash, f2.maxInHash);
+    acc.versionsHash += f2.versionsHash;
+    acc.numVersions += f2.numVersions;
+
+    return acc;
+  }
 
   /** returns 0 for equal, negative if f1 is less recent than f2, positive if more recent */
   public static int compare(IndexFingerprint f1, IndexFingerprint f2) {
@@ -200,4 +217,5 @@ public class IndexFingerprint implements MapSerializable {
   public String toString() {
     return toMap(new LinkedHashMap<>()).toString();
   }
+
 }
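
The per-segment getFingerprint() and reduce() compose exactly as in the SolrIndexSearcher change above; the same aggregation written as a plain loop, assuming searcher and maxVersion are in scope and the caller handles IOException:

    IndexFingerprint whole = new IndexFingerprint(maxVersion);
    for (LeafReaderContext ctx : searcher.getTopReaderContext().leaves()) {
      // Per-segment fingerprints are cached by SolrCore.getIndexFingerprint when complete.
      whole = IndexFingerprint.reduce(whole, searcher.getCore().getIndexFingerprint(searcher, ctx, maxVersion));
    }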

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/184b0f22/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java
index 348532c..ef21386 100644
--- a/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java
@@ -29,7 +29,6 @@ import java.util.stream.Collectors;
 import org.apache.commons.lang.RandomStringUtils;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.request.UpdateRequest;
 import org.apache.solr.cloud.ZkTestServer.LimitViolationAction;
 import org.apache.solr.common.SolrInputDocument;
@@ -37,10 +36,8 @@ import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.Slice;
-import org.apache.solr.common.cloud.Slice.State;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.core.Diagnostics;
 import org.apache.solr.handler.ReplicationHandler;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -197,35 +194,6 @@ public class LeaderFailureAfterFreshStartTest extends AbstractFullDistribZkTestB
   }
 
   
-  static void waitForNewLeader(CloudSolrClient cloudClient, String shardName, Replica oldLeader, int maxWaitInSecs)
-      throws Exception {
-    log.info("Will wait for a node to become leader for {} secs", maxWaitInSecs);
-    boolean waitForLeader = true;
-    int i = 0;
-    ZkStateReader zkStateReader = cloudClient.getZkStateReader();
-    zkStateReader.forceUpdateCollection(DEFAULT_COLLECTION);
-    
-    while(waitForLeader) {
-      ClusterState clusterState = zkStateReader.getClusterState();
-      DocCollection coll = clusterState.getCollection("collection1");
-      Slice slice = coll.getSlice(shardName);
-      if(slice.getLeader() != oldLeader && slice.getState() == State.ACTIVE) {
-        log.info("New leader got elected in {} secs", i);
-        break;
-      }
-      
-      if(i == maxWaitInSecs) {
-        Diagnostics.logThreadDumps("Could not find new leader in specified timeout");
-        zkStateReader.getZkClient().printLayoutToStdOut();
-        fail("Could not find new leader even after waiting for " + maxWaitInSecs + "secs");
-      }
-      
-      i++;
-      Thread.sleep(1000);
-    }
-  }
-    
-
 
   private void waitTillNodesActive() throws Exception {
     for (int i = 0; i < 60; i++) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/184b0f22/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java b/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java
index 3ded7d2..e00ea3c 100644
--- a/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java
@@ -149,7 +149,7 @@ public class PeerSyncReplicationTest extends AbstractFullDistribZkTestBase {
       log.info("Now shutting down initial leader");
       forceNodeFailures(singletonList(initialLeaderJetty));
       log.info("Updating mappings from zk");
-      LeaderFailureAfterFreshStartTest.waitForNewLeader(cloudClient, "shard1", (Replica) initialLeaderJetty.client.info, 15);
+      waitForNewLeader(cloudClient, "shard1", (Replica) initialLeaderJetty.client.info, 15);
       updateMappingsFromZk(jettys, clients, true);
       assertEquals("PeerSynced node did not become leader", nodePeerSynced, shardToLeaderJetty.get("shard1"));
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/184b0f22/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java b/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java
index 64edd21..8f3a89a 100644
--- a/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java
+++ b/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java
@@ -122,7 +122,8 @@ public class PeerSyncTest extends BaseDistributedSearchTestCase {
     del(client0, params(DISTRIB_UPDATE_PARAM,FROM_LEADER,"_version_",Long.toString(-++v)), "1000");
 
     assertSync(client1, numVersions, true, shardsArr[0]);
-    client0.commit(); client1.commit(); queryAndCompare(params("q", "*:*", "sort","_version_ desc"), client0, client1);
+    client0.commit(); client1.commit(); 
+    queryAndCompare(params("q", "*:*", "sort","_version_ desc"), client0, client1);
 
     // test that delete by query is returned even if not requested, and that it doesn't delete newer stuff than it should
     v=2000;
@@ -145,7 +146,6 @@ public class PeerSyncTest extends BaseDistributedSearchTestCase {
     assertSync(client1, numVersions, true, shardsArr[0]);
     client0.commit(); client1.commit(); queryAndCompare(params("q", "*:*", "sort","_version_ desc"), client0, client1);
 
-
     //
     // Test that handling reorders work when applying docs retrieved from peer
     //

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/184b0f22/solr/core/src/test/org/apache/solr/update/PeerSyncWithIndexFingerprintCachingTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/PeerSyncWithIndexFingerprintCachingTest.java b/solr/core/src/test/org/apache/solr/update/PeerSyncWithIndexFingerprintCachingTest.java
new file mode 100644
index 0000000..9617ff2
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/update/PeerSyncWithIndexFingerprintCachingTest.java
@@ -0,0 +1,108 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.update;
+
+import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.apache.solr.BaseDistributedSearchTestCase;
+import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.request.QueryRequest;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.StrUtils;
+import org.apache.solr.update.processor.DistributedUpdateProcessor.DistribPhase;
+import org.junit.Assert;
+import org.junit.Test;
+
+
+/**
+ * This test is deliberately kept in a separate class because we don't want segment
+ * merging to kick in after deleting documents.
+ * This ensures that the cached IndexFingerprint is checked first and recomputed only
+ * if documents in the segment were deleted after the fingerprint was first cached.
+ *
+ */
+@SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776")
+public class PeerSyncWithIndexFingerprintCachingTest extends BaseDistributedSearchTestCase {
+  private static int numVersions = 100;  // number of versions to use when syncing
+  private final String FROM_LEADER = DistribPhase.FROMLEADER.toString();
+
+  private ModifiableSolrParams seenLeader = 
+    params(DISTRIB_UPDATE_PARAM, FROM_LEADER);
+  
+  public PeerSyncWithIndexFingerprintCachingTest() {
+    stress = 0;
+
+    // TODO: a better way to do this?
+    configString = "solrconfig-tlog.xml";
+    schemaString = "schema.xml";
+  }
+
+  @Test
+  @ShardsFixed(num = 3)
+  public void test() throws Exception {
+    handle.clear();
+    handle.put("timestamp", SKIPVAL);
+    handle.put("score", SKIPVAL);
+    handle.put("maxScore", SKIPVAL);
+
+    SolrClient client0 = clients.get(0);
+    SolrClient client1 = clients.get(1);
+
+    long v = 1;
+    for(; v < 8; ++v) {
+      add(client0, seenLeader, sdoc("id", ""+v,"_version_",v));
+      add(client1, seenLeader, sdoc("id",""+v,"_version_",v));
+      
+    }
+    client0.commit(); client1.commit();
+    
+    IndexFingerprint before = getFingerprint(client0, Long.MAX_VALUE);
+    
+    del(client0, params(DISTRIB_UPDATE_PARAM,FROM_LEADER,"_version_",Long.toString(-++v)), "2");
+    client0.commit(); 
+    
+    IndexFingerprint after = getFingerprint(client0, Long.MAX_VALUE);
+   
+    // make sure the fingerprints before and after the delete are not the same
+    Assert.assertTrue(IndexFingerprint.compare(before, after) != 0);
+    
+    // replica which missed the delete should be able to sync
+    assertSync(client1, numVersions, true, shardsArr[0]);
+    client0.commit(); client1.commit();  
+
+    queryAndCompare(params("q", "*:*", "sort","_version_ desc"), client0, client1);
+  }
+
+  IndexFingerprint getFingerprint(SolrClient client, long maxVersion) throws IOException, SolrServerException {
+    QueryRequest qr = new QueryRequest(params("qt","/get", "getFingerprint",Long.toString(maxVersion)));
+    NamedList rsp = client.request(qr);
+    return IndexFingerprint.fromObject(rsp.get("fingerprint"));
+  }
+
+  void assertSync(SolrClient client, int numVersions, boolean expectedResult, String... syncWith) throws IOException, SolrServerException {
+    QueryRequest qr = new QueryRequest(params("qt","/get", "getVersions",Integer.toString(numVersions), "sync", StrUtils.join(Arrays.asList(syncWith), ',')));
+    NamedList rsp = client.request(qr);
+    assertEquals(expectedResult, (Boolean) rsp.get("sync"));
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/184b0f22/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
index 03db71c..d04d996 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
@@ -24,11 +24,14 @@ import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.commons.io.FileUtils;
 import org.apache.solr.BaseDistributedSearchTestCase;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.cloud.Slice.State;
 import org.apache.solr.core.Diagnostics;
 import org.apache.solr.core.MockDirectoryFactory;
 import org.apache.zookeeper.KeeperException;
@@ -222,6 +225,34 @@ public abstract class AbstractDistribZkTestBase extends BaseDistributedSearchTes
 
     log.info("Collection has disappeared - collection: " + collection);
   }
+  
+  static void waitForNewLeader(CloudSolrClient cloudClient, String shardName, Replica oldLeader, int maxWaitInSecs)
+      throws Exception {
+    log.info("Will wait for a node to become leader for {} secs", maxWaitInSecs);
+    boolean waitForLeader = true;
+    int i = 0;
+    ZkStateReader zkStateReader = cloudClient.getZkStateReader();
+    zkStateReader.forceUpdateCollection(DEFAULT_COLLECTION);
+    
+    while(waitForLeader) {
+      ClusterState clusterState = zkStateReader.getClusterState();
+      DocCollection coll = clusterState.getCollection("collection1");
+      Slice slice = coll.getSlice(shardName);
+      if(slice.getLeader() != oldLeader && slice.getState() == State.ACTIVE) {
+        log.info("New leader got elected in {} secs", i);
+        break;
+      }
+      
+      if(i == maxWaitInSecs) {
+        Diagnostics.logThreadDumps("Could not find new leader in specified timeout");
+        zkStateReader.getZkClient().printLayoutToStdOut();
+        fail("Could not find new leader even after waiting for " + maxWaitInSecs + "secs");
+      }
+      
+      i++;
+      Thread.sleep(1000);
+    }
+  }
 
   public static void verifyReplicaStatus(ZkStateReader reader, String collection, String shard, String coreNodeName, Replica.State expectedState) throws InterruptedException {
     int maxIterations = 100;
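
For context, a minimal usage sketch (not part of this commit) of the now-shared helper, assuming a hypothetical test in the same org.apache.solr.cloud package that extends AbstractFullDistribZkTestBase; PeerSyncReplicationTest above obtains the old leader the same way, and the class and method names here are illustrative only:

package org.apache.solr.cloud;

import org.apache.solr.common.cloud.Replica;

public class LeaderWaitSketchTest extends AbstractFullDistribZkTestBase {

  // Hypothetical helper: oldLeader is the Replica that led shard1 before its node was killed.
  void waitForShard1Leader(Replica oldLeader) throws Exception {
    // Checks the ZK cluster state once per second, for at most 30 seconds, until the shard1
    // slice of collection1 is ACTIVE and led by a replica other than oldLeader; on timeout the
    // helper logs thread dumps and fails the test.
    waitForNewLeader(cloudClient, "shard1", oldLeader, 30);
  }
}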


[10/50] [abbrv] lucene-solr:jira/solr-8542-v2: SOLR-9657: New TemplateUpdateProcessorFactory added

Posted by cp...@apache.org.
SOLR-9657: New TemplateUpdateProcessorFactory added


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c2e031ad
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c2e031ad
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c2e031ad

Branch: refs/heads/jira/solr-8542-v2
Commit: c2e031add3d5db2c4e89a5a92afd7bb8cc1f481f
Parents: 53129ba
Author: Noble Paul <no...@gmail.com>
Authored: Wed Oct 19 11:04:10 2016 +0530
Committer: Noble Paul <no...@gmail.com>
Committed: Wed Oct 19 11:04:10 2016 +0530

----------------------------------------------------------------------
 solr/CHANGES.txt                                |   3 +
 .../handler/dataimport/TemplateTransformer.java |   5 +-
 .../handler/dataimport/VariableResolver.java    |  66 +++--------
 .../src/java/org/apache/solr/core/SolrCore.java |   2 +-
 .../processor/SimpleUpdateProcessorFactory.java |  45 +++++++-
 .../TemplateUpdateProcessorFactory.java         | 110 +++++++++++++++++++
 .../processor/UpdateRequestProcessorChain.java  |  14 ++-
 .../processor/TemplateUpdateProcessorTest.java  |  48 ++++++++
 .../UpdateRequestProcessorFactoryTest.java      |  15 +++
 9 files changed, 247 insertions(+), 61 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c2e031ad/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index be958d9..880718c 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -135,6 +135,9 @@ New Features
 
 * SOLR-9103: Restore ability for users to add custom Streaming Expressions (Cao Manh Dat)
 
+* SOLR-9657: New TemplateUpdateProcessorFactory added (noble)
+
+
 Bug Fixes
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c2e031ad/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/TemplateTransformer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/TemplateTransformer.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/TemplateTransformer.java
index 6bd6050..a5faa7e 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/TemplateTransformer.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/TemplateTransformer.java
@@ -56,12 +56,11 @@ public class TemplateTransformer extends Transformer {
   public Object transformRow(Map<String, Object> row, Context context) {
 
 
-    VariableResolver resolver = (VariableResolver) context
-            .getVariableResolver();
+    VariableResolver resolver = context.getVariableResolver();
     // Add current row to the copy of resolver map
-//    for (Map.Entry<String, Object> entry : row.entrySet())
 
     for (Map<String, String> map : context.getAllEntityFields()) {
+      map.entrySet();
       String expr = map.get(TEMPLATE);
       if (expr == null)
         continue;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c2e031ad/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/VariableResolver.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/VariableResolver.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/VariableResolver.java
index 76930e2..f255657 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/VariableResolver.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/VariableResolver.java
@@ -16,16 +16,19 @@
  */
 package org.apache.solr.handler.dataimport;
 
-import java.util.ArrayList;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 import java.util.WeakHashMap;
+import java.util.function.Function;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.solr.update.processor.TemplateUpdateProcessorFactory;
+
+import static org.apache.solr.update.processor.TemplateUpdateProcessorFactory.Resolved;
+
 /**
  * <p>
  * A set of nested maps that can resolve variables by namespaces. Variables are
@@ -48,20 +51,13 @@ import java.util.regex.Pattern;
 public class VariableResolver {
   
   private static final Pattern DOT_PATTERN = Pattern.compile("[.]");
-  private static final Pattern PLACEHOLDER_PATTERN = Pattern
-      .compile("[$][{](.*?)[}]");
   private static final Pattern EVALUATOR_FORMAT_PATTERN = Pattern
       .compile("^(\\w*?)\\((.*?)\\)$");
   private Map<String,Object> rootNamespace;
   private Map<String,Evaluator> evaluators;
   private Map<String,Resolved> cache = new WeakHashMap<>();
-  
-  class Resolved {
-    List<Integer> startIndexes = new ArrayList<>(2);
-    List<Integer> endOffsets = new ArrayList<>(2);
-    List<String> variables = new ArrayList<>(2);
-  }
-  
+  private Function<String,Object> fun = this::resolve;
+
   public static final String FUNCTIONS_NAMESPACE = "dataimporter.functions.";
   public static final String FUNCTIONS_NAMESPACE_SHORT = "dih.functions.";
   
@@ -145,48 +141,8 @@ public class VariableResolver {
    * @return the string with the placeholders replaced with their values
    */
   public String replaceTokens(String template) {
-    if (template == null) {
-      return null;
-    }
-    Resolved r = getResolved(template);
-    if (r.startIndexes != null) {
-      StringBuilder sb = new StringBuilder(template);
-      for (int i = r.startIndexes.size() - 1; i >= 0; i--) {
-        String replacement = resolve(r.variables.get(i)).toString();
-        sb.replace(r.startIndexes.get(i), r.endOffsets.get(i), replacement);
-      }
-      return sb.toString();
-    } else {
-      return template;
-    }
+    return TemplateUpdateProcessorFactory.replaceTokens(template, cache, fun);
   }
-  
-  private Resolved getResolved(String template) {
-    Resolved r = cache.get(template);
-    if (r == null) {
-      r = new Resolved();
-      Matcher m = PLACEHOLDER_PATTERN.matcher(template);
-      while (m.find()) {
-        String variable = m.group(1);
-        r.startIndexes.add(m.start(0));
-        r.endOffsets.add(m.end(0));
-        r.variables.add(variable);
-      }
-      cache.put(template, r);
-    }
-    return r;
-  }
-  /**
-   * Get a list of variables embedded in the template string.
-   */
-  public List<String> getVariables(String template) {
-    Resolved r = getResolved(template);
-    if (r == null) {
-      return Collections.emptyList();
-    }
-    return new ArrayList<>(r.variables);
-  }
-  
   public void addNamespace(String name, Map<String,Object> newMap) {
     if (newMap != null) {
       if (name != null) {
@@ -204,7 +160,11 @@ public class VariableResolver {
       }
     }
   }
-  
+
+  public List<String> getVariables(String expr) {
+    return TemplateUpdateProcessorFactory.getVariables(expr, cache);
+  }
+
   class CurrentLevel {
     final Map<String,Object> map;
     final int level;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c2e031ad/solr/core/src/java/org/apache/solr/core/SolrCore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index 2827f03..7ba15af 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -184,7 +184,7 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
   private final long startNanoTime = System.nanoTime();
   private final RequestHandlers reqHandlers;
   private final PluginBag<SearchComponent> searchComponents = new PluginBag<>(SearchComponent.class, this);
-  private final PluginBag<UpdateRequestProcessorFactory> updateProcessors = new PluginBag<>(UpdateRequestProcessorFactory.class, this);
+  private final PluginBag<UpdateRequestProcessorFactory> updateProcessors = new PluginBag<>(UpdateRequestProcessorFactory.class, this, true);
   private final Map<String,UpdateRequestProcessorChain> updateProcessorChains;
   private final Map<String, SolrInfoMBean> infoRegistry;
   private final IndexDeletionPolicyWrapper solrDelPolicy;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c2e031ad/solr/core/src/java/org/apache/solr/update/processor/SimpleUpdateProcessorFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/SimpleUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/SimpleUpdateProcessorFactory.java
index e9c5b2d..aec9d87 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/SimpleUpdateProcessorFactory.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/SimpleUpdateProcessorFactory.java
@@ -18,6 +18,7 @@ package org.apache.solr.update.processor;
 
 import java.io.IOException;
 
+import org.apache.solr.common.util.NamedList;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.update.AddUpdateCommand;
@@ -28,18 +29,60 @@ import org.apache.solr.update.AddUpdateCommand;
  * This is deliberately made to support only the add operation
  */
 public abstract class SimpleUpdateProcessorFactory extends UpdateRequestProcessorFactory {
+  protected final String myName;
+  protected NamedList initArgs = new NamedList();
+  private static ThreadLocal<SolrQueryRequest> REQ = new ThreadLocal<>();
+
+  protected SimpleUpdateProcessorFactory() {
+    String simpleName = this.getClass().getSimpleName();
+    this.myName = simpleName.substring(0, simpleName.indexOf("UpdateProcessorFactory"));
+  }
+
+  @Override
+  public void init(NamedList args) {
+    super.init(args);
+    this.initArgs = args;
+
+  }
 
   @Override
   public UpdateRequestProcessor getInstance(SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) {
     return new UpdateRequestProcessor(next) {
       @Override
       public void processAdd(AddUpdateCommand cmd) throws IOException {
-        process(cmd, req, rsp);
+        REQ.set(req);
+        try {
+          process(cmd, req, rsp);
+        } finally {
+          REQ.remove();
+        }
         super.processAdd(cmd);
       }
     };
   }
 
+  protected String getParam(String name) {
+    String[] v = getParams(name);
+    return v == null || v.length == 0 ? null : v[0];
+  }
+
+  /** Returns the value from the init args or from a request parameter. The request
+   * parameter must be prefixed with the URP short name.
+   */
+  protected String[] getParams(String name) {
+    Object v = REQ.get().getParams().getParams(_param(name));
+    if (v == null) v = initArgs.get(name);
+    if (v == null) return null;
+    if (v instanceof String[]) return (String[]) v;
+    return new String[]{v.toString()};
+
+  }
+
+  private String _param(String name) {
+    return myName + "." + name;
+  }
+
+
   /**
    * This object is reused across requests, so this method should not store anything in instance variables.
    */
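
A minimal sketch (not part of this commit) of how a factory built on this base class reads its configuration; the "Shout" factory and its behaviour are hypothetical, only the process(...) contract and the getParam(...) lookup order come from the code above:

package org.apache.solr.update.processor;

import java.util.Locale;

import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.update.AddUpdateCommand;

public class ShoutUpdateProcessorFactory extends SimpleUpdateProcessorFactory {
  @Override
  protected void process(AddUpdateCommand cmd, SolrQueryRequest req, SolrQueryResponse rsp) {
    // myName is derived from the class name ("Shout"), so getParam("field") first looks for the
    // request parameter "Shout.field" and then falls back to the "field" init arg from solrconfig.xml.
    String field = getParam("field");
    if (field == null) return;
    Object v = cmd.getSolrInputDocument().getFieldValue(field);
    if (v != null) {
      // Hypothetical behaviour: upper-case the configured field on every add.
      cmd.getSolrInputDocument().setField(field, v.toString().toUpperCase(Locale.ROOT));
    }
  }
}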

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c2e031ad/solr/core/src/java/org/apache/solr/update/processor/TemplateUpdateProcessorFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/TemplateUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/TemplateUpdateProcessorFactory.java
new file mode 100644
index 0000000..41d109b
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/update/processor/TemplateUpdateProcessorFactory.java
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.update.processor;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.update.AddUpdateCommand;
+
+public class TemplateUpdateProcessorFactory extends SimpleUpdateProcessorFactory {
+  @Override
+  protected void process(AddUpdateCommand cmd, SolrQueryRequest req, SolrQueryResponse rsp) {
+    String[] vals = getParams("field");
+    SolrInputDocument doc = cmd.getSolrInputDocument();
+    if (vals != null && vals.length > 0) {
+      for (String val : vals) {
+        if (val == null || val.isEmpty()) continue;
+        int idx = val.indexOf(':');
+        if (idx == -1)
+          throw new RuntimeException("'field' must be of the format <field-name>:<the-template-string>");
+
+        String fName = val.substring(0, idx);
+        String template = val.substring(idx + 1);
+        doc.addField(fName, replaceTokens(template, null, s -> {
+          Object v = doc.getFieldValue(s);
+          return v == null ? "" : v;
+        }));
+      }
+    }
+
+  }
+
+
+  public static Resolved getResolved(String template, Map<String, Resolved> cache) {
+    Resolved r = cache == null ? null : cache.get(template);
+    if (r == null) {
+      r = new Resolved();
+      Matcher m = PLACEHOLDER_PATTERN.matcher(template);
+      while (m.find()) {
+        String variable = m.group(1);
+        r.startIndexes.add(m.start(0));
+        r.endOffsets.add(m.end(0));
+        r.variables.add(variable);
+      }
+      if (cache != null) cache.put(template, r);
+    }
+    return r;
+  }
+
+  /**
+   * Get a list of variables embedded in the template string.
+   */
+  public static List<String> getVariables(String template, Map<String, Resolved> cache) {
+    Resolved r = getResolved(template, cache);
+    if (r == null) {
+      return Collections.emptyList();
+    }
+    return new ArrayList<>(r.variables);
+  }
+
+  public static String replaceTokens(String template, Map<String, Resolved> cache, Function<String, Object> fun) {
+    if (template == null) {
+      return null;
+    }
+    Resolved r = getResolved(template, cache);
+    if (r.startIndexes != null) {
+      StringBuilder sb = new StringBuilder(template);
+      for (int i = r.startIndexes.size() - 1; i >= 0; i--) {
+        String replacement = fun.apply(r.variables.get(i)).toString();
+        sb.replace(r.startIndexes.get(i), r.endOffsets.get(i), replacement);
+      }
+      return sb.toString();
+    } else {
+      return template;
+    }
+  }
+
+
+  public static class Resolved {
+    public List<Integer> startIndexes = new ArrayList<>(2);
+    public List<Integer> endOffsets = new ArrayList<>(2);
+    public List<String> variables = new ArrayList<>(2);
+  }
+
+  public static final Pattern PLACEHOLDER_PATTERN = Pattern
+      .compile("[$][{](.*?)[}]");
+}
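
The static helpers above are also what VariableResolver now delegates to; a minimal standalone sketch of their behaviour (the field names and values here are hypothetical, only the replaceTokens/getVariables signatures come from the class above):

import java.util.HashMap;
import java.util.Map;

import org.apache.solr.update.processor.TemplateUpdateProcessorFactory;
import org.apache.solr.update.processor.TemplateUpdateProcessorFactory.Resolved;

public class TemplateTokenSketch {
  public static void main(String[] args) {
    Map<String, Resolved> cache = new HashMap<>();   // parsed templates are cached by template string
    Map<String, Object> values = new HashMap<>();
    values.put("firstName", "Tom");
    values.put("lastName", "Cruise");

    // Placeholders of the form ${name} are resolved through the supplied function;
    // unknown variables resolve to "" here, matching the URP's handling of missing fields.
    String id = TemplateUpdateProcessorFactory.replaceTokens(
        "${firstName}_${lastName}", cache, s -> values.getOrDefault(s, ""));
    System.out.println(id);   // Tom_Cruise

    System.out.println(TemplateUpdateProcessorFactory.getVariables(
        "${lastName}_${unKnown}", cache));   // [lastName, unKnown]
  }
}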

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c2e031ad/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java b/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java
index e77dd82..0ed626c 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java
@@ -101,7 +101,7 @@ public final class UpdateRequestProcessorChain implements PluginInfoInitialized
 
   /**
    * Initializes the chain using the factories specified by the <code>PluginInfo</code>.
-   * if the chain includes the <code>RunUpdateProcessorFactory</code>, but 
+   * if the chain includes the <code>RunUpdateProcessorFactory</code>, but
    * does not include an implementation of the 
    * <code>DistributingUpdateProcessorFactory</code> interface, then an 
    * instance of <code>DistributedUpdateProcessorFactory</code> will be 
@@ -269,8 +269,16 @@ public final class UpdateRequestProcessorChain implements PluginInfoInitialized
       s = s.trim();
       if (s.isEmpty()) continue;
       UpdateRequestProcessorFactory p = core.getUpdateProcessors().get(s);
-      if (p == null)
-        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "No such processor " + s);
+      if (p == null) {
+        try {
+          p = core.createInstance(s + "UpdateProcessorFactory", UpdateRequestProcessorFactory.class,
+              "updateProcessor", null, core.getMemClassLoader());
+          core.getUpdateProcessors().put(s, p);
+        } catch (SolrException e) {
+        }
+        if (p == null)
+          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "No such processor " + s);
+      }
       result.add(p);
     }
     return result;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c2e031ad/solr/core/src/test/org/apache/solr/update/processor/TemplateUpdateProcessorTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/processor/TemplateUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/TemplateUpdateProcessorTest.java
new file mode 100644
index 0000000..7ee8a34
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/update/processor/TemplateUpdateProcessorTest.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.update.processor;
+
+import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.request.LocalSolrQueryRequest;
+import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.update.AddUpdateCommand;
+
+public class TemplateUpdateProcessorTest extends SolrTestCaseJ4 {
+  public void testSimple() throws Exception {
+
+    AddUpdateCommand cmd = new AddUpdateCommand(new LocalSolrQueryRequest(null,
+        new ModifiableSolrParams()
+            .add("processor", "Template")
+            .add("Template.field", "id:${firstName}_${lastName}")
+            .add("Template.field", "another:${lastName}_${firstName}")
+            .add("Template.field", "missing:${lastName}_${unKnown}")
+
+    ));
+    cmd.solrDoc = new SolrInputDocument();
+    cmd.solrDoc.addField("firstName", "Tom");
+    cmd.solrDoc.addField("lastName", "Cruise");
+
+    new TemplateUpdateProcessorFactory().getInstance(cmd.getReq(), new SolrQueryResponse(), null).processAdd(cmd);
+    assertEquals("Tom_Cruise", cmd.solrDoc.getFieldValue("id"));
+    assertEquals("Cruise_Tom", cmd.solrDoc.getFieldValue("another"));
+    assertEquals("Cruise_", cmd.solrDoc.getFieldValue("missing"));
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c2e031ad/solr/core/src/test/org/apache/solr/update/processor/UpdateRequestProcessorFactoryTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/processor/UpdateRequestProcessorFactoryTest.java b/solr/core/src/test/org/apache/solr/update/processor/UpdateRequestProcessorFactoryTest.java
index e9dc93f..7ebefec 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/UpdateRequestProcessorFactoryTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/UpdateRequestProcessorFactoryTest.java
@@ -23,6 +23,7 @@ import java.util.Arrays;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.util.AbstractSolrTestCase;
@@ -41,6 +42,20 @@ public class UpdateRequestProcessorFactoryTest extends AbstractSolrTestCase {
   public static void beforeClass() throws Exception {
     initCore("solrconfig-transformers.xml", "schema.xml");
   }
+
+  public void testRequestTimeUrp(){
+    SolrCore core = h.getCore();
+    ModifiableSolrParams params = new ModifiableSolrParams()
+        .add("processor", "Template")
+        .add("Template.field", "id_t:${firstName}_${lastName}")
+        .add("Template.field", "another_t:${lastName}_${firstName}")
+        .add("Template.field", "missing_t:${lastName}_${unKnown}");
+    UpdateRequestProcessorChain chain = core.getUpdateProcessorChain(params);
+    List<UpdateRequestProcessorFactory> l = chain.getProcessors();
+    assertTrue(l.get(0) instanceof TemplateUpdateProcessorFactory);
+
+
+  }
   
   public void testConfiguration() throws Exception 
   {


[28/50] [abbrv] lucene-solr:jira/solr-8542-v2: LUCENE-7513: Update to randomizedtesting 2.4.0.

Posted by cp...@apache.org.
LUCENE-7513: Update to randomizedtesting 2.4.0.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/a19ec194
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/a19ec194
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/a19ec194

Branch: refs/heads/jira/solr-8542-v2
Commit: a19ec194d25692f13e03d92450c1f261670e938a
Parents: 36e997d
Author: Dawid Weiss <dw...@apache.org>
Authored: Fri Oct 21 11:14:37 2016 +0200
Committer: Dawid Weiss <dw...@apache.org>
Committed: Fri Oct 21 11:14:37 2016 +0200

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |  2 ++
 .../AbstractTestCompressionMode.java            |  8 +++----
 .../AbstractTestLZ4CompressionMode.java         | 10 ++++-----
 .../lucene/codecs/lucene50/TestForUtil.java     |  8 +++----
 .../lucene/index/Test4GBStoredFields.java       |  4 ++--
 .../org/apache/lucene/search/TestBooleanOr.java |  4 ++--
 .../lucene/util/TestTimSorterWorstCase.java     |  6 +++---
 .../lucene/util/automaton/TestOperations.java   |  4 ++--
 .../lucene/util/packed/TestPackedInts.java      | 14 ++++++-------
 lucene/ivy-versions.properties                  |  2 +-
 .../apache/lucene/search/join/TestJoinUtil.java | 18 ++++++++--------
 .../randomizedtesting-runner-2.3.4.jar.sha1     |  1 -
 .../randomizedtesting-runner-2.4.0.jar.sha1     |  1 +
 .../function/TestDocValuesFieldSources.java     |  4 ++--
 .../apache/lucene/spatial3d/TestGeo3DPoint.java | 10 ++++-----
 .../codecs/compressing/CompressingCodec.java    | 14 ++++++-------
 .../index/BaseStoredFieldsFormatTestCase.java   | 22 ++++++++++----------
 .../lucene/search/AssertingBulkScorer.java      |  4 ++--
 .../lucene/search/RandomApproximationQuery.java |  4 ++--
 .../java/org/apache/lucene/util/TestUtil.java   |  6 +++---
 .../TestCompressingStoredFieldsFormat.java      |  4 ++--
 solr/licenses/junit4-ant-2.3.4.jar.sha1         |  1 -
 solr/licenses/junit4-ant-2.4.0.jar.sha1         |  1 +
 .../randomizedtesting-runner-2.3.4.jar.sha1     |  1 -
 .../randomizedtesting-runner-2.4.0.jar.sha1     |  1 +
 25 files changed, 78 insertions(+), 76 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 2bd4c28..c4b3521 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -118,6 +118,8 @@ Optimizations
 
 Other
 
+* LUCENE-7513: Upgrade randomizedtesting to 2.4.0. (Dawid Weiss)
+
 * LUCENE-7452: Block join query exception suggests how to find a doc, which 
  violates orthogonality requirement. (Mikhail Khludnev)
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/lucene/core/src/test/org/apache/lucene/codecs/compressing/AbstractTestCompressionMode.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/compressing/AbstractTestCompressionMode.java b/lucene/core/src/test/org/apache/lucene/codecs/compressing/AbstractTestCompressionMode.java
index c8d244b..045b19a 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/compressing/AbstractTestCompressionMode.java
+++ b/lucene/core/src/test/org/apache/lucene/codecs/compressing/AbstractTestCompressionMode.java
@@ -26,7 +26,7 @@ import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
 
-import com.carrotsearch.randomizedtesting.generators.RandomInts;
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 
 public abstract class AbstractTestCompressionMode extends LuceneTestCase {
 
@@ -35,7 +35,7 @@ public abstract class AbstractTestCompressionMode extends LuceneTestCase {
   static byte[] randomArray() {
     final int max = random().nextBoolean()
         ? random().nextInt(4)
-        : random().nextInt(256);
+        : random().nextInt(255);
     final int length = random().nextBoolean()
         ? random().nextInt(20)
         : random().nextInt(192 * 1024);
@@ -45,7 +45,7 @@ public abstract class AbstractTestCompressionMode extends LuceneTestCase {
   static byte[] randomArray(int length, int max) {
     final byte[] arr = new byte[length];
     for (int i = 0; i < arr.length; ++i) {
-      arr[i] = (byte) RandomInts.randomIntBetween(random(), 0, max);
+      arr[i] = (byte) RandomNumbers.randomIntBetween(random(), 0, max);
     }
     return arr;
   }
@@ -130,7 +130,7 @@ public abstract class AbstractTestCompressionMode extends LuceneTestCase {
   }
 
   public void testIncompressible() throws IOException {
-    final byte[] decompressed = new byte[RandomInts.randomIntBetween(random(), 20, 256)];
+    final byte[] decompressed = new byte[RandomNumbers.randomIntBetween(random(), 20, 256)];
     for (int i = 0; i < decompressed.length; ++i) {
       decompressed[i] = (byte) i;
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/lucene/core/src/test/org/apache/lucene/codecs/compressing/AbstractTestLZ4CompressionMode.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/compressing/AbstractTestLZ4CompressionMode.java b/lucene/core/src/test/org/apache/lucene/codecs/compressing/AbstractTestLZ4CompressionMode.java
index 5a2801e..74ca047 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/compressing/AbstractTestLZ4CompressionMode.java
+++ b/lucene/core/src/test/org/apache/lucene/codecs/compressing/AbstractTestLZ4CompressionMode.java
@@ -20,7 +20,7 @@ package org.apache.lucene.codecs.compressing;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 
-import com.carrotsearch.randomizedtesting.generators.RandomInts;
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 
 public abstract class AbstractTestLZ4CompressionMode extends AbstractTestCompressionMode {
 
@@ -88,7 +88,7 @@ public abstract class AbstractTestLZ4CompressionMode extends AbstractTestCompres
 
   public void testLongMatchs() throws IOException {
     // match length >= 20
-    final byte[] decompressed = new byte[RandomInts.randomIntBetween(random(), 300, 1024)];
+    final byte[] decompressed = new byte[RandomNumbers.randomIntBetween(random(), 300, 1024)];
     for (int i = 0; i < decompressed.length; ++i) {
       decompressed[i] = (byte) i;
     }
@@ -97,10 +97,10 @@ public abstract class AbstractTestLZ4CompressionMode extends AbstractTestCompres
 
   public void testLongLiterals() throws IOException {
     // long literals (length >= 16) which are not the last literals
-    final byte[] decompressed = randomArray(RandomInts.randomIntBetween(random(), 400, 1024), 256);
+    final byte[] decompressed = randomArray(RandomNumbers.randomIntBetween(random(), 400, 1024), 256);
     final int matchRef = random().nextInt(30);
-    final int matchOff = RandomInts.randomIntBetween(random(), decompressed.length - 40, decompressed.length - 20);
-    final int matchLength = RandomInts.randomIntBetween(random(), 4, 10);
+    final int matchOff = RandomNumbers.randomIntBetween(random(), decompressed.length - 40, decompressed.length - 20);
+    final int matchLength = RandomNumbers.randomIntBetween(random(), 4, 10);
     System.arraycopy(decompressed, matchRef, decompressed, matchOff, matchLength);
     test(decompressed);
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestForUtil.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestForUtil.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestForUtil.java
index 5d1e726..3fe003e 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestForUtil.java
+++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestForUtil.java
@@ -32,24 +32,24 @@ import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.packed.PackedInts;
 
-import com.carrotsearch.randomizedtesting.generators.RandomInts;
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 
 public class TestForUtil extends LuceneTestCase {
 
   public void testEncodeDecode() throws IOException {
-    final int iterations = RandomInts.randomIntBetween(random(), 1, 1000);
+    final int iterations = RandomNumbers.randomIntBetween(random(), 1, 1000);
     final float acceptableOverheadRatio = random().nextFloat();
     final int[] values = new int[(iterations - 1) * BLOCK_SIZE + ForUtil.MAX_DATA_SIZE];
     for (int i = 0; i < iterations; ++i) {
       final int bpv = random().nextInt(32);
       if (bpv == 0) {
-        final int value = RandomInts.randomIntBetween(random(), 0, Integer.MAX_VALUE);
+        final int value = RandomNumbers.randomIntBetween(random(), 0, Integer.MAX_VALUE);
         for (int j = 0; j < BLOCK_SIZE; ++j) {
           values[i * BLOCK_SIZE + j] = value;
         }
       } else {
         for (int j = 0; j < BLOCK_SIZE; ++j) {
-          values[i * BLOCK_SIZE + j] = RandomInts.randomIntBetween(random(),
+          values[i * BLOCK_SIZE + j] = RandomNumbers.randomIntBetween(random(),
               0, (int) PackedInts.maxValue(bpv));
         }
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/lucene/core/src/test/org/apache/lucene/index/Test4GBStoredFields.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/Test4GBStoredFields.java b/lucene/core/src/test/org/apache/lucene/index/Test4GBStoredFields.java
index 3c147b6..7e173c8 100644
--- a/lucene/core/src/test/org/apache/lucene/index/Test4GBStoredFields.java
+++ b/lucene/core/src/test/org/apache/lucene/index/Test4GBStoredFields.java
@@ -30,7 +30,7 @@ import org.apache.lucene.util.TimeUnits;
 import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
 
 import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
-import com.carrotsearch.randomizedtesting.generators.RandomInts;
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 
 /**
  * This test creates an index with one segment that is a little larger than 4GB.
@@ -69,7 +69,7 @@ public class Test4GBStoredFields extends LuceneTestCase {
     final FieldType ft = new FieldType();
     ft.setStored(true);
     ft.freeze();
-    final int valueLength = RandomInts.randomIntBetween(random(), 1 << 13, 1 << 20);
+    final int valueLength = RandomNumbers.randomIntBetween(random(), 1 << 13, 1 << 20);
     final byte[] value = new byte[valueLength];
     for (int i = 0; i < valueLength; ++i) {
       // random so that even compressing codecs can't compress it

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/lucene/core/src/test/org/apache/lucene/search/TestBooleanOr.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBooleanOr.java b/lucene/core/src/test/org/apache/lucene/search/TestBooleanOr.java
index 428b850..4e232c4 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestBooleanOr.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestBooleanOr.java
@@ -34,7 +34,7 @@ import org.apache.lucene.util.FixedBitSet;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
 
-import com.carrotsearch.randomizedtesting.generators.RandomInts;
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 
 public class TestBooleanOr extends LuceneTestCase {
 
@@ -239,7 +239,7 @@ public class TestBooleanOr extends LuceneTestCase {
         if (i == matches.length) {
           return DocIdSetIterator.NO_MORE_DOCS;
         }
-        return RandomInts.randomIntBetween(random(), max, matches[i]);
+        return RandomNumbers.randomIntBetween(random(), max, matches[i]);
       }
       @Override
       public long cost() {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/lucene/core/src/test/org/apache/lucene/util/TestTimSorterWorstCase.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/util/TestTimSorterWorstCase.java b/lucene/core/src/test/org/apache/lucene/util/TestTimSorterWorstCase.java
index 5ab397d..68c5489 100644
--- a/lucene/core/src/test/org/apache/lucene/util/TestTimSorterWorstCase.java
+++ b/lucene/core/src/test/org/apache/lucene/util/TestTimSorterWorstCase.java
@@ -23,7 +23,7 @@ import java.util.List;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.lucene.util.packed.PackedInts;
 
-import com.carrotsearch.randomizedtesting.generators.RandomInts;
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 
 @Slow
 public class TestTimSorterWorstCase extends LuceneTestCase {
@@ -33,9 +33,9 @@ public class TestTimSorterWorstCase extends LuceneTestCase {
     // but not so big we blow up available heap.
     final int length;
     if (TEST_NIGHTLY) {
-      length = RandomInts.randomIntBetween(random(), 140000000, 400000000);
+      length = RandomNumbers.randomIntBetween(random(), 140000000, 400000000);
     } else {
-      length = RandomInts.randomIntBetween(random(), 140000000, 200000000);
+      length = RandomNumbers.randomIntBetween(random(), 140000000, 200000000);
     }
     final PackedInts.Mutable arr = generateWorstCaseArray(length);
     new TimSorter(0) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/lucene/core/src/test/org/apache/lucene/util/automaton/TestOperations.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/util/automaton/TestOperations.java b/lucene/core/src/test/org/apache/lucene/util/automaton/TestOperations.java
index c9489e9..01517fc 100644
--- a/lucene/core/src/test/org/apache/lucene/util/automaton/TestOperations.java
+++ b/lucene/core/src/test/org/apache/lucene/util/automaton/TestOperations.java
@@ -21,7 +21,7 @@ import java.util.*;
 
 import org.apache.lucene.util.*;
 
-import com.carrotsearch.randomizedtesting.generators.RandomInts;
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 
 import static org.apache.lucene.util.automaton.Operations.DEFAULT_MAX_DETERMINIZED_STATES;
 
@@ -29,7 +29,7 @@ public class TestOperations extends LuceneTestCase {
   /** Test string union. */
   public void testStringUnion() {
     List<BytesRef> strings = new ArrayList<>();
-    for (int i = RandomInts.randomIntBetween(random(), 0, 1000); --i >= 0;) {
+    for (int i = RandomNumbers.randomIntBetween(random(), 0, 1000); --i >= 0;) {
       strings.add(new BytesRef(TestUtil.randomUnicodeString(random())));
     }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/lucene/core/src/test/org/apache/lucene/util/packed/TestPackedInts.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/util/packed/TestPackedInts.java b/lucene/core/src/test/org/apache/lucene/util/packed/TestPackedInts.java
index ce3447e..a675e0b 100644
--- a/lucene/core/src/test/org/apache/lucene/util/packed/TestPackedInts.java
+++ b/lucene/core/src/test/org/apache/lucene/util/packed/TestPackedInts.java
@@ -42,14 +42,14 @@ import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.packed.PackedInts.Reader;
 import org.junit.Ignore;
 
-import com.carrotsearch.randomizedtesting.generators.RandomInts;
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 
 public class TestPackedInts extends LuceneTestCase {
 
   public void testByteCount() {
     final int iters = atLeast(3);
     for (int i = 0; i < iters; ++i) {
-      final int valueCount = RandomInts.randomIntBetween(random(), 1, Integer.MAX_VALUE);
+      final int valueCount = RandomNumbers.randomIntBetween(random(), 1, Integer.MAX_VALUE);
       for (PackedInts.Format format : PackedInts.Format.values()) {
         for (int bpv = 1; bpv <= 64; ++bpv) {
           final long byteCount = format.byteCount(PackedInts.VERSION_CURRENT, valueCount, bpv);
@@ -206,7 +206,7 @@ public class TestPackedInts extends LuceneTestCase {
 
   public void testEndPointer() throws IOException {
     final Directory dir = newDirectory();
-    final int valueCount = RandomInts.randomIntBetween(random(), 1, 1000);
+    final int valueCount = RandomNumbers.randomIntBetween(random(), 1, 1000);
     final IndexOutput out = dir.createOutput("tests.bin", newIOContext(random()));
     for (int i = 0; i < valueCount; ++i) {
       out.writeLong(0);
@@ -224,7 +224,7 @@ public class TestPackedInts extends LuceneTestCase {
 
           // test iterator
           in.seek(0L);
-          final PackedInts.ReaderIterator it = PackedInts.getReaderIteratorNoHeader(in, format, version, valueCount, bpv, RandomInts.randomIntBetween(random(), 1, 1<<16));
+          final PackedInts.ReaderIterator it = PackedInts.getReaderIteratorNoHeader(in, format, version, valueCount, bpv, RandomNumbers.randomIntBetween(random(), 1, 1<<16));
           for (int i = 0; i < valueCount; ++i) {
             it.next();
           }
@@ -981,9 +981,9 @@ public class TestPackedInts extends LuceneTestCase {
   }
 
   public void testPackedLongValues() {
-    final long[] arr = new long[RandomInts.randomIntBetween(random(), 1, TEST_NIGHTLY ? 1000000 : 100000)];
+    final long[] arr = new long[RandomNumbers.randomIntBetween(random(), 1, TEST_NIGHTLY ? 1000000 : 100000)];
     float[] ratioOptions = new float[]{PackedInts.DEFAULT, PackedInts.COMPACT, PackedInts.FAST};
-    for (int bpv : new int[]{0, 1, 63, 64, RandomInts.randomIntBetween(random(), 2, 62)}) {
+    for (int bpv : new int[]{0, 1, 63, 64, RandomNumbers.randomIntBetween(random(), 2, 62)}) {
       for (DataType dataType : Arrays.asList(DataType.DELTA_PACKED)) {
         final int pageSize = 1 << TestUtil.nextInt(random(), 6, 20);
         float acceptableOverheadRatio = ratioOptions[TestUtil.nextInt(random(), 0, ratioOptions.length - 1)];
@@ -1063,7 +1063,7 @@ public class TestPackedInts extends LuceneTestCase {
     final int[] bitsPerValues = new int[longs.length];
     final boolean[] skip = new boolean[longs.length];
     for (int i = 0; i < longs.length; ++i) {
-      final int bpv = RandomInts.randomIntBetween(random(), 1, 64);
+      final int bpv = RandomNumbers.randomIntBetween(random(), 1, 64);
       bitsPerValues[i] = random().nextBoolean() ? bpv : TestUtil.nextInt(random(), bpv, 64);
       if (bpv == 64) {
         longs[i] = random().nextLong();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/lucene/ivy-versions.properties
----------------------------------------------------------------------
diff --git a/lucene/ivy-versions.properties b/lucene/ivy-versions.properties
index 7f8ac12..b92112d 100644
--- a/lucene/ivy-versions.properties
+++ b/lucene/ivy-versions.properties
@@ -7,7 +7,7 @@
 /cglib/cglib-nodep = 2.2
 /com.adobe.xmp/xmpcore = 5.1.2
 
-com.carrotsearch.randomizedtesting.version = 2.3.4
+com.carrotsearch.randomizedtesting.version = 2.4.0
 /com.carrotsearch.randomizedtesting/junit4-ant = ${com.carrotsearch.randomizedtesting.version}
 /com.carrotsearch.randomizedtesting/randomizedtesting-runner = ${com.carrotsearch.randomizedtesting.version}
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java b/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java
index 2bd8381..6e20f23 100644
--- a/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java
+++ b/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java
@@ -95,7 +95,7 @@ import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.packed.PackedInts;
 import org.junit.Test;
 
-import com.carrotsearch.randomizedtesting.generators.RandomInts;
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 
 public class TestJoinUtil extends LuceneTestCase {
@@ -517,7 +517,7 @@ public class TestJoinUtil extends LuceneTestCase {
 
     Map<String, Float> lowestScoresPerParent = new HashMap<>();
     Map<String, Float> highestScoresPerParent = new HashMap<>();
-    int numParents = RandomInts.randomIntBetween(random(), 16, 64);
+    int numParents = RandomNumbers.randomIntBetween(random(), 16, 64);
     for (int p = 0; p < numParents; p++) {
       String parentId = Integer.toString(p);
       Document parentDoc = new Document();
@@ -525,7 +525,7 @@ public class TestJoinUtil extends LuceneTestCase {
       parentDoc.add(new StringField("type", "to", Field.Store.NO));
       parentDoc.add(new SortedDocValuesField("join_field", new BytesRef(parentId)));
       iw.addDocument(parentDoc);
-      int numChildren = RandomInts.randomIntBetween(random(), 2, 16);
+      int numChildren = RandomNumbers.randomIntBetween(random(), 2, 16);
       int lowest = Integer.MAX_VALUE;
       int highest = Integer.MIN_VALUE;
       for (int c = 0; c < numChildren; c++) {
@@ -589,7 +589,7 @@ public class TestJoinUtil extends LuceneTestCase {
 
     int minChildDocsPerParent = 2;
     int maxChildDocsPerParent = 16;
-    int numParents = RandomInts.randomIntBetween(random(), 16, 64);
+    int numParents = RandomNumbers.randomIntBetween(random(), 16, 64);
     int[] childDocsPerParent = new int[numParents];
     for (int p = 0; p < numParents; p++) {
       String parentId = Integer.toString(p);
@@ -598,7 +598,7 @@ public class TestJoinUtil extends LuceneTestCase {
       parentDoc.add(new StringField("type", "to", Field.Store.NO));
       parentDoc.add(new SortedDocValuesField("join_field", new BytesRef(parentId)));
       iw.addDocument(parentDoc);
-      int numChildren = RandomInts.randomIntBetween(random(), minChildDocsPerParent, maxChildDocsPerParent);
+      int numChildren = RandomNumbers.randomIntBetween(random(), minChildDocsPerParent, maxChildDocsPerParent);
       childDocsPerParent[p] = numChildren;
       for (int c = 0; c < numChildren; c++) {
         String childId = Integer.toString(p + c);
@@ -622,11 +622,11 @@ public class TestJoinUtil extends LuceneTestCase {
     Query fromQuery = new TermQuery(new Term("type", "from"));
     Query toQuery = new TermQuery(new Term("type", "to"));
 
-    int iters = RandomInts.randomIntBetween(random(), 3, 9);
+    int iters = RandomNumbers.randomIntBetween(random(), 3, 9);
     for (int i = 1; i <= iters; i++) {
       final ScoreMode scoreMode = ScoreMode.values()[random().nextInt(ScoreMode.values().length)];
-      int min = RandomInts.randomIntBetween(random(), minChildDocsPerParent, maxChildDocsPerParent - 1);
-      int max = RandomInts.randomIntBetween(random(), min, maxChildDocsPerParent);
+      int min = RandomNumbers.randomIntBetween(random(), minChildDocsPerParent, maxChildDocsPerParent - 1);
+      int max = RandomNumbers.randomIntBetween(random(), min, maxChildDocsPerParent);
       if (VERBOSE) {
         System.out.println("iter=" + i);
         System.out.println("scoreMode=" + scoreMode);
@@ -1067,7 +1067,7 @@ public class TestJoinUtil extends LuceneTestCase {
     );
 
     IndexIterationContext context = new IndexIterationContext();
-    int numRandomValues = nDocs / RandomInts.randomIntBetween(random, 1, 4);
+    int numRandomValues = nDocs / RandomNumbers.randomIntBetween(random, 1, 4);
     context.randomUniqueValues = new String[numRandomValues];
     Set<String> trackSet = new HashSet<>();
     context.randomFrom = new boolean[numRandomValues];

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/lucene/licenses/randomizedtesting-runner-2.3.4.jar.sha1
----------------------------------------------------------------------
diff --git a/lucene/licenses/randomizedtesting-runner-2.3.4.jar.sha1 b/lucene/licenses/randomizedtesting-runner-2.3.4.jar.sha1
deleted file mode 100644
index 000702c..0000000
--- a/lucene/licenses/randomizedtesting-runner-2.3.4.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9f4c0e1de0837092115c89a38c12ae57db6983e7

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/lucene/licenses/randomizedtesting-runner-2.4.0.jar.sha1
----------------------------------------------------------------------
diff --git a/lucene/licenses/randomizedtesting-runner-2.4.0.jar.sha1 b/lucene/licenses/randomizedtesting-runner-2.4.0.jar.sha1
new file mode 100644
index 0000000..798d11c
--- /dev/null
+++ b/lucene/licenses/randomizedtesting-runner-2.4.0.jar.sha1
@@ -0,0 +1 @@
+0222eb23dd6f45541acf6a5ac69cd9e9bdce25d2

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/lucene/queries/src/test/org/apache/lucene/queries/function/TestDocValuesFieldSources.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/function/TestDocValuesFieldSources.java b/lucene/queries/src/test/org/apache/lucene/queries/function/TestDocValuesFieldSources.java
index c7738e9..7af2d49 100644
--- a/lucene/queries/src/test/org/apache/lucene/queries/function/TestDocValuesFieldSources.java
+++ b/lucene/queries/src/test/org/apache/lucene/queries/function/TestDocValuesFieldSources.java
@@ -38,7 +38,7 @@ import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.packed.PackedInts;
 
-import com.carrotsearch.randomizedtesting.generators.RandomInts;
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 
 
 public class TestDocValuesFieldSources extends LuceneTestCase {
@@ -81,7 +81,7 @@ public class TestDocValuesFieldSources extends LuceneTestCase {
           f.setBytesValue(new BytesRef((String) vals[i]));
           break;
         case NUMERIC:
-          final int bitsPerValue = RandomInts.randomIntBetween(random(), 1, 31); // keep it an int
+          final int bitsPerValue = RandomNumbers.randomIntBetween(random(), 1, 31); // keep it an int
           vals[i] = (long) random().nextInt((int) PackedInts.maxValue(bitsPerValue));
           f.setLongValue((Long) vals[i]);
           break;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java
----------------------------------------------------------------------
diff --git a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java
index 2af096a..295d63b 100644
--- a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java
+++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java
@@ -80,7 +80,7 @@ import org.apache.lucene.util.NumericUtils;
 import org.apache.lucene.util.StringHelper;
 import org.apache.lucene.util.TestUtil;
 
-import com.carrotsearch.randomizedtesting.generators.RandomInts;
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 
 public class TestGeo3DPoint extends LuceneTestCase {
 
@@ -206,7 +206,7 @@ public class TestGeo3DPoint extends LuceneTestCase {
 
     int iters = atLeast(10);
 
-    int recurseDepth = RandomInts.randomIntBetween(random(), 5, 15);
+    int recurseDepth = RandomNumbers.randomIntBetween(random(), 5, 15);
 
     iters = atLeast(50);
     
@@ -358,7 +358,7 @@ public class TestGeo3DPoint extends LuceneTestCase {
           case 0:
             // Split on X:
             {
-              int splitValue = RandomInts.randomIntBetween(random(), cell.xMinEnc, cell.xMaxEnc);
+              int splitValue = RandomNumbers.randomIntBetween(random(), cell.xMinEnc, cell.xMaxEnc);
               if (VERBOSE) {
                 log.println("    now split on x=" + splitValue);
               }
@@ -384,7 +384,7 @@ public class TestGeo3DPoint extends LuceneTestCase {
           case 1:
             // Split on Y:
             {
-              int splitValue = RandomInts.randomIntBetween(random(), cell.yMinEnc, cell.yMaxEnc);
+              int splitValue = RandomNumbers.randomIntBetween(random(), cell.yMinEnc, cell.yMaxEnc);
               if (VERBOSE) {
                 log.println("    now split on y=" + splitValue);
               }
@@ -410,7 +410,7 @@ public class TestGeo3DPoint extends LuceneTestCase {
           case 2:
             // Split on Z:
             {
-              int splitValue = RandomInts.randomIntBetween(random(), cell.zMinEnc, cell.zMaxEnc);
+              int splitValue = RandomNumbers.randomIntBetween(random(), cell.zMinEnc, cell.zMaxEnc);
               if (VERBOSE) {
                 log.println("    now split on z=" + splitValue);
               }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/lucene/test-framework/src/java/org/apache/lucene/codecs/compressing/CompressingCodec.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/codecs/compressing/CompressingCodec.java b/lucene/test-framework/src/java/org/apache/lucene/codecs/compressing/CompressingCodec.java
index ca42881..4fd5e16 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/codecs/compressing/CompressingCodec.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/codecs/compressing/CompressingCodec.java
@@ -24,7 +24,7 @@ import org.apache.lucene.codecs.TermVectorsFormat;
 import org.apache.lucene.codecs.compressing.dummy.DummyCompressingCodec;
 import org.apache.lucene.util.TestUtil;
 
-import com.carrotsearch.randomizedtesting.generators.RandomInts;
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 
 /**
  * A codec that uses {@link CompressingStoredFieldsFormat} for its stored
@@ -55,9 +55,9 @@ public abstract class CompressingCodec extends FilterCodec {
    * suffix
    */
   public static CompressingCodec randomInstance(Random random) {
-    final int chunkSize = random.nextBoolean() ? RandomInts.randomIntBetween(random, 1, 10) : RandomInts.randomIntBetween(random, 1, 1 << 15);
-    final int chunkDocs = random.nextBoolean() ? RandomInts.randomIntBetween(random, 1, 10) : RandomInts.randomIntBetween(random, 64, 1024);
-    final int blockSize = random.nextBoolean() ? RandomInts.randomIntBetween(random, 1, 10) : RandomInts.randomIntBetween(random, 1, 1024);
+    final int chunkSize = random.nextBoolean() ? RandomNumbers.randomIntBetween(random, 1, 10) : RandomNumbers.randomIntBetween(random, 1, 1 << 15);
+    final int chunkDocs = random.nextBoolean() ? RandomNumbers.randomIntBetween(random, 1, 10) : RandomNumbers.randomIntBetween(random, 64, 1024);
+    final int blockSize = random.nextBoolean() ? RandomNumbers.randomIntBetween(random, 1, 10) : RandomNumbers.randomIntBetween(random, 1, 1024);
     return randomInstance(random, chunkSize, chunkDocs, false, blockSize);
   }
 
@@ -79,10 +79,10 @@ public abstract class CompressingCodec extends FilterCodec {
    */
   public static CompressingCodec randomInstance(Random random, boolean withSegmentSuffix) {
     return randomInstance(random, 
-                          RandomInts.randomIntBetween(random, 1, 1 << 15), 
-                          RandomInts.randomIntBetween(random, 64, 1024), 
+                          RandomNumbers.randomIntBetween(random, 1, 1 << 15), 
+                          RandomNumbers.randomIntBetween(random, 64, 1024), 
                           withSegmentSuffix,
-                          RandomInts.randomIntBetween(random, 1, 1024));
+                          RandomNumbers.randomIntBetween(random, 1, 1024));
   }
 
   private final CompressingStoredFieldsFormat storedFieldsFormat;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java
index 3868b16..a4d59de 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java
@@ -53,7 +53,7 @@ import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.TestUtil;
 
-import com.carrotsearch.randomizedtesting.generators.RandomInts;
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import com.carrotsearch.randomizedtesting.generators.RandomStrings;
 
@@ -320,7 +320,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
   public void testReadSkip() throws IOException {
     Directory dir = newDirectory();
     IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
-    iwConf.setMaxBufferedDocs(RandomInts.randomIntBetween(random(), 2, 30));
+    iwConf.setMaxBufferedDocs(RandomNumbers.randomIntBetween(random(), 2, 30));
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwConf);
     
     FieldType ft = new FieldType();
@@ -373,7 +373,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
   public void testEmptyDocs() throws IOException {
     Directory dir = newDirectory();
     IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
-    iwConf.setMaxBufferedDocs(RandomInts.randomIntBetween(random(), 2, 30));
+    iwConf.setMaxBufferedDocs(RandomNumbers.randomIntBetween(random(), 2, 30));
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwConf);
     
     // make sure that the fact that documents might be empty is not a problem
@@ -398,7 +398,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
   public void testConcurrentReads() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
-    iwConf.setMaxBufferedDocs(RandomInts.randomIntBetween(random(), 2, 30));
+    iwConf.setMaxBufferedDocs(RandomNumbers.randomIntBetween(random(), 2, 30));
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwConf);
     
     // make sure the readers are properly cloned
@@ -486,15 +486,15 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
     }
     Directory dir = newDirectory();
     IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
-    iwConf.setMaxBufferedDocs(RandomInts.randomIntBetween(random(), 2, 30));
+    iwConf.setMaxBufferedDocs(RandomNumbers.randomIntBetween(random(), 2, 30));
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwConf);
-    
+
     final int docCount = atLeast(200);
     final byte[][][] data = new byte [docCount][][];
     for (int i = 0; i < docCount; ++i) {
       final int fieldCount = rarely()
-          ? RandomInts.randomIntBetween(random(), 1, 500)
-          : RandomInts.randomIntBetween(random(), 1, 5);
+          ? RandomNumbers.randomIntBetween(random(), 1, 500)
+          : RandomNumbers.randomIntBetween(random(), 1, 5);
       data[i] = new byte[fieldCount][];
       for (int j = 0; j < fieldCount; ++j) {
         final int length = rarely()
@@ -669,7 +669,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
     // so if we get NRTCachingDir+SimpleText, we make massive stored fields and OOM (LUCENE-4484)
     Directory dir = new MockDirectoryWrapper(random(), new MMapDirectory(createTempDir("testBigDocuments")));
     IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
-    iwConf.setMaxBufferedDocs(RandomInts.randomIntBetween(random(), 2, 30));
+    iwConf.setMaxBufferedDocs(RandomNumbers.randomIntBetween(random(), 2, 30));
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwConf);
 
     if (dir instanceof MockDirectoryWrapper) {
@@ -689,12 +689,12 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
     onlyStored.setIndexOptions(IndexOptions.NONE);
 
     final Field smallField = new Field("fld", randomByteArray(random().nextInt(10), 256), onlyStored);
-    final int numFields = RandomInts.randomIntBetween(random(), 500000, 1000000);
+    final int numFields = RandomNumbers.randomIntBetween(random(), 500000, 1000000);
     for (int i = 0; i < numFields; ++i) {
       bigDoc1.add(smallField);
     }
 
-    final Field bigField = new Field("fld", randomByteArray(RandomInts.randomIntBetween(random(), 1000000, 5000000), 2), onlyStored);
+    final Field bigField = new Field("fld", randomByteArray(RandomNumbers.randomIntBetween(random(), 1000000, 5000000), 2), onlyStored);
     bigDoc2.add(bigField);
 
     final int numDocs = atLeast(5);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/lucene/test-framework/src/java/org/apache/lucene/search/AssertingBulkScorer.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingBulkScorer.java b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingBulkScorer.java
index cc3d0b3..6fcc563 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingBulkScorer.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingBulkScorer.java
@@ -22,7 +22,7 @@ import java.util.Random;
 import org.apache.lucene.index.PostingsEnum;
 import org.apache.lucene.util.Bits;
 
-import com.carrotsearch.randomizedtesting.generators.RandomInts;
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 
 /** Wraps a Scorer with additional checks */
 final class AssertingBulkScorer extends BulkScorer {
@@ -82,7 +82,7 @@ final class AssertingBulkScorer extends BulkScorer {
       assert next == DocIdSetIterator.NO_MORE_DOCS;
       return DocIdSetIterator.NO_MORE_DOCS;
     } else {
-      return RandomInts.randomIntBetween(random, max, next);
+      return RandomNumbers.randomIntBetween(random, max, next);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/lucene/test-framework/src/java/org/apache/lucene/search/RandomApproximationQuery.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/RandomApproximationQuery.java b/lucene/test-framework/src/java/org/apache/lucene/search/RandomApproximationQuery.java
index 0bf81e5..5c2873c 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/RandomApproximationQuery.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/RandomApproximationQuery.java
@@ -18,7 +18,7 @@ package org.apache.lucene.search;
 
 import java.io.IOException;
 import java.util.Random;
-import com.carrotsearch.randomizedtesting.generators.RandomInts;
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReaderContext;
@@ -185,7 +185,7 @@ public class RandomApproximationQuery extends Query {
       if (disi.docID() == NO_MORE_DOCS) {
         return doc = NO_MORE_DOCS;
       }
-      return doc = RandomInts.randomIntBetween(random, target, disi.docID());
+      return doc = RandomNumbers.randomIntBetween(random, target, disi.docID());
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
index ee20584..d3351ab 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
@@ -100,7 +100,7 @@ import org.apache.lucene.store.NoLockFactory;
 import org.apache.lucene.store.RAMDirectory;
 import org.junit.Assert;
 
-import com.carrotsearch.randomizedtesting.generators.RandomInts;
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 
 /**
@@ -429,7 +429,7 @@ public final class TestUtil {
 
   /** start and end are BOTH inclusive */
   public static int nextInt(Random r, int start, int end) {
-    return RandomInts.randomIntBetween(r, start, end);
+    return RandomNumbers.randomIntBetween(r, start, end);
   }
 
   /** start and end are BOTH inclusive */
@@ -580,7 +580,7 @@ public final class TestUtil {
     final StringBuilder regexp = new StringBuilder(maxLength);
     for (int i = nextInt(r, 0, maxLength); i > 0; i--) {
       if (r.nextBoolean()) {
-        regexp.append((char) RandomInts.randomIntBetween(r, 'a', 'z'));
+        regexp.append((char) RandomNumbers.randomIntBetween(r, 'a', 'z'));
       } else {
         regexp.append(RandomPicks.randomFrom(r, ops));
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java b/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java
index 4ca4ffb..ddce756 100644
--- a/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java
+++ b/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java
@@ -36,7 +36,7 @@ import org.apache.lucene.index.NoMergePolicy;
 import org.apache.lucene.store.ByteArrayDataInput;
 import org.apache.lucene.store.ByteArrayDataOutput;
 import org.apache.lucene.store.Directory;
-import com.carrotsearch.randomizedtesting.generators.RandomInts;
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 
 public class TestCompressingStoredFieldsFormat extends BaseStoredFieldsFormatTestCase {
 
@@ -52,7 +52,7 @@ public class TestCompressingStoredFieldsFormat extends BaseStoredFieldsFormatTes
   public void testDeletePartiallyWrittenFilesIfAbort() throws IOException {
     Directory dir = newDirectory();
     IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
-    iwConf.setMaxBufferedDocs(RandomInts.randomIntBetween(random(), 2, 30));
+    iwConf.setMaxBufferedDocs(RandomNumbers.randomIntBetween(random(), 2, 30));
     iwConf.setCodec(CompressingCodec.randomInstance(random()));
     // disable CFS because this test checks file names
     iwConf.setMergePolicy(newLogMergePolicy(false));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/solr/licenses/junit4-ant-2.3.4.jar.sha1
----------------------------------------------------------------------
diff --git a/solr/licenses/junit4-ant-2.3.4.jar.sha1 b/solr/licenses/junit4-ant-2.3.4.jar.sha1
deleted file mode 100644
index 1547f78..0000000
--- a/solr/licenses/junit4-ant-2.3.4.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-aafd329c4ddd57c539bdea9e4e5a4a688e142181

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/solr/licenses/junit4-ant-2.4.0.jar.sha1
----------------------------------------------------------------------
diff --git a/solr/licenses/junit4-ant-2.4.0.jar.sha1 b/solr/licenses/junit4-ant-2.4.0.jar.sha1
new file mode 100644
index 0000000..0f55c47
--- /dev/null
+++ b/solr/licenses/junit4-ant-2.4.0.jar.sha1
@@ -0,0 +1 @@
+35ed49c7aafcceac5b0b1cb157a07dd94e09515c

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/solr/licenses/randomizedtesting-runner-2.3.4.jar.sha1
----------------------------------------------------------------------
diff --git a/solr/licenses/randomizedtesting-runner-2.3.4.jar.sha1 b/solr/licenses/randomizedtesting-runner-2.3.4.jar.sha1
deleted file mode 100644
index 000702c..0000000
--- a/solr/licenses/randomizedtesting-runner-2.3.4.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9f4c0e1de0837092115c89a38c12ae57db6983e7

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a19ec194/solr/licenses/randomizedtesting-runner-2.4.0.jar.sha1
----------------------------------------------------------------------
diff --git a/solr/licenses/randomizedtesting-runner-2.4.0.jar.sha1 b/solr/licenses/randomizedtesting-runner-2.4.0.jar.sha1
new file mode 100644
index 0000000..798d11c
--- /dev/null
+++ b/solr/licenses/randomizedtesting-runner-2.4.0.jar.sha1
@@ -0,0 +1 @@
+0222eb23dd6f45541acf6a5ac69cd9e9bdce25d2

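The rename in the hunks above is mechanical: the 2.4.0 randomizedtesting runner exposes the integer helper as RandomNumbers instead of RandomInts, and every call site keeps the same (Random, min, max) shape. A minimal standalone sketch of the new entry point, assuming the 2.4.0 runner jar is on the classpath (the class name RandomNumbersExample is made up for illustration; per the TestUtil.nextInt javadoc quoted later in this patch, both bounds are inclusive):

import java.util.Random;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;

public class RandomNumbersExample {
  public static void main(String[] args) {
    Random random = new Random(42);
    // Same contract as the old RandomInts helper: both bounds are inclusive.
    int maxBufferedDocs = RandomNumbers.randomIntBetween(random, 2, 30);
    System.out.println("maxBufferedDocs=" + maxBufferedDocs);
  }
}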

[37/50] [abbrv] lucene-solr:jira/solr-8542-v2: In IndexWriter increase use of UNBOUNDED_MAX_MERGE_SEGMENTS (and decrease use of magic -1).

Posted by cp...@apache.org.
In IndexWriter increase use of UNBOUNDED_MAX_MERGE_SEGMENTS (and decrease use of magic -1).


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/0ec1f228
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/0ec1f228
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/0ec1f228

Branch: refs/heads/jira/solr-8542-v2
Commit: 0ec1f228ccd1f5155e512a6bf4e451279c8e13d1
Parents: 3488f12
Author: Christine Poerschke <cp...@apache.org>
Authored: Sat Oct 22 14:54:37 2016 -0500
Committer: Christine Poerschke <cp...@apache.org>
Committed: Sat Oct 22 14:54:37 2016 -0500

----------------------------------------------------------------------
 .../java/org/apache/lucene/index/IndexWriter.java   | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0ec1f228/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
index 7abf681..4517294 100644
--- a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
@@ -1898,7 +1898,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
             final int size = mergeExceptions.size();
             for(int i=0;i<size;i++) {
               final MergePolicy.OneMerge merge = mergeExceptions.get(i);
-              if (merge.maxNumSegments != -1) {
+              if (merge.maxNumSegments != UNBOUNDED_MAX_MERGE_SEGMENTS) {
                 throw new IOException("background merge hit exception: " + merge.segString(), merge.getException());
               }
             }
@@ -1926,12 +1926,12 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
    *  runningMerges are maxNumSegments merges. */
   private synchronized boolean maxNumSegmentsMergesPending() {
     for (final MergePolicy.OneMerge merge : pendingMerges) {
-      if (merge.maxNumSegments != -1)
+      if (merge.maxNumSegments != UNBOUNDED_MAX_MERGE_SEGMENTS)
         return true;
     }
 
     for (final MergePolicy.OneMerge merge : runningMerges) {
-      if (merge.maxNumSegments != -1)
+      if (merge.maxNumSegments != UNBOUNDED_MAX_MERGE_SEGMENTS)
         return true;
     }
 
@@ -2059,7 +2059,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
     // point, try again to log the config here:
     messageState();
 
-    assert maxNumSegments == -1 || maxNumSegments > 0;
+    assert maxNumSegments == UNBOUNDED_MAX_MERGE_SEGMENTS || maxNumSegments > 0;
     assert trigger != null;
     if (stopMerges) {
       return false;
@@ -2771,7 +2771,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
       // Best-effort up front check:
       testReserveDocs(numDocs);
 
-      final IOContext context = new IOContext(new MergeInfo(Math.toIntExact(numDocs), -1, false, -1));
+      final IOContext context = new IOContext(new MergeInfo(Math.toIntExact(numDocs), -1, false, UNBOUNDED_MAX_MERGE_SEGMENTS));
 
       // TODO: somehow we should fix this merge so it's
       // abortable so that IW.close(false) is able to stop it
@@ -3797,7 +3797,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
       infoStream.message("IW", "after commitMerge: " + segString());
     }
 
-    if (merge.maxNumSegments != -1 && !dropSegment) {
+    if (merge.maxNumSegments != UNBOUNDED_MAX_MERGE_SEGMENTS && !dropSegment) {
       // cascade the forceMerge:
       if (!segmentsToMerge.containsKey(merge.info)) {
         segmentsToMerge.put(merge.info, Boolean.FALSE);
@@ -3876,7 +3876,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
             if (infoStream.isEnabled("IW")) {
               infoStream.message("IW", "hit exception during merge");
             }
-          } else if (merge.rateLimiter.getAbort() == false && (merge.maxNumSegments != -1 || (!closed && !closing))) {
+          } else if (merge.rateLimiter.getAbort() == false && (merge.maxNumSegments != UNBOUNDED_MAX_MERGE_SEGMENTS || (!closed && !closing))) {
             // This merge (and, generally, any change to the
             // segments) may now enable new merges, so we call
             // merge policy & update pending merges.
@@ -4015,7 +4015,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
     testPoint("startMergeInit");
 
     assert merge.registerDone;
-    assert merge.maxNumSegments == -1 || merge.maxNumSegments > 0;
+    assert merge.maxNumSegments == UNBOUNDED_MAX_MERGE_SEGMENTS || merge.maxNumSegments > 0;
 
     if (tragedy != null) {
       throw new IllegalStateException("this writer hit an unrecoverable error; cannot merge", tragedy);

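The substitution above is a readability refactor only: the sentinel -1 that marks "no forced segment count" now reads as UNBOUNDED_MAX_MERGE_SEGMENTS at each check, with no behavior change. A toy sketch of the same pattern, using made-up stand-ins rather than the real IndexWriter/OneMerge classes:

// Illustration only -- stripped-down stand-ins for the fields touched above,
// not the real Lucene classes.
public class UnboundedMergeExample {
  // Sentinel meaning "no forced segment count"; same role as the magic -1.
  static final int UNBOUNDED_MAX_MERGE_SEGMENTS = -1;

  static class OneMerge {
    int maxNumSegments = UNBOUNDED_MAX_MERGE_SEGMENTS;
  }

  static boolean isForcedMerge(OneMerge merge) {
    // Reads as intent ("is this a forceMerge?") rather than "is it not -1?".
    return merge.maxNumSegments != UNBOUNDED_MAX_MERGE_SEGMENTS;
  }

  public static void main(String[] args) {
    OneMerge merge = new OneMerge();
    System.out.println(isForcedMerge(merge)); // false
    merge.maxNumSegments = 1;
    System.out.println(isForcedMerge(merge)); // true
  }
}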

[42/50] [abbrv] lucene-solr:jira/solr-8542-v2: Merge remote-tracking branch 'origin/master'

Posted by cp...@apache.org.
Merge remote-tracking branch 'origin/master'


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c9de11d0
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c9de11d0
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c9de11d0

Branch: refs/heads/jira/solr-8542-v2
Commit: c9de11d02464a146c6ab2aa561622876d081a070
Parents: 184b0f2 9aca4c9
Author: Noble Paul <no...@apache.org>
Authored: Mon Oct 24 16:45:53 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Mon Oct 24 16:45:53 2016 +0530

----------------------------------------------------------------------
 .../codecs/lucene53/Lucene53NormsProducer.java  |   6 +
 .../lucene54/Lucene54DocValuesProducer.java     |  65 +++-
 .../lucene54/TestLucene54DocValuesFormat.java   |   5 +-
 .../simpletext/SimpleTextDocValuesReader.java   |  95 +++++-
 .../simpletext/SimpleTextDocValuesWriter.java   |   9 +
 .../apache/lucene/codecs/DocValuesConsumer.java |  25 ++
 .../org/apache/lucene/codecs/NormsConsumer.java |   5 +
 .../lucene/codecs/lucene70/IndexedDISI.java     |  88 +++--
 .../lucene70/Lucene70DocValuesProducer.java     |  62 ++++
 .../codecs/lucene70/Lucene70NormsProducer.java  |  11 +
 .../apache/lucene/index/BinaryDocValues.java    |   5 +-
 .../lucene/index/BinaryDocValuesWriter.java     |   5 +
 .../org/apache/lucene/index/CheckIndex.java     |  81 ++++-
 .../java/org/apache/lucene/index/DocValues.java |  95 +++---
 .../apache/lucene/index/DocValuesIterator.java  |  33 ++
 .../lucene/index/FilterBinaryDocValues.java     |   5 +
 .../lucene/index/FilterNumericDocValues.java    |   5 +
 .../index/LegacyBinaryDocValuesWrapper.java     |   8 +
 .../index/LegacyNumericDocValuesWrapper.java    |   9 +
 .../index/LegacySortedDocValuesWrapper.java     |   9 +
 .../LegacySortedNumericDocValuesWrapper.java    |   9 +
 .../index/LegacySortedSetDocValuesWrapper.java  |  10 +
 .../org/apache/lucene/index/MultiDocValues.java | 125 +++++++
 .../apache/lucene/index/NormValuesWriter.java   |   5 +
 .../apache/lucene/index/NumericDocValues.java   |   7 +-
 .../lucene/index/NumericDocValuesWriter.java    |   5 +
 .../apache/lucene/index/ReadersAndUpdates.java  |  10 +
 .../index/SingletonSortedNumericDocValues.java  |  24 +-
 .../index/SingletonSortedSetDocValues.java      |  18 +-
 .../apache/lucene/index/SortedDocValues.java    |   3 +
 .../lucene/index/SortedDocValuesWriter.java     |   5 +
 .../lucene/index/SortedNumericDocValues.java    |   6 +-
 .../index/SortedNumericDocValuesWriter.java     |   5 +
 .../apache/lucene/index/SortedSetDocValues.java |   5 +-
 .../lucene/index/SortedSetDocValuesWriter.java  |   5 +
 .../apache/lucene/index/SortingLeafReader.java  |  32 ++
 .../apache/lucene/search/FieldComparator.java   |  40 +--
 .../lucene/search/SortedNumericSelector.java    |  18 +
 .../apache/lucene/search/SortedSetSelector.java |  36 ++
 .../search/similarities/BM25Similarity.java     |   8 +-
 .../search/similarities/SimilarityBase.java     |   6 +-
 .../search/similarities/TFIDFSimilarity.java    |   8 +-
 .../lucene/codecs/lucene70/TestIndexedDISI.java |  28 +-
 .../lucene70/TestLucene70DocValuesFormat.java   |   4 +-
 .../SortedSetDocValuesFacetCounts.java          |  15 +-
 .../lucene/search/join/BlockJoinSelector.java   | 104 +++++-
 .../search/join/GenericTermsCollector.java      |   7 +
 .../search/join/TestBlockJoinSelector.java      |  12 +
 .../apache/lucene/index/memory/MemoryIndex.java |   6 +
 .../search/TestDiversifiedTopDocsCollector.java |   9 +
 .../lucene/index/AssertingLeafReader.java       |  89 ++++-
 .../index/BaseDocValuesFormatTestCase.java      | 331 ++++++++++++-------
 .../index/BaseIndexFileFormatTestCase.java      |  12 +
 .../lucene/index/BaseNormsFormatTestCase.java   | 101 +++---
 .../apache/solr/request/DocValuesFacets.java    |  20 +-
 .../request/PerSegmentSingleValuedFaceting.java |  10 +-
 .../apache/solr/search/SolrIndexSearcher.java   |   6 +-
 .../facet/FacetFieldProcessorByArrayDV.java     |  30 +-
 .../apache/solr/uninverting/FieldCacheImpl.java |  18 +
 59 files changed, 1405 insertions(+), 413 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c9de11d0/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
----------------------------------------------------------------------


[21/50] [abbrv] lucene-solr:jira/solr-8542-v2: SOLR-8370: Display configured Similarity in Schema-Browser

Posted by cp...@apache.org.
SOLR-8370: Display configured Similarity in Schema-Browser


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/14b6d93d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/14b6d93d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/14b6d93d

Branch: refs/heads/jira/solr-8542-v2
Commit: 14b6d93db4b0a608809782d1ef01fa97840b80e0
Parents: 39db548
Author: Jan Høydahl <ja...@apache.org>
Authored: Thu Oct 20 13:25:40 2016 +0200
Committer: Jan Høydahl <ja...@apache.org>
Committed: Thu Oct 20 13:25:40 2016 +0200

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  3 +
 .../solr/handler/admin/LukeRequestHandler.java  |  1 +
 .../similarities/SchemaSimilarityFactory.java   | 39 ++++++++-----
 solr/webapp/web/css/angular/schema.css          | 23 ++++++++
 .../webapp/web/js/angular/controllers/schema.js | 60 +++++++++++++-------
 solr/webapp/web/partials/schema.html            | 15 +++--
 6 files changed, 101 insertions(+), 40 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/14b6d93d/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index aca7601..b3a2a30 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -142,6 +142,9 @@ New Features
 
 * SOLR-9417: Allow daemons to terminate when they finish iterating a topic (Joel Bernstein)
 
+* SOLR-8370: Display configured Similarity in Schema-Browser, both global/default and per-field/field-type 
+  (janhoy, Alexandre Rafalovitch)
+
 Bug Fixes
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/14b6d93d/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java
index a5fc36c..d0dd152 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java
@@ -474,6 +474,7 @@ public class LukeRequestHandler extends RequestHandlerBase
     finfo.add("uniqueKeyField",
         null == uniqueField ? null : uniqueField.getName());
     finfo.add("defaultSearchField", schema.getDefaultSearchFieldName());
+    finfo.add("similarity", getSimilarityInfo(schema.getSimilarity()));
     finfo.add("types", types);
     return finfo;
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/14b6d93d/solr/core/src/java/org/apache/solr/search/similarities/SchemaSimilarityFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/similarities/SchemaSimilarityFactory.java b/solr/core/src/java/org/apache/solr/search/similarities/SchemaSimilarityFactory.java
index e648481..a71de18 100644
--- a/solr/core/src/java/org/apache/solr/search/similarities/SchemaSimilarityFactory.java
+++ b/solr/core/src/java/org/apache/solr/search/similarities/SchemaSimilarityFactory.java
@@ -129,21 +129,32 @@ public class SchemaSimilarityFactory extends SimilarityFactory implements SolrCo
                                   "' but that <fieldType> does not define a <similarity>");
         }
       }
-      assert null != defaultSim;
-      final Similarity defaultSimilarity = defaultSim;
-      similarity = new PerFieldSimilarityWrapper() {
-        @Override
-        public Similarity get(String name) {
-          FieldType fieldType = core.getLatestSchema().getFieldTypeNoEx(name);
-          if (fieldType == null) {
-            return defaultSimilarity;
-          } else {
-            Similarity similarity = fieldType.getSimilarity();
-            return similarity == null ? defaultSimilarity : similarity;
-          }
-        }
-      };
+      similarity = new SchemaSimilarity(defaultSim);
     }
     return similarity;
   }
+  
+  private class SchemaSimilarity extends PerFieldSimilarityWrapper {
+    private Similarity defaultSimilarity;
+
+    public SchemaSimilarity(Similarity defaultSimilarity) {
+      this.defaultSimilarity = defaultSimilarity;
+    }
+
+    @Override
+    public Similarity get(String name) {
+      FieldType fieldType = core.getLatestSchema().getFieldTypeNoEx(name);
+      if (fieldType == null) {
+        return defaultSimilarity;
+      } else {
+        Similarity similarity = fieldType.getSimilarity();
+        return similarity == null ? defaultSimilarity : similarity;
+      }
+    }
+
+    @Override
+    public String toString() {
+      return "SchemaSimilarity. Default: " + ((get("") == null) ? "null" : get("").toString());
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/14b6d93d/solr/webapp/web/css/angular/schema.css
----------------------------------------------------------------------
diff --git a/solr/webapp/web/css/angular/schema.css b/solr/webapp/web/css/angular/schema.css
index 626cdc2..98a857f 100644
--- a/solr/webapp/web/css/angular/schema.css
+++ b/solr/webapp/web/css/angular/schema.css
@@ -701,4 +701,27 @@ limitations under the License.
 #content #schema .copyfield .updatable a {
   float:left;
   width:80%;
+}
+
+#content #schema dd.similarity.ng-binding::after {
+  content: attr(data-tip) ;
+
+  font-size: 12px;
+  position: relative;
+  white-space: nowrap;
+  bottom: 9999px;
+  left: 0;
+  background: lightyellow;
+  color: black;
+  padding: 4px 7px;
+  line-height: 24px;
+  height: 24px;
+  border: 1px solid darkgray;
+  opacity: 0;
+  transition:opacity 0.4s ease-out;
+}
+
+#content #schema dd.similarity.ng-binding:hover::after {
+  opacity: 90;
+  bottom: -20px;
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/14b6d93d/solr/webapp/web/js/angular/controllers/schema.js
----------------------------------------------------------------------
diff --git a/solr/webapp/web/js/angular/controllers/schema.js b/solr/webapp/web/js/angular/controllers/schema.js
index ee23bd7..94dd93e 100644
--- a/solr/webapp/web/js/angular/controllers/schema.js
+++ b/solr/webapp/web/js/angular/controllers/schema.js
@@ -70,6 +70,10 @@ solrAdminApp.controller('SchemaController',
                     $scope.core = $routeParams.core;
                     $scope.defaultSearchField = data.default_search_field;
                     $scope.uniqueKeyField = data.unique_key_field;
+                    $scope.similarity = data.similarity; 
+                    if ($scope.similarity && $scope.similarity.className) {
+                        $scope.similarity.className = shortenPackages($scope.similarity.className); 
+                    }
                     $scope.isDefaultSearchField = ($scope.selectedType == "Field" && $scope.name == $scope.defaultSearchField);
                     $scope.isUniqueKeyField = ($scope.selectedType == "Field" && $scope.name == $scope.uniqueKeyField);
 
@@ -334,6 +338,7 @@ var mergeIndexAndSchemaData = function(index, schema) {
     var data = {
         default_search_field: null,
         unique_key_field: null,
+        similarity: null,
         key: {},
         fields: {},
         dynamic_fields: {},
@@ -354,6 +359,7 @@ var mergeIndexAndSchemaData = function(index, schema) {
 
     data.default_search_field = schema.defaultSearchField;
     data.unique_key_field = schema.uniqueKeyField;
+    data.similarity = schema.similarity;
 
     data.dynamic_fields = schema.dynamicFields;
     data.types = schema.types;
@@ -422,11 +428,11 @@ var mergeIndexAndSchemaData = function(index, schema) {
     return data;
 };
 
-var getFieldProperties = function(data, core, is, field) {
+var getFieldProperties = function(data, core, is, name) {
 
     var display = {};
 
-    display.partialState = is.field && !!data.fields[field].partial;
+    display.partialState = is.field && !!data.fields[name].partial;
 
     display.columns = [];
     display.rows = [];
@@ -446,23 +452,33 @@ var getFieldProperties = function(data, core, is, field) {
     }
 
     // Identify the rows for our field property table
-    if (is.field && data.fields[field]) {
-        if (data.fields[field].flags) {
-            addRow('Properties', data.fields[field].flags);
-        }
-        if (data.fields[field].schema) {
-            addRow('Schema', data.fields[field].schema);
-        }
-        if (data.fields[field].index) {
-            addRow('Index', data.fields[field].index);
-        }
-        display.docs = data.fields[field].docs;
-        display.docsUrl = "#/" + core + "/query?q=" + field + ":[* TO *]";
-        display.distinct = data.fields[field].distinct;
-        display.positionIncrementGap = data.fields[field].positionIncrementGap;
-        display.similarity = data.fields[field].similarity;
-    } else if (is.dynamicField && data.dynamic_fields[field] && data.dynamic_fields[field].flags) {
-        addRow('Properties', data.dynamic_fields[field].flags);
+    if (is.field && data.fields[name]) {
+        if (data.fields[name].flags) {
+            addRow('Properties', data.fields[name].flags);
+        }
+        if (data.fields[name].schema) {
+            addRow('Schema', data.fields[name].schema);
+        }
+        if (data.fields[name].index) {
+            addRow('Index', data.fields[name].index);
+        }
+        display.docs = data.fields[name].docs;
+        display.docsUrl = "#/" + core + "/query?q=" + name + ":[* TO *]";
+        display.distinct = data.fields[name].distinct;
+        display.positionIncrementGap = data.fields[name].positionIncrementGap;
+        if (data.types[data.fields[name].type]) {
+          display.similarity = data.types[data.fields[name].type].similarity;
+        } else {
+          display.similarity = null;
+        }
+    } else if (is.dynamicField && data.dynamic_fields[name] && data.dynamic_fields[name].flags) {
+        addRow('Properties', data.dynamic_fields[name].flags);
+        display.similarity = data.types[data.dynamic_fields[name].type].similarity;
+    } else if (is.type && data.types[name]) {
+        display.similarity = data.types[name].similarity;
+    }
+    if (display.similarity && display.similarity.className) {
+        display.similarity.className = shortenPackages(display.similarity.className);
     }
 
     // identify columns in field property table:
@@ -591,7 +607,11 @@ var sortedObjectArray = function(list) {
       objarr.push({"name": list[i]});
     }
     return objarr;
-}
+};
+
+var shortenPackages = function(className) {
+    return className.replace("org.apache.solr", "o.a.s").replace("org.apache.lucene", "o.a.l");
+};
 
 /*
         var get_width = function get_width()

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/14b6d93d/solr/webapp/web/partials/schema.html
----------------------------------------------------------------------
diff --git a/solr/webapp/web/partials/schema.html b/solr/webapp/web/partials/schema.html
index ca626fd..1c0347f 100644
--- a/solr/webapp/web/partials/schema.html
+++ b/solr/webapp/web/partials/schema.html
@@ -212,7 +212,7 @@ limitations under the License.
             </h2>
           </div>
 
-          <div class="partial" ng-show="partialState">
+          <div class="partial" ng-show="display.partialState">
 
             <p>Because your Index is empty, we do not have enough Information about this Field</p>
 
@@ -220,11 +220,11 @@ limitations under the License.
 
           <dl class="options clearfix">
 
-            <dt class="field-type">Field-Type:</dt>
-            <dd class="field-type">{{analysis.data.className}}</dd>
+            <dt class="field-type" ng-show="analysis.data.className">Field-Type:</dt>
+            <dd class="field-type" ng-show="analysis.data.className">{{analysis.data.className}}</dd>
 
-            <dt class="similarity" ng-show="display.similarity">Similarity:</dt>
-            <dd class="similarity" ng-show="display.similarity">{{ display.similarity.details }} ({{ similarity.className }}) </dd>
+            <dt class="similarity" ng-show="display.similarity.className">Similarity:</dt>
+            <dd class="similarity" ng-show="display.similarity.className" data-tip="{{ display.similarity.className }}">{{ display.similarity.details }}</dd>
 
             <dt class="position-increment-gap" ng-show="display.positionIncrementGap"><abbr title="Position Increment Gap">PI Gap</abbr>:</dt>
             <dd class="position-increment-gap" ng-show="display.positionIncrementGap">{{ display.positionIncrementGap }}</dd>
@@ -287,7 +287,7 @@ limitations under the License.
 
         </div>
 
-        <div class="terminfo-holder loaded clearfix" ng-class="{disabled: noTermData}" ng-show="is.field">
+        <div class="terminfo-holder loaded clearfix" ng-class="{disabled: noTermData}" ng-show="is.field && !display.partialState">
 
           <div class="trigger">
 
@@ -438,6 +438,9 @@ limitations under the License.
         <dt class="default-search-field" ng-class="{active: isDefaultSearchField}" ng-show="defaultSearchField">Default Search Field</dt>
         <dd class="default-search-field" ng-class="{active: isDefaultSearchField}"><a ng-href="#/{{core}}/schema?field={{defaultSearchField}}">{{defaultSearchField}}</a></dd>
 
+        <dt class="similarity" ng-class="{active: similarity.className}">Global Similarity:</dt>
+        <dd class="similarity" ng-class="{active: similarity.className}" data-tip="{{ similarity.className }}">{{ similarity.details }}</dd>
+
       </dl>
 
     </div>

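On the Java side, the patch above only turns the existing anonymous PerFieldSimilarityWrapper into a named SchemaSimilarity class so it can carry a toString() for the UI; the per-field lookup with fallback to the default similarity is unchanged. A minimal sketch of that fallback pattern, assuming Lucene's PerFieldSimilarityWrapper and stock similarities, with a plain map standing in for the Solr schema (MapPerFieldSimilarity and the "title" override are made up):

import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.search.similarities.BM25Similarity;
import org.apache.lucene.search.similarities.ClassicSimilarity;
import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper;
import org.apache.lucene.search.similarities.Similarity;

// Hypothetical wrapper: per-field overrides backed by a map instead of the schema.
public class MapPerFieldSimilarity extends PerFieldSimilarityWrapper {
  private final Map<String, Similarity> overrides = new HashMap<>();
  private final Similarity defaultSimilarity = new BM25Similarity();

  public MapPerFieldSimilarity() {
    overrides.put("title", new ClassicSimilarity());
  }

  @Override
  public Similarity get(String field) {
    Similarity sim = overrides.get(field);
    // Fall back to the default when the field defines no similarity,
    // just as SchemaSimilarity does above.
    return sim == null ? defaultSimilarity : sim;
  }
}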

[20/50] [abbrv] lucene-solr:jira/solr-8542-v2: SOLR-9417: Update CHANGES.txt

Posted by cp...@apache.org.
SOLR-9417: Update CHANGES.txt


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/39db5489
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/39db5489
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/39db5489

Branch: refs/heads/jira/solr-8542-v2
Commit: 39db5489b9816f7762aad92163b5eb3005331755
Parents: be8bd77
Author: Joel Bernstein <jb...@apache.org>
Authored: Wed Oct 19 18:28:35 2016 -0400
Committer: Joel Bernstein <jb...@apache.org>
Committed: Wed Oct 19 18:28:35 2016 -0400

----------------------------------------------------------------------
 solr/CHANGES.txt | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/39db5489/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index efea20f..aca7601 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -140,6 +140,7 @@ New Features
 
 * SOLR-9657: New TemplateUpdateProcessorFactory added (noble)
 
+* SOLR-9417: Allow daemons to terminate when they finish iterating a topic (Joel Bernstein)
 
 Bug Fixes
 ----------------------


[19/50] [abbrv] lucene-solr:jira/solr-8542-v2: LUCENE-7507: Upgrade morfologik-stemming to version 2.1.1 (fixes security manager issue with Polish dictionary lookup).

Posted by cp...@apache.org.
LUCENE-7507: Upgrade morfologik-stemming to version 2.1.1 (fixes security manager issue with Polish dictionary lookup).


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/be8bd77b
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/be8bd77b
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/be8bd77b

Branch: refs/heads/jira/solr-8542-v2
Commit: be8bd77b1745e9c7aab9f81c54e057da7dfd5e63
Parents: fe77dff
Author: Dawid Weiss <dw...@apache.org>
Authored: Wed Oct 19 23:31:03 2016 +0200
Committer: Dawid Weiss <dw...@apache.org>
Committed: Wed Oct 19 23:40:24 2016 +0200

----------------------------------------------------------------------
 lucene/CHANGES.txt                                 | 3 +++
 lucene/ivy-versions.properties                     | 2 +-
 lucene/licenses/morfologik-fsa-2.1.0.jar.sha1      | 1 -
 lucene/licenses/morfologik-fsa-2.1.1.jar.sha1      | 1 +
 lucene/licenses/morfologik-polish-2.1.0.jar.sha1   | 1 -
 lucene/licenses/morfologik-polish-2.1.1.jar.sha1   | 1 +
 lucene/licenses/morfologik-stemming-2.1.0.jar.sha1 | 1 -
 lucene/licenses/morfologik-stemming-2.1.1.jar.sha1 | 1 +
 solr/licenses/morfologik-fsa-2.1.0.jar.sha1        | 1 -
 solr/licenses/morfologik-fsa-2.1.1.jar.sha1        | 1 +
 solr/licenses/morfologik-polish-2.1.0.jar.sha1     | 1 -
 solr/licenses/morfologik-polish-2.1.1.jar.sha1     | 1 +
 solr/licenses/morfologik-stemming-2.1.0.jar.sha1   | 1 -
 solr/licenses/morfologik-stemming-2.1.1.jar.sha1   | 1 +
 14 files changed, 10 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/be8bd77b/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 7105330..17e0b49 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -68,6 +68,9 @@ New Features
 
 Bug Fixes
 
+* LUCENE-7507: Upgrade morfologik-stemming to version 2.1.1 (fixes security
+  manager issue with Polish dictionary lookup). (Dawid Weiss)
+
 * LUCENE-7472: MultiFieldQueryParser.getFieldQuery() drops queries that are
   neither BooleanQuery nor TermQuery.  (Steve Rowe)
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/be8bd77b/lucene/ivy-versions.properties
----------------------------------------------------------------------
diff --git a/lucene/ivy-versions.properties b/lucene/ivy-versions.properties
index bc46ee6..7f8ac12 100644
--- a/lucene/ivy-versions.properties
+++ b/lucene/ivy-versions.properties
@@ -224,7 +224,7 @@ org.bouncycastle.version = 1.45
 
 /org.carrot2/carrot2-mini = 3.12.0
 
-org.carrot2.morfologik.version = 2.1.0
+org.carrot2.morfologik.version = 2.1.1
 /org.carrot2/morfologik-fsa = ${org.carrot2.morfologik.version}
 /org.carrot2/morfologik-polish = ${org.carrot2.morfologik.version}
 /org.carrot2/morfologik-stemming = ${org.carrot2.morfologik.version}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/be8bd77b/lucene/licenses/morfologik-fsa-2.1.0.jar.sha1
----------------------------------------------------------------------
diff --git a/lucene/licenses/morfologik-fsa-2.1.0.jar.sha1 b/lucene/licenses/morfologik-fsa-2.1.0.jar.sha1
deleted file mode 100644
index 42c0fb3..0000000
--- a/lucene/licenses/morfologik-fsa-2.1.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-88e5993f73c102f378c711f6e47221b7a9e22d25

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/be8bd77b/lucene/licenses/morfologik-fsa-2.1.1.jar.sha1
----------------------------------------------------------------------
diff --git a/lucene/licenses/morfologik-fsa-2.1.1.jar.sha1 b/lucene/licenses/morfologik-fsa-2.1.1.jar.sha1
new file mode 100644
index 0000000..4ceed4b
--- /dev/null
+++ b/lucene/licenses/morfologik-fsa-2.1.1.jar.sha1
@@ -0,0 +1 @@
+87866deba6aa5d19956fbe3406d8ddb5f19f5352

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/be8bd77b/lucene/licenses/morfologik-polish-2.1.0.jar.sha1
----------------------------------------------------------------------
diff --git a/lucene/licenses/morfologik-polish-2.1.0.jar.sha1 b/lucene/licenses/morfologik-polish-2.1.0.jar.sha1
deleted file mode 100644
index 7f8b4c2..0000000
--- a/lucene/licenses/morfologik-polish-2.1.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d5a9c7721bce2ef17444abbe25ac2e65bfaa181f

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/be8bd77b/lucene/licenses/morfologik-polish-2.1.1.jar.sha1
----------------------------------------------------------------------
diff --git a/lucene/licenses/morfologik-polish-2.1.1.jar.sha1 b/lucene/licenses/morfologik-polish-2.1.1.jar.sha1
new file mode 100644
index 0000000..e625def
--- /dev/null
+++ b/lucene/licenses/morfologik-polish-2.1.1.jar.sha1
@@ -0,0 +1 @@
+41483a4bd785a065e03aad9be4449c21d89e2d50

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/be8bd77b/lucene/licenses/morfologik-stemming-2.1.0.jar.sha1
----------------------------------------------------------------------
diff --git a/lucene/licenses/morfologik-stemming-2.1.0.jar.sha1 b/lucene/licenses/morfologik-stemming-2.1.0.jar.sha1
deleted file mode 100644
index 78f1961..0000000
--- a/lucene/licenses/morfologik-stemming-2.1.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-94167b64752138a246cc33cbf1a3b0bfe5274b7c

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/be8bd77b/lucene/licenses/morfologik-stemming-2.1.1.jar.sha1
----------------------------------------------------------------------
diff --git a/lucene/licenses/morfologik-stemming-2.1.1.jar.sha1 b/lucene/licenses/morfologik-stemming-2.1.1.jar.sha1
new file mode 100644
index 0000000..6437873
--- /dev/null
+++ b/lucene/licenses/morfologik-stemming-2.1.1.jar.sha1
@@ -0,0 +1 @@
+5c169bab2e7dd04f5cb03d179a73a4339cc1d0a2

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/be8bd77b/solr/licenses/morfologik-fsa-2.1.0.jar.sha1
----------------------------------------------------------------------
diff --git a/solr/licenses/morfologik-fsa-2.1.0.jar.sha1 b/solr/licenses/morfologik-fsa-2.1.0.jar.sha1
deleted file mode 100644
index 42c0fb3..0000000
--- a/solr/licenses/morfologik-fsa-2.1.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-88e5993f73c102f378c711f6e47221b7a9e22d25

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/be8bd77b/solr/licenses/morfologik-fsa-2.1.1.jar.sha1
----------------------------------------------------------------------
diff --git a/solr/licenses/morfologik-fsa-2.1.1.jar.sha1 b/solr/licenses/morfologik-fsa-2.1.1.jar.sha1
new file mode 100644
index 0000000..4ceed4b
--- /dev/null
+++ b/solr/licenses/morfologik-fsa-2.1.1.jar.sha1
@@ -0,0 +1 @@
+87866deba6aa5d19956fbe3406d8ddb5f19f5352

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/be8bd77b/solr/licenses/morfologik-polish-2.1.0.jar.sha1
----------------------------------------------------------------------
diff --git a/solr/licenses/morfologik-polish-2.1.0.jar.sha1 b/solr/licenses/morfologik-polish-2.1.0.jar.sha1
deleted file mode 100644
index 7f8b4c2..0000000
--- a/solr/licenses/morfologik-polish-2.1.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d5a9c7721bce2ef17444abbe25ac2e65bfaa181f

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/be8bd77b/solr/licenses/morfologik-polish-2.1.1.jar.sha1
----------------------------------------------------------------------
diff --git a/solr/licenses/morfologik-polish-2.1.1.jar.sha1 b/solr/licenses/morfologik-polish-2.1.1.jar.sha1
new file mode 100644
index 0000000..e625def
--- /dev/null
+++ b/solr/licenses/morfologik-polish-2.1.1.jar.sha1
@@ -0,0 +1 @@
+41483a4bd785a065e03aad9be4449c21d89e2d50

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/be8bd77b/solr/licenses/morfologik-stemming-2.1.0.jar.sha1
----------------------------------------------------------------------
diff --git a/solr/licenses/morfologik-stemming-2.1.0.jar.sha1 b/solr/licenses/morfologik-stemming-2.1.0.jar.sha1
deleted file mode 100644
index 78f1961..0000000
--- a/solr/licenses/morfologik-stemming-2.1.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-94167b64752138a246cc33cbf1a3b0bfe5274b7c

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/be8bd77b/solr/licenses/morfologik-stemming-2.1.1.jar.sha1
----------------------------------------------------------------------
diff --git a/solr/licenses/morfologik-stemming-2.1.1.jar.sha1 b/solr/licenses/morfologik-stemming-2.1.1.jar.sha1
new file mode 100644
index 0000000..6437873
--- /dev/null
+++ b/solr/licenses/morfologik-stemming-2.1.1.jar.sha1
@@ -0,0 +1 @@
+5c169bab2e7dd04f5cb03d179a73a4339cc1d0a2


[46/50] [abbrv] lucene-solr:jira/solr-8542-v2: LUCENE-7462: Fix LegacySortedSetDocValuesWrapper to reset `upTo` when calling `advanceExact`.

Posted by cp...@apache.org.
LUCENE-7462: Fix LegacySortedSetDocValuesWrapper to reset `upTo` when calling `advanceExact`.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/97339e2c
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/97339e2c
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/97339e2c

Branch: refs/heads/jira/solr-8542-v2
Commit: 97339e2cacc308c3689d1cd16dfbc44ebea60788
Parents: e1b0693
Author: Adrien Grand <jp...@gmail.com>
Authored: Mon Oct 24 15:43:21 2016 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Mon Oct 24 15:44:40 2016 +0200

----------------------------------------------------------------------
 .../apache/lucene/index/LegacySortedNumericDocValuesWrapper.java    | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/97339e2c/lucene/core/src/java/org/apache/lucene/index/LegacySortedNumericDocValuesWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/LegacySortedNumericDocValuesWrapper.java b/lucene/core/src/java/org/apache/lucene/index/LegacySortedNumericDocValuesWrapper.java
index cfb61e3..a75274e 100644
--- a/lucene/core/src/java/org/apache/lucene/index/LegacySortedNumericDocValuesWrapper.java
+++ b/lucene/core/src/java/org/apache/lucene/index/LegacySortedNumericDocValuesWrapper.java
@@ -77,6 +77,7 @@ public final class LegacySortedNumericDocValuesWrapper extends SortedNumericDocV
   public boolean advanceExact(int target) throws IOException {
     docID = target;
     values.setDocument(docID);
+    upto = 0;
     return values.count() != 0;
   }
 

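The one-line fix above resets the wrapper's per-document cursor (upto) whenever advanceExact() positions on a new document; otherwise a caller that consumed only some of the previous document's values would start the next document mid-array. A toy illustration of the same cursor-reset rule, with made-up names rather than the real doc-values API:

// Illustration only: a tiny multi-valued reader showing why the cursor must be
// reset on advanceExact (names are hypothetical, not Lucene API).
public class CursorResetExample {
  private final long[][] valuesPerDoc = { {3, 5}, {7} };
  private long[] current;
  private int upto; // per-document cursor

  boolean advanceExact(int doc) {
    current = valuesPerDoc[doc];
    upto = 0; // without this reset, doc 1 would start at upto=1
    return current.length != 0;
  }

  long nextValue() {
    return current[upto++];
  }

  public static void main(String[] args) {
    CursorResetExample dv = new CursorResetExample();
    dv.advanceExact(0);
    System.out.println(dv.nextValue()); // 3 (only one of doc 0's two values read)
    dv.advanceExact(1);
    System.out.println(dv.nextValue()); // 7 -- would go out of bounds without the reset
  }
}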

[11/50] [abbrv] lucene-solr:jira/solr-8542-v2: LUCENE-7505: AnalyzingInfixSuggester returned invalid results when allTermsRequired is false and context filters are specified

Posted by cp...@apache.org.
LUCENE-7505: AnalyzingInfixSuggester returned invalid results when allTermsRequired is false and context filters are specified


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/45ca4bd3
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/45ca4bd3
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/45ca4bd3

Branch: refs/heads/jira/solr-8542-v2
Commit: 45ca4bd3ed53b264cb1188bac5c76efa47d6e23e
Parents: c2e031a
Author: Mike McCandless <mi...@apache.org>
Authored: Wed Oct 19 09:44:08 2016 -0400
Committer: Mike McCandless <mi...@apache.org>
Committed: Wed Oct 19 09:44:34 2016 -0400

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |  4 ++
 .../analyzing/AnalyzingInfixSuggester.java      | 12 +++-
 .../analyzing/AnalyzingInfixSuggesterTest.java  | 76 ++++++++++++++++++++
 3 files changed, 89 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/45ca4bd3/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 6d83c53..21ded1a 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -92,6 +92,10 @@ Bug Fixes
 * LUCENE-7493: FacetCollector.search threw an unexpected exception if
   you asked for zero hits but wanted facets (Mahesh via Mike McCandless)
 
+* LUCENE-7505: AnalyzingInfixSuggester returned invalid results when
+  allTermsRequired is false and context filters are specified (Mike
+  McCandless)
+
 Improvements
 
 * LUCENE-7439: FuzzyQuery now matches all terms within the specified

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/45ca4bd3/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggester.java
----------------------------------------------------------------------
diff --git a/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggester.java b/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggester.java
index d05c39f..aa60237 100644
--- a/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggester.java
+++ b/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggester.java
@@ -560,12 +560,18 @@ public class AnalyzingInfixSuggester extends Lookup implements Closeable {
         }
         
         if (allMustNot) {
-          //all are MUST_NOT: add the contextQuery to the main query instead (not as sub-query)
+          // All are MUST_NOT: add the contextQuery to the main query instead (not as sub-query)
           for (BooleanClause clause : contextQuery.clauses()) {
             query.add(clause);
           }
+        } else if (allTermsRequired == false) {
+          // We must carefully upgrade the query clauses to MUST:
+          BooleanQuery.Builder newQuery = new BooleanQuery.Builder();
+          newQuery.add(query.build(), BooleanClause.Occur.MUST);
+          newQuery.add(contextQuery, BooleanClause.Occur.MUST);
+          query = newQuery;
         } else {
-          //Add contextQuery as sub-query
+          // Add contextQuery as sub-query
           query.add(contextQuery, BooleanClause.Occur.MUST);
         }
       }
@@ -577,7 +583,7 @@ public class AnalyzingInfixSuggester extends Lookup implements Closeable {
 
     Query finalQuery = finishQuery(query, allTermsRequired);
 
-    //System.out.println("finalQuery=" + query);
+    //System.out.println("finalQuery=" + finalQuery);
 
     // Sort by weight, descending:
     TopFieldCollector c = TopFieldCollector.create(SORT, num, true, false, false);
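The interesting branch is the new allTermsRequired == false case: there the suggest-term clauses are SHOULD, so adding the context filter as one more MUST clause of the same BooleanQuery left the terms as scoring-only hints and any entry matching the context alone could be returned. The fix wraps both parts so each is independently required; a condensed sketch of the resulting query shape (names as in the hunk above):

    // query:        SHOULD(term1) SHOULD(term2) ...  -- at least one must match once wrapped
    // contextQuery: the user-supplied context filters
    BooleanQuery.Builder wrapped = new BooleanQuery.Builder();
    wrapped.add(query.build(), BooleanClause.Occur.MUST);
    wrapped.add(contextQuery, BooleanClause.Occur.MUST);
    query = wrapped;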

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/45ca4bd3/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggesterTest.java
----------------------------------------------------------------------
diff --git a/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggesterTest.java b/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggesterTest.java
index 69d3ed6..d98d052 100644
--- a/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggesterTest.java
+++ b/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggesterTest.java
@@ -1258,4 +1258,80 @@ public class AnalyzingInfixSuggesterTest extends LuceneTestCase {
       a.close();
     }
   }
+
+  public void testContextNotAllTermsRequired() throws Exception {
+
+    Input keys[] = new Input[] {
+      new Input("lend me your ear", 8, new BytesRef("foobar"), asSet("foo", "bar")),
+      new Input("a penny saved is a penny earned", 10, new BytesRef("foobaz"), asSet("foo", "baz"))
+    };
+    Path tempDir = createTempDir("analyzingInfixContext");
+
+    Analyzer a = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false);
+    AnalyzingInfixSuggester suggester = new AnalyzingInfixSuggester(newFSDirectory(tempDir), a, a, 3, false);
+    suggester.build(new InputArrayIterator(keys));
+
+    // No context provided, all results returned
+    List<LookupResult> results = suggester.lookup(TestUtil.stringToCharSequence("ear", random()), 10, false, true);
+    assertEquals(2, results.size());
+    LookupResult result = results.get(0);
+    assertEquals("a penny saved is a penny earned", result.key);
+    assertEquals("a penny saved is a penny <b>ear</b>ned", result.highlightKey);
+    assertEquals(10, result.value);
+    assertEquals(new BytesRef("foobaz"), result.payload);
+    assertNotNull(result.contexts);
+    assertEquals(2, result.contexts.size());
+    assertTrue(result.contexts.contains(new BytesRef("foo")));
+    assertTrue(result.contexts.contains(new BytesRef("baz")));
+
+    result = results.get(1);
+    assertEquals("lend me your ear", result.key);
+    assertEquals("lend me your <b>ear</b>", result.highlightKey);
+    assertEquals(8, result.value);
+    assertEquals(new BytesRef("foobar"), result.payload);
+    assertNotNull(result.contexts);
+    assertEquals(2, result.contexts.size());
+    assertTrue(result.contexts.contains(new BytesRef("foo")));
+    assertTrue(result.contexts.contains(new BytesRef("bar")));
+
+    // Both have "foo" context:
+    results = suggester.lookup(TestUtil.stringToCharSequence("ear", random()), asSet("foo"), 10, false, true);
+    assertEquals(2, results.size());
+
+    result = results.get(0);
+    assertEquals("a penny saved is a penny earned", result.key);
+    assertEquals("a penny saved is a penny <b>ear</b>ned", result.highlightKey);
+    assertEquals(10, result.value);
+    assertEquals(new BytesRef("foobaz"), result.payload);
+    assertNotNull(result.contexts);
+    assertEquals(2, result.contexts.size());
+    assertTrue(result.contexts.contains(new BytesRef("foo")));
+    assertTrue(result.contexts.contains(new BytesRef("baz")));
+
+    result = results.get(1);
+    assertEquals("lend me your ear", result.key);
+    assertEquals("lend me your <b>ear</b>", result.highlightKey);
+    assertEquals(8, result.value);
+    assertEquals(new BytesRef("foobar"), result.payload);
+    assertNotNull(result.contexts);
+    assertEquals(2, result.contexts.size());
+    assertTrue(result.contexts.contains(new BytesRef("foo")));
+    assertTrue(result.contexts.contains(new BytesRef("bar")));
+
+    // Only one has "foo" context and len
+    results = suggester.lookup(TestUtil.stringToCharSequence("len", random()), asSet("foo"), 10, false, true);
+    assertEquals(1, results.size());
+
+    result = results.get(0);
+    assertEquals("lend me your ear", result.key);
+    assertEquals("<b>len</b>d me your ear", result.highlightKey);
+    assertEquals(8, result.value);
+    assertEquals(new BytesRef("foobar"), result.payload);
+    assertNotNull(result.contexts);
+    assertEquals(2, result.contexts.size());
+    assertTrue(result.contexts.contains(new BytesRef("foo")));
+    assertTrue(result.contexts.contains(new BytesRef("bar")));
+
+    suggester.close();
+  }
 }


[23/50] [abbrv] lucene-solr:jira/solr-8542-v2: SOLR-9570: Various log tidying at Solr startup

Posted by cp...@apache.org.
SOLR-9570: Various log tidying at Solr startup


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/97761966
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/97761966
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/97761966

Branch: refs/heads/jira/solr-8542-v2
Commit: 97761966f30557c33b3bbb131ce64ea7905ae213
Parents: c4b4830
Author: Jan Høydahl <ja...@apache.org>
Authored: Thu Oct 20 14:47:32 2016 +0200
Committer: Jan Høydahl <ja...@apache.org>
Committed: Thu Oct 20 14:47:32 2016 +0200

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  15 +-
 solr/bin/solr                                   |  18 +-
 solr/bin/solr.cmd                               |  28 +-
 .../src/java/org/apache/solr/util/SolrCLI.java  | 273 ++++++++++++++++++-
 .../org/apache/solr/util/UtilsToolTest.java     | 185 +++++++++++++
 5 files changed, 481 insertions(+), 38 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/97761966/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index b3a2a30..b4dcf4c 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -86,8 +86,12 @@ Upgrade Notes
   prefix, then you will now get an error as these options are incompatible with numeric faceting.
 
 * Solr's logging verbosity at the INFO level has been greatly reduced, and
-  you may need to update the log configs to use the DEBUG level to get the
-  same logging messages as before.
+  you may need to update the log configs to use the DEBUG level to see all the
+  logging messages you used to see at INFO level before.
+
+* We no longer keep date-stamped backup copies of solr.log and solr_gc.log forever. If you relied on
+  solr_log_<date> or solr_gc_log_<date> files remaining in the logs folder, that is no longer the case.
+  See SOLR-9570 for details.
 
 * The create/deleteCollection methods on MiniSolrCloudCluster have been
   deprecated.  Clients should instead use the CollectionAdminRequest API.  In
@@ -308,6 +312,13 @@ Other Changes
 * SOLR-7850: Moved defaults within bin/solr.in.sh (and bin/solr.in.cmd on Windows) to bin/solr (and bin/solr.cmd)
   such that the default state of these files is to set nothing. This makes Solr work better with Docker. (David Smiley)
 
+* SOLR-9570: Various log tidying now happens at Solr startup:
+  Old solr_log_<date> and solr_gc_log_<date> files are removed, preventing disks from filling up,
+  solr.log.X files are rotated: solr.log from the last run becomes solr.log.1, solr.log.1 becomes solr.log.2, etc.
+  solr-*-console.log files are moved into $SOLR_LOGS_DIR/archived/ instead of being overwritten
+  Last JVM garbage collection log solr_gc.log is moved into $SOLR_LOGS_DIR/archived/
+  (janhoy)  
+
 ==================  6.2.1 ==================
 
 Bug Fixes

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/97761966/solr/bin/solr
----------------------------------------------------------------------
diff --git a/solr/bin/solr b/solr/bin/solr
index df6b4d0..6aa5709 100755
--- a/solr/bin/solr
+++ b/solr/bin/solr
@@ -1387,20 +1387,10 @@ if [ ! -e "$SOLR_HOME" ]; then
   exit 1
 fi
 
-# backup the log files before starting
-if [ -f "$SOLR_LOGS_DIR/solr.log" ]; then
-  if $verbose ; then
-    echo "Backing up $SOLR_LOGS_DIR/solr.log"
-  fi
-  mv "$SOLR_LOGS_DIR/solr.log" "$SOLR_LOGS_DIR/solr_log_$(date +"%Y%m%d_%H%M")"
-fi
-
-if [ -f "$SOLR_LOGS_DIR/solr_gc.log" ]; then
-  if $verbose ; then
-    echo "Backing up $SOLR_LOGS_DIR/solr_gc.log"
-  fi
-  mv "$SOLR_LOGS_DIR/solr_gc.log" "$SOLR_LOGS_DIR/solr_gc_log_$(date +"%Y%m%d_%H%M")"
-fi
+run_tool utils -s "$DEFAULT_SERVER_DIR" -l "$SOLR_LOGS_DIR" -remove_old_solr_logs 7 || echo "Failed removing old solr logs"
+run_tool utils -s "$DEFAULT_SERVER_DIR" -l "$SOLR_LOGS_DIR" -archive_gc_logs        || echo "Failed archiving old GC logs"
+run_tool utils -s "$DEFAULT_SERVER_DIR" -l "$SOLR_LOGS_DIR" -archive_console_logs   || echo "Failed archiving old console logs"
+run_tool utils -s "$DEFAULT_SERVER_DIR" -l "$SOLR_LOGS_DIR" -rotate_solr_logs 9     || echo "Failed rotating old solr logs"
 
 java_ver_out=`echo "$("$JAVA" -version 2>&1)"`
 JAVA_VERSION=`echo $java_ver_out | grep "java version" | awk '{ print substr($3, 2, length($3)-2); }'`

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/97761966/solr/bin/solr.cmd
----------------------------------------------------------------------
diff --git a/solr/bin/solr.cmd b/solr/bin/solr.cmd
index 10ea6d6..317a789 100644
--- a/solr/bin/solr.cmd
+++ b/solr/bin/solr.cmd
@@ -860,19 +860,11 @@ IF ERRORLEVEL 1 (
   set IS_64bit=true
 )
 
-REM backup log files (use current timestamp for backup name)
-For /f "tokens=2-4 delims=/ " %%a in ('date /t') do (set mydate=%%c-%%a-%%b)
-For /f "tokens=1-2 delims=/:" %%a in ("%TIME%") do (set mytime=%%a%%b)
-set now_ts=!mydate!_!mytime!
-IF EXIST "!SOLR_LOGS_DIR!\solr.log" (
-  echo Backing up !SOLR_LOGS_DIR!\solr.log
-  move /Y "!SOLR_LOGS_DIR!\solr.log" "!SOLR_LOGS_DIR!\solr_log_!now_ts!"
-)
-
-IF EXIST "!SOLR_LOGS_DIR!\solr_gc.log" (
-  echo Backing up !SOLR_LOGS_DIR!\solr_gc.log
-  move /Y "!SOLR_LOGS_DIR!\solr_gc.log" "!SOLR_LOGS_DIR!\solr_gc_log_!now_ts!"
-)
+REM Clean up and rotate logs
+call :run_utils "-remove_old_solr_logs 7" || echo "Failed removing old solr logs"
+call :run_utils "-archive_gc_logs"        || echo "Failed archiving old GC logs"
+call :run_utils "-archive_console_logs"   || echo "Failed archiving old console logs"
+call :run_utils "-rotate_solr_logs 9"     || echo "Failed rotating old solr logs"
 
 IF NOT "%ZK_HOST%"=="" set SOLR_MODE=solrcloud
 
@@ -1136,6 +1128,16 @@ goto done
   org.apache.solr.util.SolrCLI version
 goto done
 
+:run_utils
+set "TOOL_CMD=%~1"
+"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
+  -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
+  org.apache.solr.util.SolrCLI utils -s "%DEFAULT_SERVER_DIR%" -l "%SOLR_LOGS_DIR%" %TOOL_CMD%
+if errorlevel 1 (
+   exit /b 1
+)
+goto done
+
 :parse_create_args
 IF [%1]==[] goto run_create
 IF "%1"=="-c" goto set_create_name

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/97761966/solr/core/src/java/org/apache/solr/util/SolrCLI.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/SolrCLI.java b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
index 9724b81..39bf548 100644
--- a/solr/core/src/java/org/apache/solr/util/SolrCLI.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
@@ -29,7 +29,10 @@ import java.net.URL;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
+import java.nio.file.StandardCopyOption;
 import java.nio.file.attribute.FileOwnerAttributeView;
+import java.time.Instant;
+import java.time.Period;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -44,6 +47,8 @@ import java.util.Set;
 import java.util.TreeSet;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipInputStream;
 
@@ -108,11 +113,11 @@ import org.slf4j.LoggerFactory;
 
 import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.apache.solr.common.params.CommonParams.NAME;
+
 /**
  * Command-line utility for working with Solr.
  */
 public class SolrCLI {
-
   /**
    * Defines the interface to a Solr tool that can be run from this command-line app.
    */
@@ -233,7 +238,6 @@ public class SolrCLI {
   };
 
   private static void exit(int exitStatus) {
-    // TODO: determine if we're running in a test and don't exit
     try {
       System.exit(exitStatus);
     } catch (java.lang.SecurityException secExc) {
@@ -259,6 +263,18 @@ public class SolrCLI {
       exit(0);
     }
 
+    Tool tool = findTool(args);
+    CommandLine cli = parseCmdLine(args, tool.getOptions());
+    System.exit(tool.runTool(cli));
+  }
+
+  public static Tool findTool(String[] args) throws Exception {
+    String toolType = args[0].trim().toLowerCase(Locale.ROOT);
+    return newTool(toolType);
+  }
+
+  public static CommandLine parseCmdLine(String[] args, Option[] toolOptions) throws Exception {
+
     String builderClassName = System.getProperty("solr.authentication.httpclient.builder");
     if (builderClassName!=null) {
       try {
@@ -272,10 +288,6 @@ public class SolrCLI {
       }
     }
 
-    // Determine the tool
-    String toolType = args[0].trim().toLowerCase(Locale.ROOT);
-    Tool tool = newTool(toolType);
-
     // the parser doesn't like -D props
     List<String> toolArgList = new ArrayList<String>();
     List<String> dashDList = new ArrayList<String>();
@@ -291,7 +303,7 @@ public class SolrCLI {
 
     // process command-line args to configure this application
     CommandLine cli = 
-        processCommandLineArgs(joinCommonAndToolOptions(tool.getOptions()), toolArgs);
+        processCommandLineArgs(joinCommonAndToolOptions(toolOptions), toolArgs);
 
     List argList = cli.getArgList();
     argList.addAll(dashDList);
@@ -303,8 +315,7 @@ public class SolrCLI {
       checkSslStoreSysProp(solrInstallDir, "trustStore");
     }
 
-    // run the tool
-    exit(tool.runTool(cli));
+    return cli;
   }
 
   protected static void checkSslStoreSysProp(String solrInstallDir, String key) {
@@ -368,6 +379,8 @@ public class SolrCLI {
       return new ZkLsTool();
     else if ("assert".equals(toolType))
       return new AssertTool();
+    else if ("utils".equals(toolType))
+      return new UtilsTool();
 
     // If you add a built-in tool to this class, add it here to avoid
     // classpath scanning
@@ -3339,4 +3352,246 @@ public class SolrCLI {
       }
     }
   } // end AssertTool class
+  
+  public static class UtilsTool extends ToolBase {
+    private Path serverPath;
+    private Path logsPath;
+    private boolean beQuiet;
+
+    public UtilsTool() { this(System.out); }
+    public UtilsTool(PrintStream stdout) { super(stdout); }
+
+    public String getName() {
+      return "prestart";
+    }
+
+    @SuppressWarnings("static-access")
+    public Option[] getOptions() {
+      return new Option[]{
+          OptionBuilder
+              .withArgName("path")
+              .hasArg()
+              .withDescription("Path to server dir. Required if logs path is relative")
+              .create("s"),
+          OptionBuilder
+              .withArgName("path")
+              .hasArg()
+              .withDescription("Path to logs dir. If relative, also provide server dir with -s")
+              .create("l"),
+          OptionBuilder
+              .withDescription("Be quiet, don't print to stdout, only return exit codes")
+              .create("q"),
+          OptionBuilder
+              .withArgName("daysToKeep")
+              .hasArg()
+              .withType(Integer.class)
+              .withDescription("Remove solr_log_* files older than this number of days")
+              .create("remove_old_solr_logs"),
+          OptionBuilder
+              .withArgName("generations")
+              .hasArg()
+              .withType(Integer.class)
+              .withDescription("Rotate solr.log to solr.log.1 etc")
+              .create("rotate_solr_logs"),
+          OptionBuilder
+              .withDescription("Archive old garbage collection logs into archive/")
+              .create("archive_gc_logs"),
+          OptionBuilder
+              .withDescription("Archive old console logs into archive/")
+              .create("archive_console_logs")
+      };
+    }
+
+    @Override
+    public int runTool(CommandLine cli) throws Exception {
+      if (cli.getOptions().length == 0 || cli.getArgs().length > 0 || cli.hasOption("h")) {
+        new HelpFormatter().printHelp("bin/solr utils [OPTIONS]", getToolOptions(this));
+        return 1;
+      }
+      if (cli.hasOption("s")) {
+        serverPath = Paths.get(cli.getOptionValue("s"));
+      }
+      if (cli.hasOption("l")) {
+        logsPath = Paths.get(cli.getOptionValue("l"));
+      }
+      if (cli.hasOption("q")) {
+        beQuiet = cli.hasOption("q");
+      }
+      if (cli.hasOption("remove_old_solr_logs")) {
+        if (removeOldSolrLogs(Integer.parseInt(cli.getOptionValue("remove_old_solr_logs"))) > 0) return 1;
+      }
+      if (cli.hasOption("rotate_solr_logs")) {
+        if (rotateSolrLogs(Integer.parseInt(cli.getOptionValue("rotate_solr_logs"))) > 0) return 1;
+      }
+      if (cli.hasOption("archive_gc_logs")) {
+        if (archiveGcLogs() > 0) return 1;
+      }
+      if (cli.hasOption("archive_console_logs")) {
+        if (archiveConsoleLogs() > 0) return 1;
+      }
+      return 0;
+    }
+
+    /**
+     * Moves gc logs into archived/
+     * @return 0 on success
+     * @throws Exception on failure
+     */
+    public int archiveGcLogs() throws Exception {
+      prepareLogsPath();
+      Path archivePath = logsPath.resolve("archived");
+      if (!archivePath.toFile().exists()) {
+        Files.createDirectories(archivePath);
+      }
+      List<Path> archived = Files.find(archivePath, 1, (f, a) 
+          -> a.isRegularFile() && String.valueOf(f.getFileName()).startsWith("solr_gc_"))
+          .collect(Collectors.toList());
+      for (Path p : archived) {
+        Files.delete(p);
+      }
+      List<Path> files = Files.find(logsPath, 1, (f, a) 
+          -> a.isRegularFile() && String.valueOf(f.getFileName()).startsWith("solr_gc_"))
+          .collect(Collectors.toList());
+      if (files.size() > 0) {
+        out("Archiving " + files.size() + " old GC log files to " + archivePath);
+        for (Path p : files) {
+          Files.move(p, archivePath.resolve(p.getFileName()), StandardCopyOption.REPLACE_EXISTING);
+        }
+      }
+      return 0;
+    }
+
+    /**
+     * Moves console log(s) into archived/
+     * @return 0 on success
+     * @throws Exception on failure
+     */
+    public int archiveConsoleLogs() throws Exception {
+      prepareLogsPath();
+      Path archivePath = logsPath.resolve("archived");
+      if (!archivePath.toFile().exists()) {
+        Files.createDirectories(archivePath);
+      }
+      List<Path> archived = Files.find(archivePath, 1, (f, a) 
+          -> a.isRegularFile() && String.valueOf(f.getFileName()).endsWith("-console.log"))
+          .collect(Collectors.toList());
+      for (Path p : archived) {        
+        Files.delete(p);
+      }
+      List<Path> files = Files.find(logsPath, 1, (f, a) 
+          -> a.isRegularFile() && String.valueOf(f.getFileName()).endsWith("-console.log"))
+          .collect(Collectors.toList());
+      if (files.size() > 0) {
+        out("Archiving " + files.size() + " console log files");
+        for (Path p : files) {
+          Files.move(p, archivePath.resolve(p.getFileName()), StandardCopyOption.REPLACE_EXISTING);
+        }
+      }
+      return 0;
+    }
+
+    /**
+     * Rotates solr.log before starting Solr. Mimics log4j2 behavior, i.e. with generations=9:
+     * <pre>
+     *   solr.log.9 (and higher) are deleted
+     *   solr.log.8 -&gt; solr.log.9
+     *   solr.log.7 -&gt; solr.log.8
+     *   ...
+     *   solr.log   -&gt; solr.log.1
+     * </pre>
+     * @param generations number of generations to keep. Should agree with setting in log4j.properties
+     * @return 0 if success
+     * @throws Exception if problems
+     */
+    public int rotateSolrLogs(int generations) throws Exception {
+      prepareLogsPath();
+      if (logsPath.toFile().exists() && logsPath.resolve("solr.log").toFile().exists()) {
+        out("Rotating solr logs, keeping a max of "+generations+" generations");
+        try (Stream<Path> files = Files.find(logsPath, 1, 
+            (f, a) -> a.isRegularFile() && String.valueOf(f.getFileName()).startsWith("solr.log."))
+            .sorted((b,a) -> new Integer(a.getFileName().toString().substring(9))
+                  .compareTo(new Integer(b.getFileName().toString().substring(9))))) {
+          files.forEach(p -> {
+            try {
+              int number = Integer.parseInt(p.getFileName().toString().substring(9));
+              if (number >= generations) {
+                Files.delete(p);
+              } else {
+                Path renamed = p.getParent().resolve("solr.log." + (number + 1));
+                Files.move(p, renamed);
+              }
+            } catch (IOException e) {
+              out("Problem during rotation of log files: " + e.getMessage());
+            }
+          });
+        } catch (NumberFormatException nfe) {
+          throw new Exception("Do not know how to rotate solr.log.<ext> with non-numeric extension. Rotate aborted.", nfe);
+        }
+        Files.move(logsPath.resolve("solr.log"), logsPath.resolve("solr.log.1"));
+      }
+      
+      return 0;
+    }
+
+    /**
+     * Deletes time-stamped old solr logs, if older than n days 
+     * @param daysToKeep number of days logs to keep before deleting
+     * @return 0 on success
+     * @throws Exception on failure
+     */
+    public int removeOldSolrLogs(int daysToKeep) throws Exception {
+      prepareLogsPath();
+      if (logsPath.toFile().exists()) {
+        try (Stream<Path> stream = Files.find(logsPath, 2, (f, a) -> a.isRegularFile() 
+            && Instant.now().minus(Period.ofDays(daysToKeep)).isAfter(a.lastModifiedTime().toInstant())
+            && String.valueOf(f.getFileName()).startsWith("solr_log_"))) {
+          List<Path> files = stream.collect(Collectors.toList());
+          if (files.size() > 0) {
+            out("Deleting "+files.size() + " solr_log_* files older than " + daysToKeep + " days.");
+            for (Path p : files) {
+              Files.delete(p);
+            }
+          }
+        }
+      }
+      return 0;
+    }
+
+    // Private methods to follow
+    
+    private void out(String message) {
+      if (!beQuiet) {
+        stdout.print(message + "\n");
+      }
+    }
+
+    private void prepareLogsPath() throws Exception {
+      if (logsPath == null) {
+        throw new Exception("Command requires the -l <log-directory> option");
+      }
+      if (!logsPath.isAbsolute()) {
+        if (serverPath != null && serverPath.isAbsolute() && serverPath.toFile().exists()) {
+          logsPath = serverPath.resolve(logsPath);
+        } else {
+          throw new Exception("Logs directory must be an absolute path, or -s must be supplied");
+        }
+      }
+    }
+    
+    @Override
+    protected void runImpl(CommandLine cli) throws Exception {
+    }
+    
+    public void setLogPath(Path logsPath) {
+      this.logsPath = logsPath; 
+    }
+
+    public void setServerPath(Path serverPath) {
+      this.serverPath = serverPath; 
+    }
+    
+    public void setQuiet(boolean beQuiet) {
+      this.beQuiet = beQuiet;
+    }
+  } // end UtilsTool class  
 }
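For reference, the new UtilsTool can also be driven by hand outside of startup; a hedged sketch (the install and log paths are placeholders, and the classpath mirrors the one run_utils uses in solr.cmd above, with ':' separators on Unix):

    java -classpath "server/solr-webapp/webapp/WEB-INF/lib/*:server/lib/ext/*" \
         org.apache.solr.util.SolrCLI utils -s /opt/solr/server -l /var/solr/logs \
         -rotate_solr_logs 9 -remove_old_solr_logs 7 -archive_gc_logs -archive_console_logs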

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/97761966/solr/core/src/test/org/apache/solr/util/UtilsToolTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/util/UtilsToolTest.java b/solr/core/src/test/org/apache/solr/util/UtilsToolTest.java
new file mode 100644
index 0000000..fa39620
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/util/UtilsToolTest.java
@@ -0,0 +1,185 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.util;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.attribute.FileTime;
+import java.time.Instant;
+import java.time.Period;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import org.apache.commons.cli.CommandLine;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.apache.solr.util.SolrCLI.findTool;
+import static org.apache.solr.util.SolrCLI.parseCmdLine;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Unit test for SolrCLI's UtilsTool
+ */
+public class UtilsToolTest {
+
+  private Path dir;
+  private SolrCLI.UtilsTool tool;
+  private List<String> files = Arrays.asList(
+      "solr.log", 
+      "solr.log.1", 
+      "solr.log.2", 
+      "solr.log.3", 
+      "solr.log.9", 
+      "solr.log.10", 
+      "solr.log.11", 
+      "solr_log_20160102", 
+      "solr_log_20160304", 
+      "solr-8983-console.log",
+      "solr_gc_log_20160102", 
+      "solr_gc_log_2");
+  
+  @Before
+  public void setUp() throws IOException {
+    dir = Files.createTempDirectory("Utils Tool Test");
+    files.stream().forEach(f -> {
+      try {
+        dir.resolve(f).toFile().createNewFile();
+      } catch (IOException e) {
+        assertTrue(false);
+      }
+    });
+  }
+  
+  @After
+  public void tearDown() throws IOException {
+    org.apache.commons.io.FileUtils.deleteDirectory(dir.toFile());
+  }
+  
+  @Test
+  public void testEmptyAndQuiet() throws Exception {
+    String[] args = {"utils", "-remove_old_solr_logs", "7",  
+        "-rotate_solr_logs", "9",  
+        "-archive_gc_logs",
+        "-archive_console_logs",
+        "-q",
+        "-l", dir.toString()};
+    assertEquals(0, runTool(args));
+  }
+
+  @Test
+  public void testNonexisting() throws Exception {
+    String nonexisting = dir.resolve("non-existing").toString();
+    String[] args = {"utils", "-remove_old_solr_logs", "7",
+        "-rotate_solr_logs", "9",
+        "-archive_gc_logs",
+        "-archive_console_logs",
+        "-l", nonexisting};
+    assertEquals(0, runTool(args));
+  }
+  
+  @Test
+  public void testRemoveOldSolrLogs() throws Exception {
+    String[] args = {"utils", "-remove_old_solr_logs", "1", "-l", dir.toString()};
+    assertEquals(files.size(), fileCount());
+    assertEquals(0, runTool(args));
+    assertEquals(files.size(), fileCount());     // No logs older than 1 day
+    Files.setLastModifiedTime(dir.resolve("solr_log_20160102"), FileTime.from(Instant.now().minus(Period.ofDays(2))));
+    assertEquals(0, runTool(args));
+    assertEquals(files.size()-1, fileCount());   // One log older than 1 day
+    Files.setLastModifiedTime(dir.resolve("solr_log_20160304"), FileTime.from(Instant.now().minus(Period.ofDays(3))));
+    assertEquals(0, runTool(args));
+    assertEquals(files.size()-2, fileCount());   // Two logs older than 1 day
+  }
+
+  @Test
+  public void testRelativePath() throws Exception {
+    String[] args = {"utils", "-remove_old_solr_logs", "0", "-l", dir.getFileName().toString(), "-s", dir.getParent().toString()};
+    assertEquals(files.size(), fileCount());
+    assertEquals(0, runTool(args));
+    assertEquals(files.size()-2, fileCount());
+  }
+
+  @Test
+  public void testRelativePathError() throws Exception {
+    String[] args = {"utils", "-remove_old_solr_logs", "0", "-l", dir.getFileName().toString()};
+    try {
+      runTool(args);
+    } catch (Exception e) {
+      return;
+    }
+    assertTrue(false);
+  }
+  
+  @Test
+  public void testRemoveOldGcLogs() throws Exception {
+    String[] args = {"utils", "-archive_gc_logs", "-l", dir.toString()};
+    assertEquals(files.size(), fileCount());
+    assertEquals(0, runTool(args));
+    assertEquals(files.size()-2, fileCount());
+    assertFalse(listFiles().contains("solr_gc_log_2"));
+    assertTrue(Files.exists(dir.resolve("archived").resolve("solr_gc_log_2")));
+    assertEquals(0, runTool(args));
+    assertFalse(Files.exists(dir.resolve("archived").resolve("solr_gc_log_2")));
+  }
+
+  @Test
+  public void testArchiveConsoleLogs() throws Exception {
+    String[] args = {"utils", "-archive_console_logs", "-l", dir.toString()};
+    assertEquals(files.size(), fileCount());
+    assertEquals(0, runTool(args));
+    assertEquals(files.size()-1, fileCount());
+    assertFalse(listFiles().contains("solr-8983-console.log"));
+    assertTrue(Files.exists(dir.resolve("archived").resolve("solr-8983-console.log")));
+    assertEquals(0, runTool(args));
+    assertFalse(Files.exists(dir.resolve("archived").resolve("solr-8983-console.log")));
+  }
+
+  @Test
+  public void testRotateSolrLogs() throws Exception {
+    String[] args = {"utils", "-rotate_solr_logs", "9", "-l", dir.toString()};
+    assertEquals(files.size(), fileCount());
+    assertTrue(listFiles().contains("solr.log"));
+    assertEquals(0, runTool(args));
+    assertEquals(files.size()-3, fileCount());
+    assertTrue(listFiles().contains("solr.log.4"));
+    assertFalse(listFiles().contains("solr.log"));
+    assertFalse(listFiles().contains("solr.log.9"));
+    assertFalse(listFiles().contains("solr.log.10"));
+    assertFalse(listFiles().contains("solr.log.11"));
+  }
+  
+  private List<String> listFiles() throws IOException {
+    return Files.find(dir, 1, (p, a) -> a.isRegularFile()).map(p -> p.getFileName().toString()).collect(Collectors.toList());
+  }
+  
+  private long fileCount() throws IOException {
+    return listFiles().size();
+  }
+
+  private int runTool(String[] args) throws Exception {
+    SolrCLI.Tool tool = findTool(args);
+    CommandLine cli = parseCmdLine(args, tool.getOptions());
+    return tool.runTool(cli);
+  }
+}
\ No newline at end of file


[09/50] [abbrv] lucene-solr:jira/solr-8542-v2: Merge remote-tracking branch 'origin/master'

Posted by cp...@apache.org.
Merge remote-tracking branch 'origin/master'


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/53129ba6
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/53129ba6
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/53129ba6

Branch: refs/heads/jira/solr-8542-v2
Commit: 53129ba670f8fb5c0e352fe6940736ee5b050631
Parents: ffa5c4b b78f221
Author: Noble Paul <no...@gmail.com>
Authored: Wed Oct 19 00:54:30 2016 +0530
Committer: Noble Paul <no...@gmail.com>
Committed: Wed Oct 19 00:54:30 2016 +0530

----------------------------------------------------------------------
 .../lucene54/Lucene54DocValuesProducer.java     | 39 ++++++++++-
 .../codecs/memory/MemoryDocValuesProducer.java  | 69 +++++++-------------
 .../java/org/apache/lucene/util/LongValues.java | 21 +-----
 .../lucene/util/packed/TestDirectPacked.java    |  6 +-
 .../search/join/ToParentBlockJoinQuery.java     |  2 +-
 .../lucene/search/join/TestBlockJoin.java       | 66 +++++++++++++++++++
 6 files changed, 134 insertions(+), 69 deletions(-)
----------------------------------------------------------------------



[47/50] [abbrv] lucene-solr:jira/solr-8542-v2: SOLR-9634: correct name of deprecated/removed method in solr/CHANGES.txt

Posted by cp...@apache.org.
SOLR-9634: correct name of deprecated/removed method in solr/CHANGES.txt


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/37871de2
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/37871de2
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/37871de2

Branch: refs/heads/jira/solr-8542-v2
Commit: 37871de29bc5bd329eeb2f6867f3f8ca3b96e84f
Parents: 97339e2
Author: Christine Poerschke <cp...@apache.org>
Authored: Mon Oct 24 18:58:26 2016 +0100
Committer: Christine Poerschke <cp...@apache.org>
Committed: Mon Oct 24 18:58:26 2016 +0100

----------------------------------------------------------------------
 solr/CHANGES.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/37871de2/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index e223b4d..3bb28c4 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -98,7 +98,7 @@ Upgrade Notes
 
 * The create/deleteCollection methods on MiniSolrCloudCluster have been
   deprecated.  Clients should instead use the CollectionAdminRequest API.  In
-  addition, MiniSolrCloudCluster#uploadConfigSet(File, String) has been
+  addition, MiniSolrCloudCluster#uploadConfigDir(File, String) has been
   deprecated in favour of #uploadConfigSet(Path, String)
 
 * The bin/solr.in.sh (bin/solr.in.cmd on Windows) is now completely commented by default. Previously, this wasn't so,


[16/50] [abbrv] lucene-solr:jira/solr-8542-v2: Merge remote-tracking branch 'origin/master'

Posted by cp...@apache.org.
Merge remote-tracking branch 'origin/master'


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/67ba19a8
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/67ba19a8
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/67ba19a8

Branch: refs/heads/jira/solr-8542-v2
Commit: 67ba19a810a6bce652622de67e57b6c33096060f
Parents: 6893595 f43742a
Author: Noble Paul <no...@gmail.com>
Authored: Thu Oct 20 01:26:31 2016 +0530
Committer: Noble Paul <no...@gmail.com>
Committed: Thu Oct 20 01:26:31 2016 +0530

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |   9 +
 .../org/apache/lucene/facet/DrillSideways.java  |   2 +-
 .../facet/taxonomy/FastTaxonomyFacetCounts.java |  41 ++--
 .../analyzing/AnalyzingInfixSuggester.java      |  12 +-
 .../analyzing/AnalyzingInfixSuggesterTest.java  |  76 ++++++++
 .../org/apache/solr/handler/StreamHandler.java  |   4 +-
 .../client/solrj/io/stream/DaemonStream.java    |  44 ++++-
 .../solrj/io/stream/StreamExpressionTest.java   | 193 +++++++++++++++++--
 8 files changed, 327 insertions(+), 54 deletions(-)
----------------------------------------------------------------------



[13/50] [abbrv] lucene-solr:jira/solr-8542-v2: LUCENE-7506: FastTaxonomyFacetCounts use ConjunctionDISI so cost is in proportion to size of intersected set of documents

Posted by cp...@apache.org.
LUCENE-7506: FastTaxonomyFacetCounts use ConjunctionDISI so cost is in proportion to size of intersected set of documents


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d03cc92b
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d03cc92b
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d03cc92b

Branch: refs/heads/jira/solr-8542-v2
Commit: d03cc92b222681b5d701a0383d93c2ca5c1a186d
Parents: 731c5f9
Author: Mike McCandless <mi...@apache.org>
Authored: Wed Oct 19 10:04:39 2016 -0400
Committer: Mike McCandless <mi...@apache.org>
Committed: Wed Oct 19 10:04:39 2016 -0400

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |  5 +++
 .../facet/taxonomy/FastTaxonomyFacetCounts.java | 41 +++++++++-----------
 2 files changed, 24 insertions(+), 22 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d03cc92b/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 21ded1a..7105330 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -40,6 +40,11 @@ Optimizations
   in the sets of SHOULD and FILTER clauses, or both in MUST/FILTER and MUST_NOT
   clauses. (Spyros Kapnissis via Adrien Grand, Uwe Schindler)
 
+* LUCENE-7506: FastTaxonomyFacetCounts should use CPU in proportion to
+  the size of the intersected set of hits from the query and documents
+  that have a facet value, so sparse faceting works as expected
+  (Adrien Grand via Mike McCandless)
+
 Other
 
 * LUCENE-7328: Remove LegacyNumericEncoding from GeoPointField. (Nick Knize)

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d03cc92b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/FastTaxonomyFacetCounts.java
----------------------------------------------------------------------
diff --git a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/FastTaxonomyFacetCounts.java b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/FastTaxonomyFacetCounts.java
index 7ad5430..ef96073 100644
--- a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/FastTaxonomyFacetCounts.java
+++ b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/FastTaxonomyFacetCounts.java
@@ -17,12 +17,14 @@
 package org.apache.lucene.facet.taxonomy;
 
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.List;
 
 import org.apache.lucene.facet.FacetsCollector.MatchingDocs;
 import org.apache.lucene.facet.FacetsCollector;
 import org.apache.lucene.facet.FacetsConfig;
 import org.apache.lucene.index.BinaryDocValues;
+import org.apache.lucene.search.ConjunctionDISI;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.util.BytesRef;
 
@@ -55,29 +57,24 @@ public class FastTaxonomyFacetCounts extends IntTaxonomyFacets {
         continue;
       }
 
-      DocIdSetIterator docs = hits.bits.iterator();
+      DocIdSetIterator it = ConjunctionDISI.intersectIterators(Arrays.asList(
+          hits.bits.iterator(), dv));
       
-      int doc;
-      while ((doc = docs.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
-        if (dv.docID() < doc) {
-          dv.advance(doc);
-        }
-        if (dv.docID() == doc) {
-          final BytesRef bytesRef = dv.binaryValue();
-          byte[] bytes = bytesRef.bytes;
-          int end = bytesRef.offset + bytesRef.length;
-          int ord = 0;
-          int offset = bytesRef.offset;
-          int prev = 0;
-          while (offset < end) {
-            byte b = bytes[offset++];
-            if (b >= 0) {
-              prev = ord = ((ord << 7) | b) + prev;
-              ++values[ord];
-              ord = 0;
-            } else {
-              ord = (ord << 7) | (b & 0x7F);
-            }
+      for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
+        final BytesRef bytesRef = dv.binaryValue();
+        byte[] bytes = bytesRef.bytes;
+        int end = bytesRef.offset + bytesRef.length;
+        int ord = 0;
+        int offset = bytesRef.offset;
+        int prev = 0;
+        while (offset < end) {
+          byte b = bytes[offset++];
+          if (b >= 0) {
+            prev = ord = ((ord << 7) | b) + prev;
+            ++values[ord];
+            ord = 0;
+          } else {
+            ord = (ord << 7) | (b & 0x7F);
           }
         }
       }
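The pattern is reusable: ConjunctionDISI.intersectIterators builds a single iterator over the documents present in every input, so the loop beneath it is entered once per document in the intersection rather than once per hit. A minimal sketch (the two inputs stand for any DocIdSetIterators, here the collector's hit set and the binary doc-values iterator):

    DocIdSetIterator it = ConjunctionDISI.intersectIterators(
        Arrays.asList(hits.bits.iterator(), dv));
    for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
      // reached only for documents that are both hits and have a facet ordinal to count
    }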


[35/50] [abbrv] lucene-solr:jira/solr-8542-v2: SOLR-9662: New parameter -u in bin/post to pass basicauth credentials

Posted by cp...@apache.org.
SOLR-9662: New parameter -u <user:pass> in bin/post to pass basicauth credentials


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/e3a8a0fe
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/e3a8a0fe
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/e3a8a0fe

Branch: refs/heads/jira/solr-8542-v2
Commit: e3a8a0fe5f7ebff46509f51f9d490a5c801626ba
Parents: c9cf0ef
Author: Jan Høydahl <ja...@apache.org>
Authored: Sat Oct 22 02:02:07 2016 +0200
Committer: Jan Høydahl <ja...@apache.org>
Committed: Sat Oct 22 02:02:07 2016 +0200

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  2 +
 solr/bin/post                                   | 23 ++++++--
 .../org/apache/solr/util/SimplePostTool.java    | 59 +++++++++++++++-----
 3 files changed, 65 insertions(+), 19 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e3a8a0fe/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index b4b0a33..e1c3971 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -152,6 +152,8 @@ New Features
 * SOLR-9326: Ability to create/delete/list snapshots at collection level.
   (Hrishikesh Gadre via yonik)
 
+* SOLR-9662: New parameter -u <user:pass> in bin/post to pass basicauth credentials (janhoy)
+
 Bug Fixes
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e3a8a0fe/solr/bin/post
----------------------------------------------------------------------
diff --git a/solr/bin/post b/solr/bin/post
index 73e59ed..4c629d1 100755
--- a/solr/bin/post
+++ b/solr/bin/post
@@ -68,6 +68,7 @@ function print_usage() {
   echo "    -host <host> (default: localhost)"
   echo "    -p or -port <port> (default: 8983)"
   echo "    -commit yes|no (default: yes)"
+  echo "    -u or -user <user:pass> (sets BasicAuth credentials)"
   # optimize intentionally omitted, but can be used as '-optimize yes' (default: no)
   echo ""
   echo "  Web crawl options:"
@@ -155,13 +156,23 @@ while [ $# -gt 0 ]; do
             ARGS+=("<add/>")
           fi
         fi
-      else
-        key="${1:1}"
+      elif [[ ("$1" == "-u" || "$1" == "-user") ]]; then
         shift
-#       echo "$1: PROP"
-        PROPS+=("-D$key=$1")
-        if [[ "$key" == "url" ]]; then
-          SOLR_URL=$1
+        PROPS+=("-Dbasicauth=$1")
+      else
+        if [[ "$1" == -D* ]] ; then
+          PROPS+=("$1")
+          if [[ "${1:2:4}" == "url=" ]]; then
+            SOLR_URL=${1:6}
+          fi
+        else          
+          key="${1:1}"
+          shift
+  #       echo "$1: PROP"
+          PROPS+=("-D$key=$1")
+          if [[ "$key" == "url" ]]; then
+            SOLR_URL=$1
+          fi
         fi
       fi
     else

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e3a8a0fe/solr/core/src/java/org/apache/solr/util/SimplePostTool.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/SimplePostTool.java b/solr/core/src/java/org/apache/solr/util/SimplePostTool.java
index 20e7231..a8ef372 100644
--- a/solr/core/src/java/org/apache/solr/util/SimplePostTool.java
+++ b/solr/core/src/java/org/apache/solr/util/SimplePostTool.java
@@ -42,6 +42,7 @@ import java.nio.BufferOverflowException;
 import java.nio.ByteBuffer;
 import java.nio.charset.Charset;
 import java.nio.charset.StandardCharsets;
+import java.security.GeneralSecurityException;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Base64;
@@ -65,6 +66,9 @@ import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
 import org.xml.sax.SAXException;
 
+import static java.nio.charset.StandardCharsets.US_ASCII;
+import static java.nio.charset.StandardCharsets.UTF_8;
+
 /**
  * A simple utility class for posting raw updates to a Solr server, 
  * has a main method so it can be run on the command line.
@@ -86,6 +90,7 @@ public class SimplePostTool {
   private static final int MAX_WEB_DEPTH = 10;
   private static final String DEFAULT_CONTENT_TYPE = "application/xml";
   private static final String DEFAULT_FILE_TYPES = "xml,json,jsonl,csv,pdf,doc,docx,ppt,pptx,xls,xlsx,odt,odp,ods,ott,otp,ots,rtf,htm,html,txt,log";
+  private static final String BASIC_AUTH = "basicauth";
 
   static final String DATA_MODE_FILES = "files";
   static final String DATA_MODE_ARGS = "args";
@@ -233,6 +238,15 @@ public class SimplePostTool {
       }
       urlStr = SimplePostTool.appendParam(urlStr, params);
       URL url = new URL(urlStr);
+      String user = null;
+      if (url.getUserInfo() != null && url.getUserInfo().trim().length() > 0) {
+        user = url.getUserInfo().split(":")[0];
+      } else if (System.getProperty(BASIC_AUTH) != null) {
+        user = System.getProperty(BASIC_AUTH).trim().split(":")[0];
+      }
+      if (user != null)
+        info("Basic Authentication enabled, user=" + user);
+      
       boolean auto = isOn(System.getProperty("auto", DEFAULT_AUTO));
       String type = System.getProperty("type");
       String format = System.getProperty("format");
@@ -385,6 +399,7 @@ public class SimplePostTool {
      "  -Dtype=<content-type> (default=" + DEFAULT_CONTENT_TYPE + ")\n"+
      "  -Dhost=<host> (default: " + DEFAULT_POST_HOST+ ")\n"+
      "  -Dport=<port> (default: " + DEFAULT_POST_PORT+ ")\n"+
+     "  -Dbasicauth=<user:pass> (sets Basic Authentication credentials)\n"+
      "  -Dauto=yes|no (default=" + DEFAULT_AUTO + ")\n"+
      "  -Drecursive=yes|no|<depth> (default=" + DEFAULT_RECURSIVE + ")\n"+
      "  -Ddelay=<seconds> (default=0 for files, 10 for web)\n"+
@@ -851,14 +866,13 @@ public class SimplePostTool {
     try {
       if(mockMode) return;
       HttpURLConnection urlc = (HttpURLConnection) url.openConnection();
-      if (url.getUserInfo() != null) {
-        String encoding = Base64.getEncoder().encodeToString(url.getUserInfo().getBytes(StandardCharsets.US_ASCII));
-        urlc.setRequestProperty("Authorization", "Basic " + encoding);
-      }
+      basicAuth(urlc);
       urlc.connect();
       checkResponseCode(urlc);
     } catch (IOException e) {
-      warn("An error occurred posting data to "+url+". Please check that Solr is running.");
+      warn("An error occurred getting data from "+url+". Please check that Solr is running.");
+    } catch (Exception e) {
+      warn("An error occurred getting data from "+url+". Message: " + e.getMessage());
     }
   }
 
@@ -886,10 +900,7 @@ public class SimplePostTool {
         urlc.setUseCaches(false);
         urlc.setAllowUserInteraction(false);
         urlc.setRequestProperty("Content-type", type);
-        if (url.getUserInfo() != null) {
-          String encoding = Base64.getEncoder().encodeToString(url.getUserInfo().getBytes(StandardCharsets.US_ASCII));
-          urlc.setRequestProperty("Authorization", "Basic " + encoding);
-        }
+        basicAuth(urlc);
         if (null != length) {
           urlc.setFixedLengthStreamingMode(length);
         } else {
@@ -899,13 +910,14 @@ public class SimplePostTool {
       } catch (IOException e) {
         fatal("Connection error (is Solr running at " + solrUrl + " ?): " + e);
         success = false;
+      } catch (Exception e) {
+        fatal("POST failed with error " + e.getMessage());
       }
-      
+
       try (final OutputStream out = urlc.getOutputStream()) {
         pipe(data, out);
       } catch (IOException e) {
         fatal("IOException while posting data: " + e);
-        success = false;
       }
       
       try {
@@ -916,14 +928,29 @@ public class SimplePostTool {
       } catch (IOException e) {
         warn("IOException while reading response: " + e);
         success = false;
+      } catch (GeneralSecurityException e) {
+        fatal("Looks like Solr is secured and would not let us in. Try with another user in '-u' parameter");
       }
     } finally {
       if (urlc!=null) urlc.disconnect();
     }
     return success;
   }
-  
-  private static boolean checkResponseCode(HttpURLConnection urlc) throws IOException {
+
+  private static void basicAuth(HttpURLConnection urlc) throws Exception {
+    if (urlc.getURL().getUserInfo() != null) {
+      String encoding = Base64.getEncoder().encodeToString(urlc.getURL().getUserInfo().getBytes(US_ASCII));
+      urlc.setRequestProperty("Authorization", "Basic " + encoding);
+    } else if (System.getProperty(BASIC_AUTH) != null) {
+      String basicauth = System.getProperty(BASIC_AUTH).trim();
+      if (!basicauth.contains(":")) {
+        throw new Exception("System property '"+BASIC_AUTH+"' must be of format user:pass");
+      }
+      urlc.setRequestProperty("Authorization", "Basic " + Base64.getEncoder().encodeToString(basicauth.getBytes(UTF_8)));
+    }
+  }
+
+  private static boolean checkResponseCode(HttpURLConnection urlc) throws IOException, GeneralSecurityException {
     if (urlc.getResponseCode() >= 400) {
       warn("Solr returned an error #" + urlc.getResponseCode() + 
             " (" + urlc.getResponseMessage() + ") for url: " + urlc.getURL());
@@ -948,6 +975,12 @@ public class SimplePostTool {
           warn(response.toString().trim());
         }
       }
+      if (urlc.getResponseCode() == 401) {
+        throw new GeneralSecurityException("Solr requires authentication (response 401). Please try again with '-u' option");
+      }
+      if (urlc.getResponseCode() == 403) {
+        throw new GeneralSecurityException("You are not authorized to perform this action against Solr. (response 403)");
+      }
       return false;
     }
     return true;
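A hedged usage sketch of the new flag (credentials, collection and file names are placeholders); bin/post simply forwards -u as the basicauth system property that SimplePostTool reads:

    bin/post -u solr:SolrRocks -c mycollection example/exampledocs/books.json

If Solr answers 401 or 403, the tool now fails with the GeneralSecurityException messages above instead of only logging the generic "Solr returned an error" warning.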


[15/50] [abbrv] lucene-solr:jira/solr-8542-v2: SOLR-9657: Addressing test failures

Posted by cp...@apache.org.
SOLR-9657: Addressing test failures


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/6893595f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/6893595f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/6893595f

Branch: refs/heads/jira/solr-8542-v2
Commit: 6893595f340b748a9c5e5c3884704e71275deab4
Parents: c2e031a
Author: Noble Paul <no...@gmail.com>
Authored: Thu Oct 20 01:26:02 2016 +0530
Committer: Noble Paul <no...@gmail.com>
Committed: Thu Oct 20 01:26:02 2016 +0530

----------------------------------------------------------------------
 .../solr/update/processor/SimpleUpdateProcessorFactory.java     | 5 +++--
 .../solr/update/processor/TemplateUpdateProcessorFactory.java   | 3 ++-
 2 files changed, 5 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6893595f/solr/core/src/java/org/apache/solr/update/processor/SimpleUpdateProcessorFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/SimpleUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/SimpleUpdateProcessorFactory.java
index aec9d87..b1edea0 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/SimpleUpdateProcessorFactory.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/SimpleUpdateProcessorFactory.java
@@ -29,13 +29,14 @@ import org.apache.solr.update.AddUpdateCommand;
  * This is deliberately made to support only the add operation
  */
 public abstract class SimpleUpdateProcessorFactory extends UpdateRequestProcessorFactory {
-  protected final String myName;
+  protected final String myName; // if classname==XyzUpdateProcessorFactory  myName=Xyz
   protected NamedList initArgs = new NamedList();
   private static ThreadLocal<SolrQueryRequest> REQ = new ThreadLocal<>();
 
   protected SimpleUpdateProcessorFactory() {
     String simpleName = this.getClass().getSimpleName();
-    this.myName = simpleName.substring(0, simpleName.indexOf("UpdateProcessorFactory"));
+    int idx = simpleName.indexOf("UpdateProcessorFactory");
+    this.myName = idx == -1 ? simpleName : simpleName.substring(0, idx);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6893595f/solr/core/src/java/org/apache/solr/update/processor/TemplateUpdateProcessorFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/TemplateUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/TemplateUpdateProcessorFactory.java
index 41d109b..dbe61b4 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/TemplateUpdateProcessorFactory.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/TemplateUpdateProcessorFactory.java
@@ -29,7 +29,8 @@ import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.update.AddUpdateCommand;
-
+//Adds new fields to documents based on a template pattern specified via Template.field
+// request parameters (multi-valued) or 'field' value specified in initArgs
 public class TemplateUpdateProcessorFactory extends SimpleUpdateProcessorFactory {
   @Override
   protected void process(AddUpdateCommand cmd, SolrQueryRequest req, SolrQueryResponse rsp) {
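Put differently, the factory now derives its request-parameter prefix defensively from the class name; a couple of hypothetical names to illustrate:

    // "TemplateUpdateProcessorFactory" -> myName = "Template"   (params look like Template.field=...)
    // "MyCustomFactory"                -> myName = "MyCustomFactory"  (no "UpdateProcessorFactory" suffix,
    //                                     so the whole simple name is used instead of throwing)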


[27/50] [abbrv] lucene-solr:jira/solr-8542-v2: Relax scores comparisons in TestBooleanRewrites.

Posted by cp...@apache.org.
Relax scores comparisons in TestBooleanRewrites.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/36e997d4
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/36e997d4
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/36e997d4

Branch: refs/heads/jira/solr-8542-v2
Commit: 36e997d45c5e393e6f63effccaf7d3d4ea25b018
Parents: af88e7f
Author: Adrien Grand <jp...@gmail.com>
Authored: Fri Oct 21 09:12:28 2016 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Fri Oct 21 09:12:28 2016 +0200

----------------------------------------------------------------------
 .../src/test/org/apache/lucene/search/TestBooleanRewrites.java     | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/36e997d4/lucene/core/src/test/org/apache/lucene/search/TestBooleanRewrites.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBooleanRewrites.java b/lucene/core/src/test/org/apache/lucene/search/TestBooleanRewrites.java
index 5e65906..1fd3d4b 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestBooleanRewrites.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestBooleanRewrites.java
@@ -424,7 +424,7 @@ public class TestBooleanRewrites extends LuceneTestCase {
     for (ScoreDoc scoreDoc : td2.scoreDocs) {
       final float expectedScore = expectedScores.get(scoreDoc.doc);
       final float actualScore = scoreDoc.score;
-      assertEquals(expectedScore, actualScore, 10e-5);
+      assertEquals(expectedScore, actualScore, expectedScore / 100); // error under 1%
     }
   }
 }


[06/50] [abbrv] lucene-solr:jira/solr-8542-v2: LUCENE-7503: Undeprecate o.o.l.util.LongValues.

Posted by cp...@apache.org.
LUCENE-7503: Undeprecate o.o.l.util.LongValues.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/3be6701f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/3be6701f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/3be6701f

Branch: refs/heads/jira/solr-8542-v2
Commit: 3be6701f17d9a507e07e4a3f01bcfd702bdfc806
Parents: 9aa764a
Author: Adrien Grand <jp...@gmail.com>
Authored: Tue Oct 18 18:28:39 2016 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Tue Oct 18 18:50:57 2016 +0200

----------------------------------------------------------------------
 .../lucene54/Lucene54DocValuesProducer.java     | 39 ++++++++++-
 .../codecs/memory/MemoryDocValuesProducer.java  | 69 +++++++-------------
 .../java/org/apache/lucene/util/LongValues.java | 21 +-----
 .../lucene/util/packed/TestDirectPacked.java    |  6 +-
 4 files changed, 67 insertions(+), 68 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3be6701f/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesProducer.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesProducer.java b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesProducer.java
index 8a44c31..1f785fe 100644
--- a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesProducer.java
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesProducer.java
@@ -491,7 +491,44 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
         docsWithField = getLiveBits(entry.missingOffset, maxDoc);
       }
     }
-    return new LegacyNumericDocValuesWrapper(docsWithField, getNumeric(entry));
+    final LongValues values = getNumeric(entry);
+    return new NumericDocValues() {
+
+      int doc = -1;
+      long value;
+
+      @Override
+      public long longValue() throws IOException {
+        return value;
+      }
+
+      @Override
+      public int docID() {
+        return doc;
+      }
+
+      @Override
+      public int nextDoc() throws IOException {
+        return advance(doc + 1);
+      }
+
+      @Override
+      public int advance(int target) throws IOException {
+        for (int doc = target; doc < maxDoc; ++doc) {
+          value = values.get(doc);
+          if (value != 0 || docsWithField.get(doc)) {
+            return this.doc = doc;
+          }
+        }
+        return doc = NO_MORE_DOCS;
+      }
+
+      @Override
+      public long cost() {
+        return maxDoc;
+      }
+
+    };
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3be6701f/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryDocValuesProducer.java
----------------------------------------------------------------------
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryDocValuesProducer.java b/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryDocValuesProducer.java
index bff8c2d..b81e56e 100644
--- a/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryDocValuesProducer.java
+++ b/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryDocValuesProducer.java
@@ -42,7 +42,6 @@ import org.apache.lucene.util.FixedBitSet;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.IntsRef;
 import org.apache.lucene.util.IntsRefBuilder;
-import org.apache.lucene.util.LongValues;
 import org.apache.lucene.util.PagedBytes;
 import org.apache.lucene.util.RamUsageEstimator;
 import org.apache.lucene.util.fst.BytesRefFSTEnum.InputOutput;
@@ -371,7 +370,12 @@ class MemoryDocValuesProducer extends DocValuesProducer {
           ramBytesUsed.addAndGet(reader.ramBytesUsed());
           numericInfo.put(field.name, Accountables.namedAccountable("block compressed", reader));
         }
-        return reader;
+        return new LegacyNumericDocValues() {
+          @Override
+          public long get(int docID) {
+            return reader.get(docID);
+          }
+        };
       case GCD_COMPRESSED:
         final long min = data.readLong();
         final long mult = data.readLong();
@@ -568,51 +572,26 @@ class MemoryDocValuesProducer extends DocValuesProducer {
         }
         addr = res;
       }
-      if (values instanceof LongValues) {
-        // probably not the greatest codec choice for this situation, but we support it
-        final LongValues longValues = (LongValues) values;
-        return new LegacySortedNumericDocValuesWrapper(new LegacySortedNumericDocValues() {
-          long startOffset;
-          long endOffset;
-          
-          @Override
-          public void setDocument(int doc) {
-            startOffset = (int) addr.get(doc);
-            endOffset = (int) addr.get(doc+1L);
-          }
-
-          @Override
-          public long valueAt(int index) {
-            return longValues.get(startOffset + index);
-          }
-
-          @Override
-          public int count() {
-            return (int) (endOffset - startOffset);
-          }
-          }, maxDoc);
-      } else {
-        return new LegacySortedNumericDocValuesWrapper(new LegacySortedNumericDocValues() {
-          int startOffset;
-          int endOffset;
-        
-          @Override
-          public void setDocument(int doc) {
-            startOffset = (int) addr.get(doc);
-            endOffset = (int) addr.get(doc+1);
-          }
+      return new LegacySortedNumericDocValuesWrapper(new LegacySortedNumericDocValues() {
+        int startOffset;
+        int endOffset;
+      
+        @Override
+        public void setDocument(int doc) {
+          startOffset = (int) addr.get(doc);
+          endOffset = (int) addr.get(doc+1);
+        }
 
-          @Override
-          public long valueAt(int index) {
-            return values.get(startOffset + index);
-          }
+        @Override
+        public long valueAt(int index) {
+          return values.get(startOffset + index);
+        }
 
-          @Override
-          public int count() {
-            return (endOffset - startOffset);
-          }
-          }, maxDoc);
-      }
+        @Override
+        public int count() {
+          return (endOffset - startOffset);
+        }
+        }, maxDoc);
     }
   }
   

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3be6701f/lucene/core/src/java/org/apache/lucene/util/LongValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/util/LongValues.java b/lucene/core/src/java/org/apache/lucene/util/LongValues.java
index aa3b7fa..23f4d32 100644
--- a/lucene/core/src/java/org/apache/lucene/util/LongValues.java
+++ b/lucene/core/src/java/org/apache/lucene/util/LongValues.java
@@ -16,21 +16,9 @@
  */
 package org.apache.lucene.util;
 
-
-import org.apache.lucene.index.LegacyNumericDocValues;
-import org.apache.lucene.index.NumericDocValues;
-import org.apache.lucene.util.packed.PackedInts;
-
 /** Abstraction over an array of longs.
- *  This class extends NumericDocValues so that we don't need to add another
- *  level of abstraction every time we want eg. to use the {@link PackedInts}
- *  utility classes to represent a {@link LegacyNumericDocValues} instance.
- *  @lucene.internal
- *
- *  @deprecated Switch to {@link NumericDocValues} instead. */
- @Deprecated
-// TODO: cutover to iterator once codecs have all cutover?
-public abstract class LongValues extends LegacyNumericDocValues {
+ *  @lucene.internal */
+public abstract class LongValues  {
 
   /** An instance that returns the provided value. */
   public static final LongValues IDENTITY = new LongValues() {
@@ -45,9 +33,4 @@ public abstract class LongValues extends LegacyNumericDocValues {
   /** Get value at <code>index</code>. */
   public abstract long get(long index);
 
-  @Override
-  public long get(int idx) {
-    return get((long) idx);
-  }
-
 }
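
For context on the undeprecated abstraction: with the LegacyNumericDocValues parent removed, LongValues is now just a random-access view over longs with a single abstract method. A minimal sketch of an implementation over a plain array (illustrative only, not part of the patch):

  import org.apache.lucene.util.LongValues;

  public class ArrayLongValues extends LongValues {
    private final long[] values;

    public ArrayLongValues(long[] values) {
      this.values = values;
    }

    @Override
    public long get(long index) {
      // The only method required: random access by (long) index.
      return values[(int) index];
    }
  }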

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3be6701f/lucene/core/src/test/org/apache/lucene/util/packed/TestDirectPacked.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/util/packed/TestDirectPacked.java b/lucene/core/src/test/org/apache/lucene/util/packed/TestDirectPacked.java
index 4287125..3c5d5d8 100644
--- a/lucene/core/src/test/org/apache/lucene/util/packed/TestDirectPacked.java
+++ b/lucene/core/src/test/org/apache/lucene/util/packed/TestDirectPacked.java
@@ -19,12 +19,12 @@ package org.apache.lucene.util.packed;
 
 import java.util.Random;
 
-import org.apache.lucene.index.LegacyNumericDocValues;
 import org.apache.lucene.store.ByteArrayDataInput;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.util.LongValues;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.packed.DirectReader;
@@ -46,7 +46,7 @@ public class TestDirectPacked extends LuceneTestCase {
     writer.finish();
     output.close();
     IndexInput input = dir.openInput("foo", IOContext.DEFAULT);
-    LegacyNumericDocValues reader = DirectReader.getInstance(input.randomAccessSlice(0, input.length()), bitsPerValue, 0);
+    LongValues reader = DirectReader.getInstance(input.randomAccessSlice(0, input.length()), bitsPerValue, 0);
     assertEquals(1, reader.get(0));
     assertEquals(0, reader.get(1));
     assertEquals(2, reader.get(2));
@@ -110,7 +110,7 @@ public class TestDirectPacked extends LuceneTestCase {
       writer.finish();
       output.close();
       IndexInput input = directory.openInput(name, IOContext.DEFAULT);
-      LegacyNumericDocValues reader = DirectReader.getInstance(input.randomAccessSlice(0, input.length()), bitsRequired, offset);
+      LongValues reader = DirectReader.getInstance(input.randomAccessSlice(0, input.length()), bitsRequired, offset);
       for (int j = 0; j < original.length; j++) {
         assertEquals("bpv=" + bpv, original[j], reader.get(j));
       }


[05/50] [abbrv] lucene-solr:jira/solr-8542-v2: * SOLR-9506: cache IndexFingerprint for each segment

Posted by cp...@apache.org.
* SOLR-9506: cache IndexFingerprint for each segment


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/9aa764a5
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/9aa764a5
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/9aa764a5

Branch: refs/heads/jira/solr-8542-v2
Commit: 9aa764a54f50eca5a8ef805bdb29e4ad90fcce5e
Parents: fe17b4e
Author: Noble Paul <no...@gmail.com>
Authored: Tue Oct 18 21:09:42 2016 +0530
Committer: Noble Paul <no...@gmail.com>
Committed: Tue Oct 18 21:09:42 2016 +0530

----------------------------------------------------------------------
 solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aa764a5/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index 59797b9..02069ee 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@ -277,7 +277,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
 
     this.path = path;
     this.directoryFactory = directoryFactory;
-    this.reader = (DirectoryReader) super.getTopReaderContext().reader();
+    this.reader = (DirectoryReader) super.readerContext.reader();
     this.rawReader = r;
     this.leafReader = SlowCompositeReaderWrapper.wrap(this.reader);
     this.core = core;


[24/50] [abbrv] lucene-solr:jira/solr-8542-v2: SOLR-9657: Use cache for templates

Posted by cp...@apache.org.
SOLR-9657: Use cache for templates


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/127bf9f7
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/127bf9f7
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/127bf9f7

Branch: refs/heads/jira/solr-8542-v2
Commit: 127bf9f772468cbc94478ad67d54652001b175e0
Parents: 14b6d93
Author: Noble Paul <no...@apache.org>
Authored: Thu Oct 20 20:06:21 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Thu Oct 20 20:06:21 2016 +0530

----------------------------------------------------------------------
 .../handler/dataimport/VariableResolver.java    |  4 +-
 .../TemplateUpdateProcessorFactory.java         | 13 +++--
 .../java/org/apache/solr/common/util/Cache.java | 26 +++++++--
 .../apache/solr/common/util/MapBackedCache.java | 57 ++++++++++++++++++++
 .../solr/common/util/TestJavaBinCodec.java      | 52 +-----------------
 5 files changed, 92 insertions(+), 60 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/127bf9f7/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/VariableResolver.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/VariableResolver.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/VariableResolver.java
index f255657..51b5841 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/VariableResolver.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/VariableResolver.java
@@ -25,6 +25,8 @@ import java.util.function.Function;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.solr.common.util.Cache;
+import org.apache.solr.common.util.MapBackedCache;
 import org.apache.solr.update.processor.TemplateUpdateProcessorFactory;
 
 import static org.apache.solr.update.processor.TemplateUpdateProcessorFactory.Resolved;
@@ -55,7 +57,7 @@ public class VariableResolver {
       .compile("^(\\w*?)\\((.*?)\\)$");
   private Map<String,Object> rootNamespace;
   private Map<String,Evaluator> evaluators;
-  private Map<String,Resolved> cache = new WeakHashMap<>();
+  private Cache<String,Resolved> cache = new MapBackedCache<>(new WeakHashMap<>());
   private Function<String,Object> fun = this::resolve;
 
   public static final String FUNCTIONS_NAMESPACE = "dataimporter.functions.";

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/127bf9f7/solr/core/src/java/org/apache/solr/update/processor/TemplateUpdateProcessorFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/TemplateUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/TemplateUpdateProcessorFactory.java
index dbe61b4..b791d3b 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/TemplateUpdateProcessorFactory.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/TemplateUpdateProcessorFactory.java
@@ -20,18 +20,21 @@ package org.apache.solr.update.processor;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import java.util.Map;
 import java.util.function.Function;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.util.Cache;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.update.AddUpdateCommand;
+import org.apache.solr.util.ConcurrentLRUCache;
+
 //Adds new fields to documents based on a template pattern specified via Template.field
 // request parameters (multi-valued) or 'field' value specified in initArgs
 public class TemplateUpdateProcessorFactory extends SimpleUpdateProcessorFactory {
+  private Cache<String, Resolved> templateCache = new ConcurrentLRUCache<>(1000, 800, 900, 10, false, false, null);
   @Override
   protected void process(AddUpdateCommand cmd, SolrQueryRequest req, SolrQueryResponse rsp) {
     String[] vals = getParams("field");
@@ -45,7 +48,7 @@ public class TemplateUpdateProcessorFactory extends SimpleUpdateProcessorFactory
 
         String fName = val.substring(0, idx);
         String template = val.substring(idx + 1);
-        doc.addField(fName, replaceTokens(template, null, s -> {
+        doc.addField(fName, replaceTokens(template, templateCache, s -> {
           Object v = doc.getFieldValue(s);
           return v == null ? "" : v;
         }));
@@ -55,7 +58,7 @@ public class TemplateUpdateProcessorFactory extends SimpleUpdateProcessorFactory
   }
 
 
-  public static Resolved getResolved(String template, Map<String, Resolved> cache) {
+  public static Resolved getResolved(String template, Cache<String, Resolved> cache) {
     Resolved r = cache == null ? null : cache.get(template);
     if (r == null) {
       r = new Resolved();
@@ -74,7 +77,7 @@ public class TemplateUpdateProcessorFactory extends SimpleUpdateProcessorFactory
   /**
    * Get a list of variables embedded in the template string.
    */
-  public static List<String> getVariables(String template, Map<String, Resolved> cache) {
+  public static List<String> getVariables(String template, Cache<String, Resolved> cache) {
     Resolved r = getResolved(template, cache);
     if (r == null) {
       return Collections.emptyList();
@@ -82,7 +85,7 @@ public class TemplateUpdateProcessorFactory extends SimpleUpdateProcessorFactory
     return new ArrayList<>(r.variables);
   }
 
-  public static String replaceTokens(String template, Map<String, Resolved> cache, Function<String, Object> fun) {
+  public static String replaceTokens(String template, Cache<String, Resolved> cache, Function<String, Object> fun) {
     if (template == null) {
       return null;
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/127bf9f7/solr/solrj/src/java/org/apache/solr/common/util/Cache.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/Cache.java b/solr/solrj/src/java/org/apache/solr/common/util/Cache.java
index 7be7f0c..2ec4d7e 100644
--- a/solr/solrj/src/java/org/apache/solr/common/util/Cache.java
+++ b/solr/solrj/src/java/org/apache/solr/common/util/Cache.java
@@ -16,13 +16,31 @@
  */
 package org.apache.solr.common.util;
 
+import java.util.Objects;
+import java.util.function.Function;
+
 public interface Cache<K, V> {
-  public V put(K key, V val);
+  V put(K key, V val);
+
+  V get(K key);
+
+  V remove(K key);
 
-  public V get(K key);
+  void clear();
 
-  public V remove(K key);
+  default V computeIfAbsent(K key,
+                            Function<? super K, ? extends V> mappingFunction) {
+    Objects.requireNonNull(mappingFunction);
+    V v;
+    if ((v = get(key)) == null) {
+      V newValue;
+      if ((newValue = mappingFunction.apply(key)) != null) {
+        put(key, newValue);
+        return newValue;
+      }
+    }
 
-  public void clear();
+    return v;
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/127bf9f7/solr/solrj/src/java/org/apache/solr/common/util/MapBackedCache.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/MapBackedCache.java b/solr/solrj/src/java/org/apache/solr/common/util/MapBackedCache.java
new file mode 100644
index 0000000..1aba20b
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/common/util/MapBackedCache.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.common.util;
+
+
+import java.util.Map;
+import java.util.function.Function;
+
+// A cache backed by a map
+public class MapBackedCache<K, V> implements Cache<K, V> {
+
+  private final Map<K, V> map;
+
+  public MapBackedCache(Map<K, V> map) {
+    this.map = map;
+  }
+
+  @Override
+  public V put(K key, V val) {
+    return map.put(key, val);
+  }
+
+  @Override
+  public V get(K key) {
+    return map.get(key);
+  }
+
+  @Override
+  public V remove(K key) {
+    return map.remove(key);
+  }
+
+  @Override
+  public void clear() {
+    map.clear();
+  }
+
+  @Override
+  public V computeIfAbsent(K key, Function<? super K, ? extends V> mappingFunction) {
+    return map.computeIfAbsent(key, mappingFunction);
+  }
+}
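
A brief usage sketch of the Cache/MapBackedCache pair introduced above (the WeakHashMap choice mirrors the VariableResolver change in this commit; the String/Integer types are arbitrary):

  import java.util.WeakHashMap;

  import org.apache.solr.common.util.Cache;
  import org.apache.solr.common.util.MapBackedCache;

  public class CacheSketch {
    public static void main(String[] args) {
      // Any Map can back the Cache interface; VariableResolver now wraps a WeakHashMap this way.
      Cache<String, Integer> cache = new MapBackedCache<>(new WeakHashMap<>());

      // computeIfAbsent is a default method on Cache; MapBackedCache overrides it
      // to delegate to Map.computeIfAbsent.
      int len = cache.computeIfAbsent("template", String::length);
      System.out.println(len);                   // 8
      System.out.println(cache.get("template")); // 8
    }
  }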

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/127bf9f7/solr/solrj/src/test/org/apache/solr/common/util/TestJavaBinCodec.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/common/util/TestJavaBinCodec.java b/solr/solrj/src/test/org/apache/solr/common/util/TestJavaBinCodec.java
index 96ddc8b..e2229c5 100644
--- a/solr/solrj/src/test/org/apache/solr/common/util/TestJavaBinCodec.java
+++ b/solr/solrj/src/test/org/apache/solr/common/util/TestJavaBinCodec.java
@@ -340,30 +340,7 @@ public class TestJavaBinCodec extends SolrTestCaseJ4 {
     assertTrue(l1.get(1).equals(l2.get(1)));
     assertFalse(l1.get(1) == l2.get(1));
 
-    JavaBinCodec.StringCache stringCache = new JavaBinCodec.StringCache(new Cache<JavaBinCodec.StringBytes, String>() {
-      private HashMap<JavaBinCodec.StringBytes, String> cache = new HashMap<>();
-
-      @Override
-      public String put(JavaBinCodec.StringBytes key, String val) {
-        return cache.put(key, val);
-      }
-
-      @Override
-      public String get(JavaBinCodec.StringBytes key) {
-        return cache.get(key);
-      }
-
-      @Override
-      public String remove(JavaBinCodec.StringBytes key) {
-        return cache.remove(key);
-      }
-
-      @Override
-      public void clear() {
-        cache.clear();
-
-      }
-    });
+    JavaBinCodec.StringCache stringCache = new JavaBinCodec.StringCache(new MapBackedCache<>(new HashMap<>()));
 
 
     m1 = (Map) new JavaBinCodec(null, stringCache).unmarshal(new ByteArrayInputStream(b1));
@@ -409,32 +386,7 @@ public class TestJavaBinCodec extends SolrTestCaseJ4 {
     Runtime.getRuntime().gc();
     printMem("before cache init");
 
-    Cache<JavaBinCodec.StringBytes, String> cache1 = new Cache<JavaBinCodec.StringBytes, String>() {
-      private HashMap<JavaBinCodec.StringBytes, String> cache = new HashMap<>();
-
-      @Override
-      public String put(JavaBinCodec.StringBytes key, String val) {
-        l.add(key);
-        return cache.put(key, val);
-
-      }
-
-      @Override
-      public String get(JavaBinCodec.StringBytes key) {
-        return cache.get(key);
-      }
-
-      @Override
-      public String remove(JavaBinCodec.StringBytes key) {
-        return cache.remove(key);
-      }
-
-      @Override
-      public void clear() {
-        cache.clear();
-
-      }
-    };
+    Cache<JavaBinCodec.StringBytes, String> cache1 = new MapBackedCache<>(new HashMap<>()) ;
     final JavaBinCodec.StringCache STRING_CACHE = new JavaBinCodec.StringCache(cache1);
 
 //    STRING_CACHE = new JavaBinCodec.StringCache(cache);
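
Tying the pieces of this commit together, a rough sketch of driving the now Cache-typed template helpers directly (the ${...} placeholder syntax and the field values are assumptions for illustration; only the replaceTokens and MapBackedCache signatures come from the patch):

  import java.util.HashMap;
  import java.util.Map;

  import org.apache.solr.common.util.Cache;
  import org.apache.solr.common.util.MapBackedCache;
  import org.apache.solr.update.processor.TemplateUpdateProcessorFactory;
  import org.apache.solr.update.processor.TemplateUpdateProcessorFactory.Resolved;

  public class ReplaceTokensSketch {
    public static void main(String[] args) {
      // Parsed templates are memoized through the Cache abstraction instead of a raw Map.
      Cache<String, Resolved> cache = new MapBackedCache<>(new HashMap<>());

      Map<String, Object> doc = new HashMap<>();
      doc.put("firstName", "Jane");
      doc.put("lastName", "Doe");

      String id = TemplateUpdateProcessorFactory.replaceTokens(
          "${firstName}_${lastName}", cache, doc::get);
      System.out.println(id); // expected "Jane_Doe", assuming the ${...} syntax above
    }
  }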


[18/50] [abbrv] lucene-solr:jira/solr-8542-v2: SOLR-7850: Move defaults in bin/solr.in.sh into bin/solr (incl. Windows)

Posted by cp...@apache.org.
SOLR-7850: Move defaults in bin/solr.in.sh into bin/solr (incl. Windows)


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/fe77dff0
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/fe77dff0
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/fe77dff0

Branch: refs/heads/jira/solr-8542-v2
Commit: fe77dff09406b0c848a269a6bfee490ea6c67015
Parents: 8ae3304
Author: David Smiley <ds...@apache.org>
Authored: Wed Oct 19 16:45:13 2016 -0400
Committer: David Smiley <ds...@apache.org>
Committed: Wed Oct 19 16:45:13 2016 -0400

----------------------------------------------------------------------
 solr/CHANGES.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/fe77dff0/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 56f3b80..efea20f 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -301,7 +301,7 @@ Other Changes
 
 * SOLR-9634: Deprecate collection methods on MiniSolrCloudCluster (Alan Woodward)
 
-* SOLR-7580: Moved defaults within bin/solr.in.sh (and bin/solr.in.cmd on Windows) to bin/solr (and bin/solr.cmd)
+* SOLR-7850: Moved defaults within bin/solr.in.sh (and bin/solr.in.cmd on Windows) to bin/solr (and bin/solr.cmd)
   such that the default state of these files is to set nothing. This makes Solr work better with Docker. (David Smiley)
 
 ==================  6.2.1 ==================


[36/50] [abbrv] lucene-solr:jira/solr-8542-v2: SOLR-9610: Bugfix option key for assertTool

Posted by cp...@apache.org.
SOLR-9610: Bugfix option key for assertTool


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/3488f121
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/3488f121
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/3488f121

Branch: refs/heads/jira/solr-8542-v2
Commit: 3488f12170a6b035391fda719ce69380dc4b2882
Parents: e3a8a0f
Author: Jan Høydahl <ja...@apache.org>
Authored: Sat Oct 22 02:31:27 2016 +0200
Committer: Jan Høydahl <ja...@apache.org>
Committed: Sat Oct 22 02:31:27 2016 +0200

----------------------------------------------------------------------
 solr/core/src/java/org/apache/solr/util/SolrCLI.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3488f121/solr/core/src/java/org/apache/solr/util/SolrCLI.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/SolrCLI.java b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
index c5a359e..8180c44 100644
--- a/solr/core/src/java/org/apache/solr/util/SolrCLI.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
@@ -3267,7 +3267,7 @@ public class SolrCLI {
       if (cli.hasOption("s")) {
         if (assertSolrRunning(cli.getOptionValue("s")) > 0) return 1;
       }
-      if (cli.hasOption("s")) {
+      if (cli.hasOption("S")) {
         if (assertSolrNotRunning(cli.getOptionValue("S")) > 0) return 1;
       }
       return 0;


[45/50] [abbrv] lucene-solr:jira/solr-8542-v2: LUCENE-7520: WSTE shouldn't expand MTQ if its field doesn't match filter

Posted by cp...@apache.org.
LUCENE-7520: WSTE shouldn't expand MTQ if its field doesn't match filter


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/e1b06938
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/e1b06938
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/e1b06938

Branch: refs/heads/jira/solr-8542-v2
Commit: e1b06938b4b0442b18878e59fde57e29ca641499
Parents: ef57374
Author: David Smiley <ds...@apache.org>
Authored: Mon Oct 24 09:31:55 2016 -0400
Committer: David Smiley <ds...@apache.org>
Committed: Mon Oct 24 09:31:55 2016 -0400

----------------------------------------------------------------------
 lucene/CHANGES.txt                                 |  4 ++++
 .../highlight/WeightedSpanTermExtractor.java       | 13 +++++--------
 .../lucene/search/highlight/HighlighterTest.java   | 17 +++++++++++++++++
 3 files changed, 26 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e1b06938/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index c4b3521..954137f 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -111,6 +111,10 @@ Improvements
 
 * LUCENE-7496: Better toString for SweetSpotSimilarity (janhoy)
 
+* LUCENE-7520: Highlighter's WeightedSpanTermExtractor shouldn't attempt to expand a MultiTermQuery
+  when its field doesn't match the field the extraction is scoped to.
+  (Cao Manh Dat via David Smiley)
+
 Optimizations
 
 * LUCENE-7501: BKDReader should not store the split dimension explicitly in the

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e1b06938/lucene/highlighter/src/java/org/apache/lucene/search/highlight/WeightedSpanTermExtractor.java
----------------------------------------------------------------------
diff --git a/lucene/highlighter/src/java/org/apache/lucene/search/highlight/WeightedSpanTermExtractor.java b/lucene/highlighter/src/java/org/apache/lucene/search/highlight/WeightedSpanTermExtractor.java
index 1b277f1..0e0093b 100644
--- a/lucene/highlighter/src/java/org/apache/lucene/search/highlight/WeightedSpanTermExtractor.java
+++ b/lucene/highlighter/src/java/org/apache/lucene/search/highlight/WeightedSpanTermExtractor.java
@@ -217,13 +217,14 @@ public class WeightedSpanTermExtractor {
     } else if (isQueryUnsupported(query.getClass())) {
       // nothing
     } else {
+      if (query instanceof MultiTermQuery &&
+          (!expandMultiTermQuery || !fieldNameComparator(((MultiTermQuery)query).getField()))) {
+        return;
+      }
       Query origQuery = query;
       final IndexReader reader = getLeafContext().reader();
       Query rewritten;
       if (query instanceof MultiTermQuery) {
-        if (!expandMultiTermQuery) {
-          return;
-        }
         rewritten = MultiTermQuery.SCORING_BOOLEAN_REWRITE.rewrite(reader, (MultiTermQuery) query);
       } else {
         rewritten = origQuery.rewrite(reader);
@@ -508,11 +509,7 @@ public class WeightedSpanTermExtractor {
    */
   public Map<String,WeightedSpanTerm> getWeightedSpanTerms(Query query, float boost, TokenStream tokenStream,
       String fieldName) throws IOException {
-    if (fieldName != null) {
-      this.fieldName = fieldName;
-    } else {
-      this.fieldName = null;
-    }
+    this.fieldName = fieldName;
 
     Map<String,WeightedSpanTerm> terms = new PositionCheckingMap<>();
     this.tokenStream = tokenStream;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e1b06938/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
----------------------------------------------------------------------
diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
index fc402ba..c37709b 100644
--- a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
+++ b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
@@ -33,6 +33,7 @@ import java.util.StringTokenizer;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.BaseTokenStreamTestCase;
 import org.apache.lucene.analysis.CachingTokenFilter;
+import org.apache.lucene.analysis.CannedTokenStream;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.analysis.MockPayloadAnalyzer;
 import org.apache.lucene.analysis.MockTokenFilter;
@@ -1339,6 +1340,22 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
 
   }
 
+  public void testNotRewriteMultiTermQuery() throws IOException {
+    // field "bar": (not the field we ultimately want to extract)
+    MultiTermQuery mtq = new TermRangeQuery("bar", new BytesRef("aa"), new BytesRef("zz"), true, true) ;
+    WeightedSpanTermExtractor extractor = new WeightedSpanTermExtractor() {
+      @Override
+      protected void extract(Query query, float boost, Map<String, WeightedSpanTerm> terms) throws IOException {
+        assertEquals(mtq, query);
+        super.extract(query, boost, terms);
+      }
+    };
+    extractor.setExpandMultiTermQuery(true);
+    extractor.setMaxDocCharsToAnalyze(51200);
+    extractor.getWeightedSpanTerms(
+        mtq, 3, new CannedTokenStream(new Token("aa",0,2), new Token("bb", 2,4)), "foo"); // field "foo"
+  }
+
   public void testGetBestSingleFragmentWithWeights() throws Exception {
 
     TestHighlightRunner helper = new TestHighlightRunner() {


[29/50] [abbrv] lucene-solr:jira/solr-8542-v2: LUCENE-7515: RunListenerPrintReproduceInfo may try to access static rule fields before the rule has been invoked; an initialized flag now guards those accesses.

Posted by cp...@apache.org.
LUCENE-7515: RunListenerPrintReproduceInfo may try to access static rule fields before
the rule has been invoked; an initialized flag now guards those accesses.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/bc0116af
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/bc0116af
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/bc0116af

Branch: refs/heads/jira/solr-8542-v2
Commit: bc0116af6928ef921c03b6533c29f230a0fa193e
Parents: a19ec19
Author: Dawid Weiss <dw...@apache.org>
Authored: Fri Oct 21 10:41:38 2016 +0200
Committer: Dawid Weiss <dw...@apache.org>
Committed: Fri Oct 21 11:19:12 2016 +0200

----------------------------------------------------------------------
 .../lucene/util/RunListenerPrintReproduceInfo.java     |  4 ++--
 .../lucene/util/TestRuleSetupAndRestoreClassEnv.java   | 13 +++++++++++--
 2 files changed, 13 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/bc0116af/lucene/test-framework/src/java/org/apache/lucene/util/RunListenerPrintReproduceInfo.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/RunListenerPrintReproduceInfo.java b/lucene/test-framework/src/java/org/apache/lucene/util/RunListenerPrintReproduceInfo.java
index 3d4f4fd..7e4c786 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/RunListenerPrintReproduceInfo.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/RunListenerPrintReproduceInfo.java
@@ -125,7 +125,7 @@ public final class RunListenerPrintReproduceInfo extends RunListener {
 
   /** print some useful debugging information about the environment */
   private static void printDebuggingInformation() {
-    if (classEnvRule != null) {
+    if (classEnvRule != null && classEnvRule.isInitialized()) {
       System.err.println("NOTE: test params are: codec=" + classEnvRule.codec +
           ", sim=" + classEnvRule.similarity +
           ", locale=" + classEnvRule.locale.toLanguageTag() +
@@ -176,7 +176,7 @@ public final class RunListenerPrintReproduceInfo extends RunListener {
 
     // Environment.
     if (!TEST_LINE_DOCS_FILE.equals(DEFAULT_LINE_DOCS_FILE)) addVmOpt(b, "tests.linedocsfile", TEST_LINE_DOCS_FILE);
-    if (classEnvRule != null) {
+    if (classEnvRule != null && classEnvRule.isInitialized()) {
       addVmOpt(b, "tests.locale", classEnvRule.locale.toLanguageTag());
       if (classEnvRule.timeZone != null) {
         addVmOpt(b, "tests.timezone", classEnvRule.timeZone.getID());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/bc0116af/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java b/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java
index 279305e..0e4facc 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java
@@ -73,12 +73,16 @@ final class TestRuleSetupAndRestoreClassEnv extends AbstractBeforeAfterRule {
   Codec codec;
 
   /**
+   * Indicates whether the rule has executed its {@link #before()} method fully.
+   */
+  private boolean initialized;
+
+  /**
    * @see SuppressCodecs
    */
   HashSet<String> avoidCodecs;
 
   static class ThreadNameFixingPrintStreamInfoStream extends PrintStreamInfoStream {
-
     public ThreadNameFixingPrintStreamInfoStream(PrintStream out) {
       super(out);
     }
@@ -99,6 +103,10 @@ final class TestRuleSetupAndRestoreClassEnv extends AbstractBeforeAfterRule {
       stream.println(component + " " + messageID + " [" + getTimestamp() + "; " + name + "]: " + message);    
     }
   }
+  
+  public boolean isInitialized() {
+    return initialized;
+  }
 
   @Override
   protected void before() throws Exception {
@@ -113,7 +121,6 @@ final class TestRuleSetupAndRestoreClassEnv extends AbstractBeforeAfterRule {
     if (VERBOSE) {
       System.out.println("Loaded codecs: " + Codec.availableCodecs());
       System.out.println("Loaded postingsFormats: " + PostingsFormat.availablePostingsFormats());
-
     }
 
     savedInfoStream = InfoStream.getDefault();
@@ -235,6 +242,8 @@ final class TestRuleSetupAndRestoreClassEnv extends AbstractBeforeAfterRule {
     }
 
     LuceneTestCase.setLiveIWCFlushMode(flushMode);
+
+    initialized = true;
   }
 
   /**


[25/50] [abbrv] lucene-solr:jira/solr-8542-v2: Merge remote-tracking branch 'origin/master'

Posted by cp...@apache.org.
Merge remote-tracking branch 'origin/master'


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/a4952b11
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/a4952b11
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/a4952b11

Branch: refs/heads/jira/solr-8542-v2
Commit: a4952b11fec9e366cd97010d3cc96ef2ce6e953a
Parents: 127bf9f 9776196
Author: Noble Paul <no...@apache.org>
Authored: Thu Oct 20 20:07:15 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Thu Oct 20 20:07:15 2016 +0530

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |   2 +
 .../apache/lucene/misc/SweetSpotSimilarity.java |  16 ++
 solr/CHANGES.txt                                |  15 +-
 solr/bin/solr                                   |  18 +-
 solr/bin/solr.cmd                               |  28 +-
 .../src/java/org/apache/solr/util/SolrCLI.java  | 273 ++++++++++++++++++-
 .../org/apache/solr/util/UtilsToolTest.java     | 185 +++++++++++++
 7 files changed, 499 insertions(+), 38 deletions(-)
----------------------------------------------------------------------



[07/50] [abbrv] lucene-solr:jira/solr-8542-v2: LUCENE-7497: add test case

Posted by cp...@apache.org.
LUCENE-7497: add test case


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/b78f2219
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/b78f2219
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/b78f2219

Branch: refs/heads/jira/solr-8542-v2
Commit: b78f2219f45ca64c6a4b7261a87fae89477ec26f
Parents: 3be6701
Author: Mike McCandless <mi...@apache.org>
Authored: Tue Oct 18 14:02:02 2016 -0400
Committer: Mike McCandless <mi...@apache.org>
Committed: Tue Oct 18 14:02:40 2016 -0400

----------------------------------------------------------------------
 .../search/join/ToParentBlockJoinQuery.java     |  2 +-
 .../lucene/search/join/TestBlockJoin.java       | 66 ++++++++++++++++++++
 2 files changed, 67 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b78f2219/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java
index 3abdeeb..432ebcc 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java
@@ -203,7 +203,7 @@ public class ToParentBlockJoinQuery extends Query {
     public abstract int[] swapChildDocs(int[] other);
   }
   
-  static class BlockJoinScorer extends ChildrenMatchesScorer{
+  static class BlockJoinScorer extends ChildrenMatchesScorer {
     private final Scorer childScorer;
     private final BitSet parentBits;
     private final ScoreMode scoreMode;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b78f2219/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java
index af9ff5c..cf21fa4 100644
--- a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java
+++ b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java
@@ -187,7 +187,73 @@ public class TestBlockJoin extends LuceneTestCase {
     dir.close();
   }
 
+  // You must use ToParentBlockJoinSearcher if you want to do BQ SHOULD queries:
+  public void testBQShouldJoinedChild() throws Exception {
+    final Directory dir = newDirectory();
+    final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+
+    final List<Document> docs = new ArrayList<>();
+
+    docs.add(makeJob("java", 2007));
+    docs.add(makeJob("python", 2010));
+    docs.add(makeResume("Lisa", "United Kingdom"));
+    w.addDocuments(docs);
+
+    docs.clear();
+    docs.add(makeJob("ruby", 2005));
+    docs.add(makeJob("java", 2006));
+    docs.add(makeResume("Frank", "United States"));
+    w.addDocuments(docs);
+
+    IndexReader r = w.getReader();
+    w.close();
+    IndexSearcher s = new ToParentBlockJoinIndexSearcher(r);
+    //IndexSearcher s = newSearcher(r, false);
+    //IndexSearcher s = new IndexSearcher(r);
+
+    // Create a filter that defines "parent" documents in the index - in this case resumes
+    BitSetProducer parentsFilter = new QueryBitSetProducer(new TermQuery(new Term("docType", "resume")));
+    CheckJoinIndex.check(r, parentsFilter);
+
+    // Define child document criteria (finds an example of relevant work experience)
+    BooleanQuery.Builder childQuery = new BooleanQuery.Builder();
+    childQuery.add(new BooleanClause(new TermQuery(new Term("skill", "java")), Occur.MUST));
+    childQuery.add(new BooleanClause(IntPoint.newRangeQuery("year", 2006, 2011), Occur.MUST));
+
+    // Define parent document criteria (find a resident in the UK)
+    Query parentQuery = new TermQuery(new Term("country", "United Kingdom"));
+
+    // Wrap the child document query to 'join' any matches
+    // up to corresponding parent:
+    ToParentBlockJoinQuery childJoinQuery = new ToParentBlockJoinQuery(childQuery.build(), parentsFilter, ScoreMode.Avg);
+
+    // Combine the parent and nested child queries into a single query for a candidate
+    BooleanQuery.Builder fullQuery = new BooleanQuery.Builder();
+    fullQuery.add(new BooleanClause(parentQuery, Occur.SHOULD));
+    fullQuery.add(new BooleanClause(childJoinQuery, Occur.SHOULD));
+
+    ToParentBlockJoinCollector c = new ToParentBlockJoinCollector(Sort.RELEVANCE, 1, true, true);
+    s.search(fullQuery.build(), c);
+    TopGroups<Integer> results = c.getTopGroups(childJoinQuery, null, 0, 10, 0, true);
+    assertEquals(1, results.totalGroupedHitCount);
+    assertEquals(1, results.groups.length);
 
+    final GroupDocs<Integer> group = results.groups[0];
+    assertEquals(1, group.totalHits);
+    assertFalse(Float.isNaN(group.score));
+
+    Document childDoc = s.doc(group.scoreDocs[0].doc);
+    //System.out.println("  doc=" + group.scoreDocs[0].doc);
+    assertEquals("java", childDoc.get("skill"));
+    assertNotNull(group.groupValue);
+    Document parentDoc = s.doc(group.groupValue);
+    assertEquals("Lisa", parentDoc.get("name"));
+    
+    
+    r.close();
+    dir.close();
+  }
+  
   public void testSimple() throws Exception {
 
     final Directory dir = newDirectory();


[26/50] [abbrv] lucene-solr:jira/solr-8542-v2: SOLR-9570: Fix test failures and start using SolrTestCaseJ4's createTempDir

Posted by cp...@apache.org.
SOLR-9570: Fix test failures and start using SolrTestCaseJ4's createTempDir


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/af88e7f5
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/af88e7f5
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/af88e7f5

Branch: refs/heads/jira/solr-8542-v2
Commit: af88e7f54d2042a2ff5c3bef7b6016084ad15cec
Parents: a4952b1
Author: Jan Høydahl <ja...@apache.org>
Authored: Thu Oct 20 20:58:52 2016 +0200
Committer: Jan Høydahl <ja...@apache.org>
Committed: Thu Oct 20 20:58:52 2016 +0200

----------------------------------------------------------------------
 .../src/java/org/apache/solr/util/SolrCLI.java  |  6 +++---
 .../org/apache/solr/util/UtilsToolTest.java     | 22 ++++++++++----------
 2 files changed, 14 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/af88e7f5/solr/core/src/java/org/apache/solr/util/SolrCLI.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/SolrCLI.java b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
index 39bf548..c5a359e 100644
--- a/solr/core/src/java/org/apache/solr/util/SolrCLI.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
@@ -3362,7 +3362,7 @@ public class SolrCLI {
     public UtilsTool(PrintStream stdout) { super(stdout); }
 
     public String getName() {
-      return "prestart";
+      return "utils";
     }
 
     @SuppressWarnings("static-access")
@@ -3482,7 +3482,7 @@ public class SolrCLI {
           -> a.isRegularFile() && String.valueOf(f.getFileName()).endsWith("-console.log"))
           .collect(Collectors.toList());
       if (files.size() > 0) {
-        out("Archiving " + files.size() + " console log files");
+        out("Archiving " + files.size() + " console log files to " + archivePath);
         for (Path p : files) {
           Files.move(p, archivePath.resolve(p.getFileName()), StandardCopyOption.REPLACE_EXISTING);
         }
@@ -3570,7 +3570,7 @@ public class SolrCLI {
         throw new Exception("Command requires the -l <log-directory> option");
       }
       if (!logsPath.isAbsolute()) {
-        if (serverPath != null && serverPath.isAbsolute() && serverPath.toFile().exists()) {
+        if (serverPath != null && serverPath.isAbsolute() && Files.exists(serverPath)) {
           logsPath = serverPath.resolve(logsPath);
         } else {
           throw new Exception("Logs directory must be an absolute path, or -s must be supplied");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/af88e7f5/solr/core/src/test/org/apache/solr/util/UtilsToolTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/util/UtilsToolTest.java b/solr/core/src/test/org/apache/solr/util/UtilsToolTest.java
index fa39620..6b2d31c 100644
--- a/solr/core/src/test/org/apache/solr/util/UtilsToolTest.java
+++ b/solr/core/src/test/org/apache/solr/util/UtilsToolTest.java
@@ -28,20 +28,18 @@ import java.util.List;
 import java.util.stream.Collectors;
 
 import org.apache.commons.cli.CommandLine;
+import org.apache.solr.SolrTestCaseJ4;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
 import static org.apache.solr.util.SolrCLI.findTool;
 import static org.apache.solr.util.SolrCLI.parseCmdLine;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
 
 /**
  * Unit test for SolrCLI's UtilsTool
  */
-public class UtilsToolTest {
+public class UtilsToolTest extends SolrTestCaseJ4 {
 
   private Path dir;
   private SolrCLI.UtilsTool tool;
@@ -60,19 +58,21 @@ public class UtilsToolTest {
       "solr_gc_log_2");
   
   @Before
-  public void setUp() throws IOException {
-    dir = Files.createTempDirectory("Utils Tool Test");
-    files.stream().forEach(f -> {
+  public void setUp() throws Exception {
+    super.setUp();
+    dir = createTempDir("Utils Tool Test").toAbsolutePath();
+    files.forEach(f -> {
       try {
-        dir.resolve(f).toFile().createNewFile();
+        Files.createFile(dir.resolve(f));
       } catch (IOException e) {
-        assertTrue(false);
+        fail("Error when creating temporary file " + dir.resolve(f));
       }
     });
   }
   
   @After
-  public void tearDown() throws IOException {
+  public void tearDown() throws Exception {
+    super.tearDown();
     org.apache.commons.io.FileUtils.deleteDirectory(dir.toFile());
   }
   
@@ -128,7 +128,7 @@ public class UtilsToolTest {
     } catch (Exception e) {
       return;
     }
-    assertTrue(false);
+    fail("Should have thrown exception if using relative path without -s");
   }
   
   @Test


[50/50] [abbrv] lucene-solr:jira/solr-8542-v2: Merge branch 'master' into jira/solr-8542-v2

Posted by cp...@apache.org.
Merge branch 'master' into jira/solr-8542-v2


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/38052f35
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/38052f35
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/38052f35

Branch: refs/heads/jira/solr-8542-v2
Commit: 38052f35657e90aec73e73fa0f24e52d17513d97
Parents: 4ca9262 37871de
Author: Christine Poerschke <cp...@apache.org>
Authored: Mon Oct 24 13:11:16 2016 -0500
Committer: Christine Poerschke <cp...@apache.org>
Committed: Mon Oct 24 13:11:16 2016 -0500

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |  26 ++
 .../codecs/lucene53/Lucene53NormsProducer.java  |   6 +
 .../lucene54/Lucene54DocValuesProducer.java     | 104 +++++-
 .../lucene54/TestLucene54DocValuesFormat.java   |   5 +-
 .../codecs/memory/MemoryDocValuesProducer.java  |  69 ++--
 .../simpletext/SimpleTextDocValuesReader.java   |  95 +++++-
 .../simpletext/SimpleTextDocValuesWriter.java   |   9 +
 .../simpletext/SimpleTextPointsReader.java      |  17 +-
 .../apache/lucene/codecs/DocValuesConsumer.java |  25 ++
 .../org/apache/lucene/codecs/NormsConsumer.java |   5 +
 .../lucene/codecs/lucene70/IndexedDISI.java     |  88 +++--
 .../lucene70/Lucene70DocValuesProducer.java     | 125 +++++--
 .../codecs/lucene70/Lucene70NormsProducer.java  |  11 +
 .../apache/lucene/index/BinaryDocValues.java    |   5 +-
 .../lucene/index/BinaryDocValuesWriter.java     |   5 +
 .../org/apache/lucene/index/CheckIndex.java     |  81 ++++-
 .../java/org/apache/lucene/index/DocValues.java |  95 +++---
 .../apache/lucene/index/DocValuesIterator.java  |  33 ++
 .../lucene/index/FilterBinaryDocValues.java     |   5 +
 .../lucene/index/FilterNumericDocValues.java    |   5 +
 .../org/apache/lucene/index/IndexWriter.java    |  16 +-
 .../index/LegacyBinaryDocValuesWrapper.java     |   8 +
 .../index/LegacyNumericDocValuesWrapper.java    |   9 +
 .../index/LegacySortedDocValuesWrapper.java     |   9 +
 .../LegacySortedNumericDocValuesWrapper.java    |  10 +
 .../index/LegacySortedSetDocValuesWrapper.java  |  10 +
 .../org/apache/lucene/index/MultiDocValues.java | 125 +++++++
 .../apache/lucene/index/NormValuesWriter.java   |   5 +
 .../apache/lucene/index/NumericDocValues.java   |   7 +-
 .../lucene/index/NumericDocValuesWriter.java    |   5 +
 .../apache/lucene/index/ReadersAndUpdates.java  |  10 +
 .../index/SingletonSortedNumericDocValues.java  |  24 +-
 .../index/SingletonSortedSetDocValues.java      |  18 +-
 .../apache/lucene/index/SortedDocValues.java    |   3 +
 .../lucene/index/SortedDocValuesWriter.java     |   5 +
 .../lucene/index/SortedNumericDocValues.java    |   6 +-
 .../index/SortedNumericDocValuesWriter.java     |   5 +
 .../apache/lucene/index/SortedSetDocValues.java |   5 +-
 .../lucene/index/SortedSetDocValuesWriter.java  |   5 +
 .../apache/lucene/index/SortingLeafReader.java  |  32 ++
 .../apache/lucene/search/FieldComparator.java   |  40 +--
 .../lucene/search/SortedNumericSelector.java    |  18 +
 .../apache/lucene/search/SortedSetSelector.java |  36 ++
 .../search/similarities/BM25Similarity.java     |   8 +-
 .../search/similarities/SimilarityBase.java     |   6 +-
 .../search/similarities/TFIDFSimilarity.java    |   8 +-
 .../java/org/apache/lucene/util/LongValues.java |  21 +-
 .../org/apache/lucene/util/bkd/BKDReader.java   |  32 +-
 .../org/apache/lucene/util/bkd/BKDWriter.java   |  14 +-
 .../AbstractTestCompressionMode.java            |   8 +-
 .../AbstractTestLZ4CompressionMode.java         |  10 +-
 .../lucene/codecs/lucene50/TestForUtil.java     |   8 +-
 .../lucene/codecs/lucene70/TestIndexedDISI.java |  28 +-
 .../lucene70/TestLucene70DocValuesFormat.java   |   4 +-
 .../lucene/index/Test4GBStoredFields.java       |   4 +-
 .../org/apache/lucene/search/TestBooleanOr.java |   4 +-
 .../lucene/search/TestBooleanRewrites.java      |   2 +-
 .../lucene/util/TestTimSorterWorstCase.java     |   6 +-
 .../lucene/util/automaton/TestOperations.java   |   4 +-
 .../lucene/util/packed/TestDirectPacked.java    |   6 +-
 .../lucene/util/packed/TestPackedInts.java      |  14 +-
 .../org/apache/lucene/facet/DrillSideways.java  |   2 +-
 .../apache/lucene/facet/FacetsCollector.java    |  42 ++-
 .../SortedSetDocValuesFacetCounts.java          |  15 +-
 .../facet/taxonomy/FastTaxonomyFacetCounts.java |  41 ++-
 .../apache/lucene/facet/TestDrillDownQuery.java |  11 +
 .../highlight/WeightedSpanTermExtractor.java    |  13 +-
 .../search/highlight/HighlighterTest.java       |  17 +
 lucene/ivy-versions.properties                  |   4 +-
 .../lucene/search/join/BlockJoinSelector.java   | 104 +++++-
 .../search/join/GenericTermsCollector.java      |   7 +
 .../search/join/ToParentBlockJoinQuery.java     |   2 +-
 .../lucene/search/join/TestBlockJoin.java       |  66 ++++
 .../search/join/TestBlockJoinSelector.java      |  12 +
 .../apache/lucene/search/join/TestJoinUtil.java |  18 +-
 lucene/licenses/morfologik-fsa-2.1.0.jar.sha1   |   1 -
 lucene/licenses/morfologik-fsa-2.1.1.jar.sha1   |   1 +
 .../licenses/morfologik-polish-2.1.0.jar.sha1   |   1 -
 .../licenses/morfologik-polish-2.1.1.jar.sha1   |   1 +
 .../licenses/morfologik-stemming-2.1.0.jar.sha1 |   1 -
 .../licenses/morfologik-stemming-2.1.1.jar.sha1 |   1 +
 .../randomizedtesting-runner-2.3.4.jar.sha1     |   1 -
 .../randomizedtesting-runner-2.4.0.jar.sha1     |   1 +
 .../apache/lucene/index/memory/MemoryIndex.java |   6 +
 .../apache/lucene/misc/SweetSpotSimilarity.java |  16 +
 .../search/TestDiversifiedTopDocsCollector.java |   9 +
 .../function/TestDocValuesFieldSources.java     |   4 +-
 .../apache/lucene/spatial3d/TestGeo3DPoint.java |  10 +-
 .../analyzing/AnalyzingInfixSuggester.java      |  12 +-
 .../analyzing/AnalyzingInfixSuggesterTest.java  |  76 +++++
 .../codecs/compressing/CompressingCodec.java    |  14 +-
 .../lucene/index/AssertingLeafReader.java       |  89 ++++-
 .../index/BaseDocValuesFormatTestCase.java      | 331 ++++++++++++-------
 .../index/BaseIndexFileFormatTestCase.java      |  12 +
 .../lucene/index/BaseNormsFormatTestCase.java   | 101 +++---
 .../index/BaseStoredFieldsFormatTestCase.java   |  22 +-
 .../lucene/search/AssertingBulkScorer.java      |   4 +-
 .../lucene/search/RandomApproximationQuery.java |   4 +-
 .../util/RunListenerPrintReproduceInfo.java     |   4 +-
 .../util/TestRuleSetupAndRestoreClassEnv.java   |  13 +-
 .../java/org/apache/lucene/util/TestUtil.java   |   6 +-
 .../TestCompressingStoredFieldsFormat.java      |   4 +-
 solr/CHANGES.txt                                |  55 ++-
 solr/bin/install_solr_service.sh                |   2 -
 solr/bin/post                                   |  23 +-
 solr/bin/solr                                   |  83 +++--
 solr/bin/solr.cmd                               |  89 +++--
 solr/bin/solr.in.cmd                            |  55 ++-
 solr/bin/solr.in.sh                             |  34 +-
 .../handler/dataimport/TemplateTransformer.java |   5 +-
 .../handler/dataimport/VariableResolver.java    |  70 +---
 .../java/org/apache/solr/cloud/BackupCmd.java   |  75 ++++-
 .../apache/solr/cloud/CreateSnapshotCmd.java    | 179 ++++++++++
 .../apache/solr/cloud/DeleteSnapshotCmd.java    | 160 +++++++++
 .../cloud/OverseerCollectionMessageHandler.java |   2 +
 .../org/apache/solr/cloud/ZkController.java     |   2 +-
 .../src/java/org/apache/solr/core/SolrCore.java |  42 ++-
 .../snapshots/CollectionSnapshotMetaData.java   | 242 ++++++++++++++
 .../core/snapshots/SolrSnapshotManager.java     | 180 ++++++++++
 .../apache/solr/handler/DumpRequestHandler.java |   6 +-
 .../apache/solr/handler/ReplicationHandler.java |   2 +-
 .../org/apache/solr/handler/StreamHandler.java  |   4 +-
 .../solr/handler/admin/CollectionsHandler.java  |  54 ++-
 .../solr/handler/admin/CoreAdminOperation.java  |   7 +-
 .../solr/handler/admin/CreateSnapshotOp.java    |  10 +-
 .../solr/handler/admin/DeleteSnapshotOp.java    |   4 +
 .../solr/handler/admin/LukeRequestHandler.java  |   1 +
 .../solr/highlight/DefaultSolrHighlighter.java  |   4 +-
 .../apache/solr/request/DocValuesFacets.java    |  20 +-
 .../request/PerSegmentSingleValuedFaceting.java |  10 +-
 .../solr/response/BinaryResponseWriter.java     |   3 +-
 .../solr/response/JSONResponseWriter.java       |   3 +-
 .../apache/solr/search/HashQParserPlugin.java   |   4 +-
 .../apache/solr/search/SolrIndexSearcher.java   |  67 +++-
 .../TextLogisticRegressionQParserPlugin.java    |   2 +-
 .../facet/FacetFieldProcessorByArrayDV.java     |  30 +-
 .../solr/search/mlt/SimpleMLTQParser.java       |  33 +-
 .../similarities/SchemaSimilarityFactory.java   |  39 ++-
 .../security/DelegationTokenKerberosFilter.java |  11 +-
 .../apache/solr/security/KerberosPlugin.java    |   9 +-
 .../apache/solr/uninverting/FieldCacheImpl.java |  18 +
 .../apache/solr/update/IndexFingerprint.java    |  78 +++--
 .../processor/SimpleUpdateProcessorFactory.java |  46 ++-
 .../TemplateUpdateProcessorFactory.java         | 114 +++++++
 .../processor/UpdateRequestProcessorChain.java  |  14 +-
 .../org/apache/solr/util/SimplePostTool.java    |  59 +++-
 .../src/java/org/apache/solr/util/SolrCLI.java  | 279 +++++++++++++++-
 .../cloud/LeaderFailureAfterFreshStartTest.java |  32 --
 .../solr/cloud/PeerSyncReplicationTest.java     |   2 +-
 .../core/snapshots/TestSolrCloudSnapshots.java  | 285 ++++++++++++++++
 .../org/apache/solr/update/PeerSyncTest.java    |   4 +-
 ...PeerSyncWithIndexFingerprintCachingTest.java | 108 ++++++
 .../processor/TemplateUpdateProcessorTest.java  |  48 +++
 .../UpdateRequestProcessorFactoryTest.java      |  15 +
 .../org/apache/solr/util/UtilsToolTest.java     | 189 +++++++++++
 solr/licenses/junit4-ant-2.3.4.jar.sha1         |   1 -
 solr/licenses/junit4-ant-2.4.0.jar.sha1         |   1 +
 solr/licenses/morfologik-fsa-2.1.0.jar.sha1     |   1 -
 solr/licenses/morfologik-fsa-2.1.1.jar.sha1     |   1 +
 solr/licenses/morfologik-polish-2.1.0.jar.sha1  |   1 -
 solr/licenses/morfologik-polish-2.1.1.jar.sha1  |   1 +
 .../licenses/morfologik-stemming-2.1.0.jar.sha1 |   1 -
 .../licenses/morfologik-stemming-2.1.1.jar.sha1 |   1 +
 .../randomizedtesting-runner-2.3.4.jar.sha1     |   1 -
 .../randomizedtesting-runner-2.4.0.jar.sha1     |   1 +
 .../client/solrj/io/stream/DaemonStream.java    |  44 ++-
 .../solrj/request/CollectionAdminRequest.java   | 116 ++++++-
 .../solr/common/params/CollectionParams.java    |   3 +
 .../apache/solr/common/params/SolrParams.java   | 124 +++++--
 .../java/org/apache/solr/common/util/Cache.java |  26 +-
 .../apache/solr/common/util/MapBackedCache.java |  57 ++++
 .../solrj/io/stream/StreamExpressionTest.java   | 193 +++++++++--
 .../solr/common/util/TestJavaBinCodec.java      |  52 +--
 .../solr/cloud/AbstractDistribZkTestBase.java   |  31 ++
 .../apache/solr/cloud/MiniSolrCloudCluster.java |  73 ----
 solr/webapp/web/css/angular/schema.css          |  23 ++
 .../webapp/web/js/angular/controllers/schema.js |  60 ++--
 solr/webapp/web/partials/schema.html            |  15 +-
 178 files changed, 4989 insertions(+), 1261 deletions(-)
----------------------------------------------------------------------



[43/50] [abbrv] lucene-solr:jira/solr-8542-v2: SOLR-9255: Rename SOLR_AUTHENTICATION_CLIENT_CONFIGURER -> SOLR_AUTHENTICATION_CLIENT_BUILDER

Posted by cp...@apache.org.
SOLR-9255: Rename SOLR_AUTHENTICATION_CLIENT_CONFIGURER -> SOLR_AUTHENTICATION_CLIENT_BUILDER


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/61e180b7
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/61e180b7
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/61e180b7

Branch: refs/heads/jira/solr-8542-v2
Commit: 61e180b7efa965edd4979b15ee56d946d50f8221
Parents: c9de11d
Author: Jan Høydahl <ja...@apache.org>
Authored: Mon Oct 24 14:18:21 2016 +0200
Committer: Jan Høydahl <ja...@apache.org>
Committed: Mon Oct 24 14:18:21 2016 +0200

----------------------------------------------------------------------
 solr/CHANGES.txt                                     | 5 ++++-
 solr/bin/solr                                        | 8 ++++++--
 solr/bin/solr.in.sh                                  | 4 ++--
 solr/core/src/java/org/apache/solr/util/SolrCLI.java | 4 ++--
 4 files changed, 14 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
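
For context, the hunks below fit together as follows: bin/solr now maps the SOLR_AUTHENTICATION_CLIENT_BUILDER environment variable to the -Dsolr.authentication.httpclient.builder system property (only warning about the old SOLR_AUTHENTICATION_CLIENT_CONFIGURER), and SolrCLI instantiates the named class reflectively and registers it with HttpClientUtil. A minimal sketch of that consumption path, assuming the class name is read from that system property; the bootstrap class itself and the import paths are illustrative, not part of this commit:

  import org.apache.solr.client.solrj.impl.HttpClientUtil;
  import org.apache.solr.client.solrj.impl.SolrHttpClientBuilder;

  public class AuthcBuilderBootstrapSketch {
    public static void main(String[] args) throws Exception {
      // Assumed lookup: bin/solr sets this property from SOLR_AUTHENTICATION_CLIENT_BUILDER.
      String builderClassName = System.getProperty("solr.authentication.httpclient.builder");
      if (builderClassName != null) {
        // Same reflective pattern as the SolrCLI.java hunk below: load, instantiate, register.
        Class<?> c = Class.forName(builderClassName);
        SolrHttpClientBuilder builder = (SolrHttpClientBuilder) c.newInstance();
        HttpClientUtil.setHttpClientBuilder(builder);
      }
    }
  }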


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/61e180b7/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index f455002..04d4d77 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -25,7 +25,8 @@ Upgrading from Solr 6.x
   SolrHttpClientBuilder rather than an HttpClientConfigurer.
   
 * HttpClientUtil now allows configuring HttpClient instances via SolrHttpClientBuilder
-  rather than an HttpClientConfigurer.
+  rather than an HttpClientConfigurer. Use of env variable SOLR_AUTHENTICATION_CLIENT_CONFIGURER
+  no longer works. Please use SOLR_AUTHENTICATION_CLIENT_BUILDER instead.
 
 * SolrClient implementations now use their own internal configuration for socket timeouts,
   connect timeouts, and allowing redirects rather than what is set as the default when
@@ -56,6 +57,8 @@ Optimizations
   check on every request and move connection lifecycle management towards the client.
   (Ryan Zezeski, Mark Miller, Shawn Heisey, Steve Davids)
 
+* SOLR-9255: Rename SOLR_AUTHENTICATION_CLIENT_CONFIGURER -> SOLR_AUTHENTICATION_CLIENT_BUILDER (janhoy) 
+
 * SOLR-9579: Make Solr's SchemaField implement Lucene's IndexableFieldType, removing the 
   creation of a Lucene FieldType every time a field is indexed. (John Call, yonik) 
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/61e180b7/solr/bin/solr
----------------------------------------------------------------------
diff --git a/solr/bin/solr b/solr/bin/solr
index 6aa5709..d2936de 100755
--- a/solr/bin/solr
+++ b/solr/bin/solr
@@ -178,9 +178,13 @@ fi
 
 # Authentication options
 if [ "$SOLR_AUTHENTICATION_CLIENT_CONFIGURER" != "" ]; then
-  AUTHC_CLIENT_CONFIGURER_ARG="-Dsolr.authentication.httpclient.configurer=$SOLR_AUTHENTICATION_CLIENT_CONFIGURER"
+  echo "WARNING: Found unsupported configuration variable SOLR_AUTHENTICATION_CLIENT_CONFIGURER"
+  echo "         Please start using SOLR_AUTHENTICATION_CLIENT_BUILDER instead"
 fi
-AUTHC_OPTS="$AUTHC_CLIENT_CONFIGURER_ARG $SOLR_AUTHENTICATION_OPTS"
+if [ "$SOLR_AUTHENTICATION_CLIENT_BUILDER" != "" ]; then
+  AUTHC_CLIENT_BUILDER_ARG="-Dsolr.authentication.httpclient.builder=$SOLR_AUTHENTICATION_CLIENT_BUILDER"
+fi
+AUTHC_OPTS="$AUTHC_CLIENT_BUILDER_ARG $SOLR_AUTHENTICATION_OPTS"
 
 # Set the SOLR_TOOL_HOST variable for use when connecting to a running Solr instance
 if [ "$SOLR_HOST" != "" ]; then

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/61e180b7/solr/bin/solr.in.sh
----------------------------------------------------------------------
diff --git a/solr/bin/solr.in.sh b/solr/bin/solr.in.sh
index 2fcaabb..40c59a6 100644
--- a/solr/bin/solr.in.sh
+++ b/solr/bin/solr.in.sh
@@ -105,8 +105,8 @@
 #SOLR_SSL_CLIENT_TRUST_STORE_PASSWORD=
 
 # Settings for authentication
-#SOLR_AUTHENTICATION_CLIENT_CONFIGURER=
-#SOLR_AUTHENTICATION_OPTS=
+#SOLR_AUTHENTICATION_CLIENT_BUILDER=
+#SOLR_AUTHENTICATION_OPTS="-Dbasicauth=solr:SolrRocks"
 
 # Settings for ZK ACL
 #SOLR_ZK_CREDS_AND_ACLS="-DzkACLProvider=org.apache.solr.common.cloud.VMParamsAllAndReadonlyDigestZkACLProvider \

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/61e180b7/solr/core/src/java/org/apache/solr/util/SolrCLI.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/SolrCLI.java b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
index 8180c44..76e5ee9 100644
--- a/solr/core/src/java/org/apache/solr/util/SolrCLI.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
@@ -281,10 +281,10 @@ public class SolrCLI {
         Class c = Class.forName(builderClassName);
         SolrHttpClientBuilder builder = (SolrHttpClientBuilder)c.newInstance();
         HttpClientUtil.setHttpClientBuilder(builder);
-        log.info("Set HttpClientConfigurer from: "+builderClassName);
+        log.info("Set SolrHttpClientBuilder from: "+builderClassName);
       } catch (Exception ex) {
         log.error(ex.getMessage());
-        throw new RuntimeException("Error during loading of configurer '"+builderClassName+"'.", ex);
+        throw new RuntimeException("Error during loading of builder '"+builderClassName+"'.", ex);
       }
     }
 


[33/50] [abbrv] lucene-solr:jira/solr-8542-v2: SOLR-9676: DefaultSolrHighlighter: clarify warning when FVH can't be used

Posted by cp...@apache.org.
SOLR-9676: DefaultSolrHighlighter: clarify warning when FVH can't be used


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/91f58ac7
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/91f58ac7
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/91f58ac7

Branch: refs/heads/jira/solr-8542-v2
Commit: 91f58ac72b603bc9a66f537829c0f99dcd65fbff
Parents: 57ba961
Author: David Smiley <ds...@apache.org>
Authored: Fri Oct 21 13:51:45 2016 -0400
Committer: David Smiley <ds...@apache.org>
Committed: Fri Oct 21 13:51:45 2016 -0400

----------------------------------------------------------------------
 .../java/org/apache/solr/highlight/DefaultSolrHighlighter.java   | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/91f58ac7/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java b/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java
index 2e31d7d..f020eb7 100644
--- a/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java
+++ b/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java
@@ -467,8 +467,8 @@ public class DefaultSolrHighlighter extends SolrHighlighter implements PluginInf
     if (!useFvhParam) return false;
     boolean termPosOff = schemaField.storeTermPositions() && schemaField.storeTermOffsets();
     if (!termPosOff) {
-      log.warn("Solr will not use FastVectorHighlighter because {} field does not store TermPositions and "
-          + "TermOffsets.", schemaField.getName());
+      log.warn("Solr will use the standard Highlighter instead of FastVectorHighlighter because the {} field " +
+          "does not store TermVectors with TermPositions and TermOffsets.", schemaField.getName());
     }
     return termPosOff;
   }


[03/50] [abbrv] lucene-solr:jira/solr-8542-v2: * SOLR-9506: cache IndexFingerprint for each segment

Posted by cp...@apache.org.
* SOLR-9506: cache IndexFingerprint for each segment


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/bb907a29
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/bb907a29
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/bb907a29

Branch: refs/heads/jira/solr-8542-v2
Commit: bb907a2983b4a7eba8cb4d527a859f1b312bdc79
Parents: b2188f4
Author: Noble Paul <no...@gmail.com>
Authored: Tue Oct 18 19:43:32 2016 +0530
Committer: Noble Paul <no...@gmail.com>
Committed: Tue Oct 18 19:43:32 2016 +0530

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  2 +
 .../src/java/org/apache/solr/core/SolrCore.java | 38 ++++++++++
 .../apache/solr/search/SolrIndexSearcher.java   | 62 +++++++++++++--
 .../apache/solr/update/IndexFingerprint.java    | 79 ++++++++++++--------
 .../cloud/LeaderFailureAfterFreshStartTest.java | 32 --------
 .../solr/cloud/PeerSyncReplicationTest.java     |  2 +-
 .../org/apache/solr/update/PeerSyncTest.java    | 20 ++++-
 .../solr/cloud/AbstractDistribZkTestBase.java   | 31 ++++++++
 8 files changed, 193 insertions(+), 73 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/bb907a29/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index be958d9..4d75f88 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -206,6 +206,8 @@ Optimizations
 * SOLR-9566: Don't put replicas into recovery when first creating a Collection
   (Alan Woodward)
 
+* SOLR-9506: cache IndexFingerprint for each segment (Pushkar Raste, noble)
+
 Other Changes
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/bb907a29/solr/core/src/java/org/apache/solr/core/SolrCore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index 2827f03..e47c8b6 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -58,6 +58,7 @@ import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexDeletionPolicy;
 import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
@@ -127,6 +128,7 @@ import org.apache.solr.search.stats.LocalStatsCache;
 import org.apache.solr.search.stats.StatsCache;
 import org.apache.solr.update.DefaultSolrCoreState;
 import org.apache.solr.update.DirectUpdateHandler2;
+import org.apache.solr.update.IndexFingerprint;
 import org.apache.solr.update.SolrCoreState;
 import org.apache.solr.update.SolrCoreState.IndexWriterCloser;
 import org.apache.solr.update.SolrIndexWriter;
@@ -149,6 +151,8 @@ import org.apache.zookeeper.data.Stat;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.collect.MapMaker;
+
 import static com.google.common.base.Preconditions.checkNotNull;
 import static org.apache.solr.common.params.CommonParams.NAME;
 import static org.apache.solr.common.params.CommonParams.PATH;
@@ -200,6 +204,8 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
   private final ReentrantLock snapshotDelLock; // A lock instance to guard against concurrent deletions.
 
   public Date getStartTimeStamp() { return startTime; }
+  
+  private final Map<Object, IndexFingerprint> perSegmentFingerprintCache = new  MapMaker().weakKeys().makeMap();
 
   public long getStartNanoTime() {
     return startNanoTime;
@@ -1589,6 +1595,38 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
   }
 
   /**
+   * Computes the fingerprint of a segment and caches it only if all the versions in the segment are included in the fingerprint.
+   * We can't use computeIfAbsent because caching is conditional (as described above).
+   * There is a chance that two threads may compute the fingerprint for the same segment; that is acceptable rather than locking the entire map.
+   * @param searcher searcher that includes the specified LeafReaderContext
+   * @param ctx LeafReaderContext of the segment to compute the fingerprint of
+   * @param maxVersion maximum version number to consider for fingerprint computation
+   * @return IndexFingerprint of the segment
+   * @throws IOException Can throw IOException
+   */
+  public IndexFingerprint getIndexFingerprint(SolrIndexSearcher searcher, LeafReaderContext ctx, long maxVersion)
+      throws IOException {
+      IndexFingerprint f = null;
+      f = perSegmentFingerprintCache.get(ctx.reader().getCoreCacheKey()) ; 
+      // fingerprint is either not cached or 
+      // we want fingerprint only up to a version less than maxVersionEncountered in the segment 
+      if(f == null || (f.getMaxInHash() > maxVersion)) {
+        log.debug("IndexFingerprint cache miss for searcher:{} reader:{} readerHash:{} maxVersion:{}", searcher, ctx.reader() , ctx.reader().hashCode(), maxVersion);
+        f = IndexFingerprint.getFingerprint(searcher, ctx, maxVersion);
+        // cache fingerprint for the segment only if all the versions in the segment are included in the fingerprint  
+        if(f.getMaxVersionEncountered() == f.getMaxInHash()) {
+          log.info("Caching fingerprint for searcher:{} leafReaderContext:{} maxVersion:{}", searcher, ctx, maxVersion);
+          perSegmentFingerprintCache.put(ctx.reader().getCoreCacheKey(), f);
+        }
+        
+      } else {
+        log.debug("IndexFingerprint cache hit for searcher:{} reader:{} readerHash:{} maxVersion:{}", searcher, ctx.reader(), ctx.reader().hashCode(), maxVersion);
+      }
+      log.debug("Cache Size: {}, Segments Size:{}", perSegmentFingerprintCache.size(), searcher.getTopReaderContext().leaves().size());
+      return f;
+  }
+
+  /**
   * Returns the current registered searcher with its reference count incremented, or null if none are registered.
   */
   public RefCounted<SolrIndexSearcher> getRegisteredSearcher() {
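
To summarize the caching policy added above: an entry is stored only when the segment's fingerprint covers every version in that segment, which is why Map.computeIfAbsent is not used, and a racing duplicate computation is tolerated instead of locking the map. A condensed sketch of that policy, using only names that appear in this diff (the wrapper class itself is illustrative, not Solr code):

  import java.io.IOException;
  import java.util.Map;
  import com.google.common.collect.MapMaker;
  import org.apache.lucene.index.LeafReaderContext;
  import org.apache.solr.search.SolrIndexSearcher;
  import org.apache.solr.update.IndexFingerprint;

  class PerSegmentFingerprintCacheSketch {
    // Weak-keyed on the segment's core cache key, so entries go away with the segment reader.
    private final Map<Object, IndexFingerprint> cache = new MapMaker().weakKeys().makeMap();

    IndexFingerprint fingerprintFor(SolrIndexSearcher searcher, LeafReaderContext ctx, long maxVersion)
        throws IOException {
      IndexFingerprint f = cache.get(ctx.reader().getCoreCacheKey());
      // Recompute on a miss, or when the cached entry hashed versions beyond maxVersion.
      if (f == null || f.getMaxInHash() > maxVersion) {
        f = IndexFingerprint.getFingerprint(searcher, ctx, maxVersion);
        // Store only if every version in the segment made it into the hash; this
        // conditional store is the reason computeIfAbsent does not fit here.
        if (f.getMaxVersionEncountered() == f.getMaxInHash()) {
          cache.put(ctx.reader().getCoreCacheKey(), f);
        }
      }
      return f;
    }
  }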

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/bb907a29/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index 933477b..59797b9 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@ -42,22 +42,50 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.DocumentStoredFieldVisitor;
 import org.apache.lucene.document.LazyDocument;
 import org.apache.lucene.index.*;
-import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.StoredFieldVisitor.Status;
-import org.apache.lucene.search.*;
+import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanClause.Occur;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.CollectionStatistics;
+import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.ConstantScoreQuery;
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.EarlyTerminatingSortingCollector;
+import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.FieldDoc;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.LeafCollector;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.MultiCollector;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.SimpleCollector;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TermStatistics;
+import org.apache.lucene.search.TimeLimitingCollector;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.TopDocsCollector;
+import org.apache.lucene.search.TopFieldCollector;
+import org.apache.lucene.search.TopFieldDocs;
+import org.apache.lucene.search.TopScoreDocCollector;
+import org.apache.lucene.search.TotalHitCountCollector;
+import org.apache.lucene.search.Weight;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.FixedBitSet;
 import org.apache.solr.common.SolrDocumentBase;
-import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.core.DirectoryFactory.DirContext;
 import org.apache.solr.core.DirectoryFactory;
+import org.apache.solr.core.DirectoryFactory.DirContext;
 import org.apache.solr.core.SolrConfig;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrInfoMBean;
@@ -249,7 +277,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
 
     this.path = path;
     this.directoryFactory = directoryFactory;
-    this.reader = (DirectoryReader) super.readerContext.reader();
+    this.reader = (DirectoryReader) super.getTopReaderContext().reader();
     this.rawReader = r;
     this.leafReader = SlowCompositeReaderWrapper.wrap(this.reader);
     this.core = core;
@@ -2413,12 +2441,11 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
    * gets a cached version of the IndexFingerprint for this searcher
    **/
   public IndexFingerprint getIndexFingerprint(long maxVersion) throws IOException {
-    final SolrIndexSearcher searcher = this;
     final AtomicReference<IOException> exception = new AtomicReference<>();
     try {
       return maxVersionFingerprintCache.computeIfAbsent(maxVersion, key -> {
         try {
-          return IndexFingerprint.getFingerprint(searcher, key);
+          return computeFromPerSegmentIndexFingerprint(maxVersion);
         } catch (IOException e) {
           exception.set(e);
           return null;
@@ -2429,6 +2456,27 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
     }
   }
 
+  private IndexFingerprint computeFromPerSegmentIndexFingerprint(long maxVersion) throws IOException {
+    final SolrIndexSearcher searcher = this;
+    final AtomicReference<IOException> exception = new AtomicReference<>();
+    try {
+      return searcher.getTopReaderContext().leaves().stream()
+          .map(ctx -> {
+            try {
+              return searcher.getCore().getIndexFingerprint(searcher, ctx, maxVersion);
+            } catch (IOException e) {
+              exception.set(e);
+              return null;
+            }
+          })
+          .filter(java.util.Objects::nonNull)
+          .reduce(new IndexFingerprint(maxVersion), IndexFingerprint::reduce);
+    } finally {
+      if (exception.get() != null) throw exception.get();
+    }
+  }
+  
+
   /////////////////////////////////////////////////////////////////////
   // SolrInfoMBean stuff: Statistics and Module Info
   /////////////////////////////////////////////////////////////////////
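
One non-obvious detail in the hunk above is how a checked IOException escapes the stream lambdas: it is parked in an AtomicReference inside the lambda and rethrown from a finally block once the stream finishes. A small standalone illustration of that pattern with generic names (not Solr code):

  import java.io.IOException;
  import java.util.List;
  import java.util.Objects;
  import java.util.concurrent.atomic.AtomicReference;

  class CheckedExceptionFromLambdaSketch {
    static int totalLength(List<String> paths) throws IOException {
      final AtomicReference<IOException> failure = new AtomicReference<>();
      try {
        return paths.stream()
            .map(p -> {
              try {
                return lengthOf(p);            // may throw the checked IOException
              } catch (IOException e) {
                failure.set(e);                // park it: lambdas cannot throw checked exceptions
                return null;
              }
            })
            .filter(Objects::nonNull)
            .reduce(0, Integer::sum);
      } finally {
        if (failure.get() != null) throw failure.get();   // surfaces after the stream completes
      }
    }

    private static int lengthOf(String path) throws IOException {
      return path.length();                    // stand-in for real I/O
    }
  }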

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/bb907a29/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java b/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java
index 877ef03..31adf07 100644
--- a/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java
+++ b/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java
@@ -52,6 +52,14 @@ public class IndexFingerprint implements MapSerializable {
   private long numDocs;
   private long maxDoc;
 
+  public IndexFingerprint() {
+    // default constructor
+  }
+  
+  public IndexFingerprint (long maxVersionSpecified)  {
+    this.maxVersionSpecified = maxVersionSpecified;
+  }
+  
   public long getMaxVersionSpecified() {
     return maxVersionSpecified;
   }
@@ -82,53 +90,63 @@ public class IndexFingerprint implements MapSerializable {
 
   /** Opens a new realtime searcher and returns it's (possibly cached) fingerprint */
   public static IndexFingerprint getFingerprint(SolrCore core, long maxVersion) throws IOException {
+    RTimer timer = new RTimer();
     core.getUpdateHandler().getUpdateLog().openRealtimeSearcher();
     RefCounted<SolrIndexSearcher> newestSearcher = core.getUpdateHandler().getUpdateLog().uhandler.core.getRealtimeSearcher();
     try {
-      return newestSearcher.get().getIndexFingerprint(maxVersion);
+      IndexFingerprint f = newestSearcher.get().getIndexFingerprint(maxVersion);
+      final double duration = timer.stop();
+      log.debug("IndexFingerprint time : {} result:{}" ,duration, f);
+      return f;
     } finally {
       if (newestSearcher != null) {
         newestSearcher.decref();
       }
     }
+    
   }
-
-  /** Calculates an index fingerprint */
-  public static IndexFingerprint getFingerprint(SolrIndexSearcher searcher, long maxVersion) throws IOException {
-    RTimer timer = new RTimer();
-
+  
+  public static IndexFingerprint getFingerprint(SolrIndexSearcher searcher, LeafReaderContext ctx, Long maxVersion)
+      throws IOException {
     SchemaField versionField = VersionInfo.getAndCheckVersionField(searcher.getSchema());
-
-    IndexFingerprint f = new IndexFingerprint();
-    f.maxVersionSpecified = maxVersion;
-    f.maxDoc = searcher.maxDoc();
-
-    // TODO: this could be parallelized, or even cached per-segment if performance becomes an issue
     ValueSource vs = versionField.getType().getValueSource(versionField, null);
     Map funcContext = ValueSource.newContext(searcher);
     vs.createWeight(funcContext, searcher);
-    for (LeafReaderContext ctx : searcher.getTopReaderContext().leaves()) {
-      int maxDoc = ctx.reader().maxDoc();
-      f.numDocs += ctx.reader().numDocs();
-      Bits liveDocs = ctx.reader().getLiveDocs();
-      FunctionValues fv = vs.getValues(funcContext, ctx);
-      for (int doc = 0; doc < maxDoc; doc++) {
-        if (liveDocs != null && !liveDocs.get(doc)) continue;
-        long v = fv.longVal(doc);
-        f.maxVersionEncountered = Math.max(v, f.maxVersionEncountered);
-        if (v <= f.maxVersionSpecified) {
-          f.maxInHash = Math.max(v, f.maxInHash);
-          f.versionsHash += Hash.fmix64(v);
-          f.numVersions++;
-        }
+    
+    IndexFingerprint f = new IndexFingerprint();
+    f.maxVersionSpecified = maxVersion;
+    f.maxDoc = ctx.reader().maxDoc();
+    f.numDocs = ctx.reader().numDocs();
+    
+    int maxDoc = ctx.reader().maxDoc();
+    Bits liveDocs = ctx.reader().getLiveDocs();
+    FunctionValues fv = vs.getValues(funcContext, ctx);
+    for (int doc = 0; doc < maxDoc; doc++) {
+      if (liveDocs != null && !liveDocs.get(doc)) continue;
+      long v = fv.longVal(doc);
+      f.maxVersionEncountered = Math.max(v, f.maxVersionEncountered);
+      if (v <= f.maxVersionSpecified) {
+        f.maxInHash = Math.max(v, f.maxInHash);
+        f.versionsHash += Hash.fmix64(v);
+        f.numVersions++;
       }
     }
-
-    final double duration = timer.stop();
-    log.info("IndexFingerprint millis:" + duration + " result:" + f);
-
+    
     return f;
   }
+  
+  
+  public static IndexFingerprint reduce(IndexFingerprint acc, IndexFingerprint f2) {
+    // acc should have maxVersionSpecified already set in it using IndexFingerprint(long maxVersionSpecified) constructor
+    acc.maxDoc = Math.max(acc.maxDoc, f2.maxDoc);
+    acc.numDocs += f2.numDocs;
+    acc.maxVersionEncountered = Math.max(acc.maxVersionEncountered, f2.maxVersionEncountered);
+    acc.maxInHash = Math.max(acc.maxInHash, f2.maxInHash);
+    acc.versionsHash += f2.versionsHash;
+    acc.numVersions += f2.numVersions;
+
+    return acc;
+  }
 
   /** returns 0 for equal, negative if f1 is less recent than f2, positive if more recent */
   public static int compare(IndexFingerprint f1, IndexFingerprint f2) {
@@ -200,4 +218,5 @@ public class IndexFingerprint implements MapSerializable {
   public String toString() {
     return toMap(new LinkedHashMap<>()).toString();
   }
+
 }
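
The new IndexFingerprint(long) constructor exists so a per-segment reduction can start from an identity that already carries maxVersionSpecified, which reduce() never takes from its second argument; reduce() then sums numDocs, numVersions and versionsHash while taking the max of maxDoc, maxVersionEncountered and maxInHash. A hedged sketch of the intended composition, written as a plain loop rather than the stream used in SolrIndexSearcher in this same commit (the wrapper class is illustrative):

  import java.io.IOException;
  import org.apache.lucene.index.LeafReaderContext;
  import org.apache.solr.search.SolrIndexSearcher;
  import org.apache.solr.update.IndexFingerprint;

  class FingerprintReduceSketch {
    static IndexFingerprint combine(SolrIndexSearcher searcher, long maxVersion) throws IOException {
      // Identity value: only maxVersionSpecified is set; everything else starts at its default.
      IndexFingerprint acc = new IndexFingerprint(maxVersion);
      for (LeafReaderContext ctx : searcher.getTopReaderContext().leaves()) {
        IndexFingerprint perSegment = searcher.getCore().getIndexFingerprint(searcher, ctx, maxVersion);
        // reduce() mutates and returns the accumulator.
        acc = IndexFingerprint.reduce(acc, perSegment);
      }
      return acc;
    }
  }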

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/bb907a29/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java
index 348532c..ef21386 100644
--- a/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java
@@ -29,7 +29,6 @@ import java.util.stream.Collectors;
 import org.apache.commons.lang.RandomStringUtils;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.request.UpdateRequest;
 import org.apache.solr.cloud.ZkTestServer.LimitViolationAction;
 import org.apache.solr.common.SolrInputDocument;
@@ -37,10 +36,8 @@ import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.Slice;
-import org.apache.solr.common.cloud.Slice.State;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.core.Diagnostics;
 import org.apache.solr.handler.ReplicationHandler;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -197,35 +194,6 @@ public class LeaderFailureAfterFreshStartTest extends AbstractFullDistribZkTestB
   }
 
   
-  static void waitForNewLeader(CloudSolrClient cloudClient, String shardName, Replica oldLeader, int maxWaitInSecs)
-      throws Exception {
-    log.info("Will wait for a node to become leader for {} secs", maxWaitInSecs);
-    boolean waitForLeader = true;
-    int i = 0;
-    ZkStateReader zkStateReader = cloudClient.getZkStateReader();
-    zkStateReader.forceUpdateCollection(DEFAULT_COLLECTION);
-    
-    while(waitForLeader) {
-      ClusterState clusterState = zkStateReader.getClusterState();
-      DocCollection coll = clusterState.getCollection("collection1");
-      Slice slice = coll.getSlice(shardName);
-      if(slice.getLeader() != oldLeader && slice.getState() == State.ACTIVE) {
-        log.info("New leader got elected in {} secs", i);
-        break;
-      }
-      
-      if(i == maxWaitInSecs) {
-        Diagnostics.logThreadDumps("Could not find new leader in specified timeout");
-        zkStateReader.getZkClient().printLayoutToStdOut();
-        fail("Could not find new leader even after waiting for " + maxWaitInSecs + "secs");
-      }
-      
-      i++;
-      Thread.sleep(1000);
-    }
-  }
-    
-
 
   private void waitTillNodesActive() throws Exception {
     for (int i = 0; i < 60; i++) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/bb907a29/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java b/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java
index 3ded7d2..e00ea3c 100644
--- a/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java
@@ -149,7 +149,7 @@ public class PeerSyncReplicationTest extends AbstractFullDistribZkTestBase {
       log.info("Now shutting down initial leader");
       forceNodeFailures(singletonList(initialLeaderJetty));
       log.info("Updating mappings from zk");
-      LeaderFailureAfterFreshStartTest.waitForNewLeader(cloudClient, "shard1", (Replica) initialLeaderJetty.client.info, 15);
+      waitForNewLeader(cloudClient, "shard1", (Replica) initialLeaderJetty.client.info, 15);
       updateMappingsFromZk(jettys, clients, true);
       assertEquals("PeerSynced node did not become leader", nodePeerSynced, shardToLeaderJetty.get("shard1"));
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/bb907a29/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java b/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java
index 64edd21..60fed8b 100644
--- a/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java
+++ b/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java
@@ -16,6 +16,9 @@
  */
 package org.apache.solr.update;
 
+import java.io.IOException;
+import java.util.Arrays;
+
 import org.apache.solr.BaseDistributedSearchTestCase;
 import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
 import org.apache.solr.client.solrj.SolrClient;
@@ -26,9 +29,6 @@ import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.StrUtils;
 import org.junit.Test;
 
-import java.io.IOException;
-import java.util.Arrays;
-
 import static org.apache.solr.update.processor.DistributedUpdateProcessor.DistribPhase;
 import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM;
 
@@ -145,6 +145,20 @@ public class PeerSyncTest extends BaseDistributedSearchTestCase {
     assertSync(client1, numVersions, true, shardsArr[0]);
     client0.commit(); client1.commit(); queryAndCompare(params("q", "*:*", "sort","_version_ desc"), client0, client1);
 
+    // Test PeerSync after replica misses delete
+    v = 2500;
+    add(client0, seenLeader, sdoc("id", "2500", "_version_", ++v));
+    add(client1, seenLeader, sdoc("id", "2500", "_version_", v));
+    client0.commit();
+    client1.commit();
+    del(client0, params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", Long.toString(-++v)), "2500");
+    add(client0, seenLeader, sdoc("id", "2501", "_version_", ++v));
+    add(client1, seenLeader, sdoc("id", "2501", "_version_", v));
+    // Sync should be able to delete the document
+    assertSync(client1, numVersions, true, shardsArr[0]);
+    client0.commit();
+    client1.commit();
+    queryAndCompare(params("q", "*:*", "sort", "_version_ desc"), client0, client1);
 
     //
     // Test that handling reorders work when applying docs retrieved from peer

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/bb907a29/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
index 03db71c..d04d996 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
@@ -24,11 +24,14 @@ import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.commons.io.FileUtils;
 import org.apache.solr.BaseDistributedSearchTestCase;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.cloud.Slice.State;
 import org.apache.solr.core.Diagnostics;
 import org.apache.solr.core.MockDirectoryFactory;
 import org.apache.zookeeper.KeeperException;
@@ -222,6 +225,34 @@ public abstract class AbstractDistribZkTestBase extends BaseDistributedSearchTes
 
     log.info("Collection has disappeared - collection: " + collection);
   }
+  
+  static void waitForNewLeader(CloudSolrClient cloudClient, String shardName, Replica oldLeader, int maxWaitInSecs)
+      throws Exception {
+    log.info("Will wait for a node to become leader for {} secs", maxWaitInSecs);
+    boolean waitForLeader = true;
+    int i = 0;
+    ZkStateReader zkStateReader = cloudClient.getZkStateReader();
+    zkStateReader.forceUpdateCollection(DEFAULT_COLLECTION);
+    
+    while(waitForLeader) {
+      ClusterState clusterState = zkStateReader.getClusterState();
+      DocCollection coll = clusterState.getCollection("collection1");
+      Slice slice = coll.getSlice(shardName);
+      if(slice.getLeader() != oldLeader && slice.getState() == State.ACTIVE) {
+        log.info("New leader got elected in {} secs", i);
+        break;
+      }
+      
+      if(i == maxWaitInSecs) {
+        Diagnostics.logThreadDumps("Could not find new leader in specified timeout");
+        zkStateReader.getZkClient().printLayoutToStdOut();
+        fail("Could not find new leader even after waiting for " + maxWaitInSecs + "secs");
+      }
+      
+      i++;
+      Thread.sleep(1000);
+    }
+  }
 
   public static void verifyReplicaStatus(ZkStateReader reader, String collection, String shard, String coreNodeName, Replica.State expectedState) throws InterruptedException {
     int maxIterations = 100;


[08/50] [abbrv] lucene-solr:jira/solr-8542-v2: SOLR-9506: reverting the previous commit

Posted by cp...@apache.org.
SOLR-9506: reverting the previous commit


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/ffa5c4ba
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/ffa5c4ba
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/ffa5c4ba

Branch: refs/heads/jira/solr-8542-v2
Commit: ffa5c4ba2c2d6fa6bb943a70196aad0058333fa2
Parents: 9aa764a
Author: Noble Paul <no...@gmail.com>
Authored: Wed Oct 19 00:50:30 2016 +0530
Committer: Noble Paul <no...@gmail.com>
Committed: Wed Oct 19 00:50:30 2016 +0530

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  2 -
 .../src/java/org/apache/solr/core/SolrCore.java | 38 ----------
 .../apache/solr/search/SolrIndexSearcher.java   | 60 ++-------------
 .../apache/solr/update/IndexFingerprint.java    | 79 ++++++++------------
 .../cloud/LeaderFailureAfterFreshStartTest.java | 32 ++++++++
 .../solr/cloud/PeerSyncReplicationTest.java     |  2 +-
 .../org/apache/solr/update/PeerSyncTest.java    | 20 +----
 .../solr/cloud/AbstractDistribZkTestBase.java   | 31 --------
 8 files changed, 72 insertions(+), 192 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ffa5c4ba/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 4d75f88..be958d9 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -206,8 +206,6 @@ Optimizations
 * SOLR-9566: Don't put replicas into recovery when first creating a Collection
   (Alan Woodward)
 
-* SOLR-9506: cache IndexFingerprint for each segment (Pushkar Raste, noble)
-
 Other Changes
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ffa5c4ba/solr/core/src/java/org/apache/solr/core/SolrCore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index e47c8b6..2827f03 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -58,7 +58,6 @@ import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexDeletionPolicy;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
@@ -128,7 +127,6 @@ import org.apache.solr.search.stats.LocalStatsCache;
 import org.apache.solr.search.stats.StatsCache;
 import org.apache.solr.update.DefaultSolrCoreState;
 import org.apache.solr.update.DirectUpdateHandler2;
-import org.apache.solr.update.IndexFingerprint;
 import org.apache.solr.update.SolrCoreState;
 import org.apache.solr.update.SolrCoreState.IndexWriterCloser;
 import org.apache.solr.update.SolrIndexWriter;
@@ -151,8 +149,6 @@ import org.apache.zookeeper.data.Stat;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.MapMaker;
-
 import static com.google.common.base.Preconditions.checkNotNull;
 import static org.apache.solr.common.params.CommonParams.NAME;
 import static org.apache.solr.common.params.CommonParams.PATH;
@@ -204,8 +200,6 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
   private final ReentrantLock snapshotDelLock; // A lock instance to guard against concurrent deletions.
 
   public Date getStartTimeStamp() { return startTime; }
-  
-  private final Map<Object, IndexFingerprint> perSegmentFingerprintCache = new  MapMaker().weakKeys().makeMap();
 
   public long getStartNanoTime() {
     return startNanoTime;
@@ -1595,38 +1589,6 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
   }
 
   /**
-   * Computes the fingerprint of a segment and caches it only if all the versions in the segment are included in the fingerprint.
-   * We can't use computeIfAbsent because caching is conditional (as described above).
-   * There is a chance that two threads may compute the fingerprint for the same segment; that is acceptable rather than locking the entire map.
-   * @param searcher searcher that includes the specified LeafReaderContext
-   * @param ctx LeafReaderContext of the segment to compute the fingerprint of
-   * @param maxVersion maximum version number to consider for fingerprint computation
-   * @return IndexFingerprint of the segment
-   * @throws IOException Can throw IOException
-   */
-  public IndexFingerprint getIndexFingerprint(SolrIndexSearcher searcher, LeafReaderContext ctx, long maxVersion)
-      throws IOException {
-      IndexFingerprint f = null;
-      f = perSegmentFingerprintCache.get(ctx.reader().getCoreCacheKey()) ; 
-      // fingerprint is either not cached or 
-      // we want fingerprint only up to a version less than maxVersionEncountered in the segment 
-      if(f == null || (f.getMaxInHash() > maxVersion)) {
-        log.debug("IndexFingerprint cache miss for searcher:{} reader:{} readerHash:{} maxVersion:{}", searcher, ctx.reader() , ctx.reader().hashCode(), maxVersion);
-        f = IndexFingerprint.getFingerprint(searcher, ctx, maxVersion);
-        // cache fingerprint for the segment only if all the versions in the segment are included in the fingerprint  
-        if(f.getMaxVersionEncountered() == f.getMaxInHash()) {
-          log.info("Caching fingerprint for searcher:{} leafReaderContext:{} maxVersion:{}", searcher, ctx, maxVersion);
-          perSegmentFingerprintCache.put(ctx.reader().getCoreCacheKey(), f);
-        }
-        
-      } else {
-        log.debug("IndexFingerprint cache hit for searcher:{} reader:{} readerHash:{} maxVersion:{}", searcher, ctx.reader(), ctx.reader().hashCode(), maxVersion);
-      }
-      log.debug("Cache Size: {}, Segments Size:{}", perSegmentFingerprintCache.size(), searcher.getTopReaderContext().leaves().size());
-      return f;
-  }
-
-  /**
   * Returns the current registered searcher with its reference count incremented, or null if none are registered.
   */
   public RefCounted<SolrIndexSearcher> getRegisteredSearcher() {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ffa5c4ba/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index 02069ee..933477b 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@ -42,50 +42,22 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.DocumentStoredFieldVisitor;
 import org.apache.lucene.document.LazyDocument;
 import org.apache.lucene.index.*;
+import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.StoredFieldVisitor.Status;
-import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.*;
 import org.apache.lucene.search.BooleanClause.Occur;
-import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.CollectionStatistics;
-import org.apache.lucene.search.Collector;
-import org.apache.lucene.search.ConstantScoreQuery;
-import org.apache.lucene.search.DocIdSet;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.EarlyTerminatingSortingCollector;
-import org.apache.lucene.search.Explanation;
-import org.apache.lucene.search.FieldDoc;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.LeafCollector;
-import org.apache.lucene.search.MatchAllDocsQuery;
-import org.apache.lucene.search.MultiCollector;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.ScoreDoc;
-import org.apache.lucene.search.Scorer;
-import org.apache.lucene.search.SimpleCollector;
-import org.apache.lucene.search.Sort;
-import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.TermStatistics;
-import org.apache.lucene.search.TimeLimitingCollector;
-import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.search.TopDocsCollector;
-import org.apache.lucene.search.TopFieldCollector;
-import org.apache.lucene.search.TopFieldDocs;
-import org.apache.lucene.search.TopScoreDocCollector;
-import org.apache.lucene.search.TotalHitCountCollector;
-import org.apache.lucene.search.Weight;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.FixedBitSet;
 import org.apache.solr.common.SolrDocumentBase;
-import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
+import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.core.DirectoryFactory;
 import org.apache.solr.core.DirectoryFactory.DirContext;
+import org.apache.solr.core.DirectoryFactory;
 import org.apache.solr.core.SolrConfig;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrInfoMBean;
@@ -2441,11 +2413,12 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
    * gets a cached version of the IndexFingerprint for this searcher
    **/
   public IndexFingerprint getIndexFingerprint(long maxVersion) throws IOException {
+    final SolrIndexSearcher searcher = this;
     final AtomicReference<IOException> exception = new AtomicReference<>();
     try {
       return maxVersionFingerprintCache.computeIfAbsent(maxVersion, key -> {
         try {
-          return computeFromPerSegmentIndexFingerprint(maxVersion);
+          return IndexFingerprint.getFingerprint(searcher, key);
         } catch (IOException e) {
           exception.set(e);
           return null;
@@ -2456,27 +2429,6 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
     }
   }
 
-  private IndexFingerprint computeFromPerSegmentIndexFingerprint(long maxVersion) throws IOException {
-    final SolrIndexSearcher searcher = this;
-    final AtomicReference<IOException> exception = new AtomicReference<>();
-    try {
-      return searcher.getTopReaderContext().leaves().stream()
-          .map(ctx -> {
-            try {
-              return searcher.getCore().getIndexFingerprint(searcher, ctx, maxVersion);
-            } catch (IOException e) {
-              exception.set(e);
-              return null;
-            }
-          })
-          .filter(java.util.Objects::nonNull)
-          .reduce(new IndexFingerprint(maxVersion), IndexFingerprint::reduce);
-    } finally {
-      if (exception.get() != null) throw exception.get();
-    }
-  }
-  
-
   /////////////////////////////////////////////////////////////////////
   // SolrInfoMBean stuff: Statistics and Module Info
   /////////////////////////////////////////////////////////////////////

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ffa5c4ba/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java b/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java
index 31adf07..877ef03 100644
--- a/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java
+++ b/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java
@@ -52,14 +52,6 @@ public class IndexFingerprint implements MapSerializable {
   private long numDocs;
   private long maxDoc;
 
-  public IndexFingerprint() {
-    // default constructor
-  }
-  
-  public IndexFingerprint (long maxVersionSpecified)  {
-    this.maxVersionSpecified = maxVersionSpecified;
-  }
-  
   public long getMaxVersionSpecified() {
     return maxVersionSpecified;
   }
@@ -90,63 +82,53 @@ public class IndexFingerprint implements MapSerializable {
 
   /** Opens a new realtime searcher and returns it's (possibly cached) fingerprint */
   public static IndexFingerprint getFingerprint(SolrCore core, long maxVersion) throws IOException {
-    RTimer timer = new RTimer();
     core.getUpdateHandler().getUpdateLog().openRealtimeSearcher();
     RefCounted<SolrIndexSearcher> newestSearcher = core.getUpdateHandler().getUpdateLog().uhandler.core.getRealtimeSearcher();
     try {
-      IndexFingerprint f = newestSearcher.get().getIndexFingerprint(maxVersion);
-      final double duration = timer.stop();
-      log.debug("IndexFingerprint time : {} result:{}" ,duration, f);
-      return f;
+      return newestSearcher.get().getIndexFingerprint(maxVersion);
     } finally {
       if (newestSearcher != null) {
         newestSearcher.decref();
       }
     }
-    
   }
-  
-  public static IndexFingerprint getFingerprint(SolrIndexSearcher searcher, LeafReaderContext ctx, Long maxVersion)
-      throws IOException {
+
+  /** Calculates an index fingerprint */
+  public static IndexFingerprint getFingerprint(SolrIndexSearcher searcher, long maxVersion) throws IOException {
+    RTimer timer = new RTimer();
+
     SchemaField versionField = VersionInfo.getAndCheckVersionField(searcher.getSchema());
+
+    IndexFingerprint f = new IndexFingerprint();
+    f.maxVersionSpecified = maxVersion;
+    f.maxDoc = searcher.maxDoc();
+
+    // TODO: this could be parallelized, or even cached per-segment if performance becomes an issue
     ValueSource vs = versionField.getType().getValueSource(versionField, null);
     Map funcContext = ValueSource.newContext(searcher);
     vs.createWeight(funcContext, searcher);
-    
-    IndexFingerprint f = new IndexFingerprint();
-    f.maxVersionSpecified = maxVersion;
-    f.maxDoc = ctx.reader().maxDoc();
-    f.numDocs = ctx.reader().numDocs();
-    
-    int maxDoc = ctx.reader().maxDoc();
-    Bits liveDocs = ctx.reader().getLiveDocs();
-    FunctionValues fv = vs.getValues(funcContext, ctx);
-    for (int doc = 0; doc < maxDoc; doc++) {
-      if (liveDocs != null && !liveDocs.get(doc)) continue;
-      long v = fv.longVal(doc);
-      f.maxVersionEncountered = Math.max(v, f.maxVersionEncountered);
-      if (v <= f.maxVersionSpecified) {
-        f.maxInHash = Math.max(v, f.maxInHash);
-        f.versionsHash += Hash.fmix64(v);
-        f.numVersions++;
+    for (LeafReaderContext ctx : searcher.getTopReaderContext().leaves()) {
+      int maxDoc = ctx.reader().maxDoc();
+      f.numDocs += ctx.reader().numDocs();
+      Bits liveDocs = ctx.reader().getLiveDocs();
+      FunctionValues fv = vs.getValues(funcContext, ctx);
+      for (int doc = 0; doc < maxDoc; doc++) {
+        if (liveDocs != null && !liveDocs.get(doc)) continue;
+        long v = fv.longVal(doc);
+        f.maxVersionEncountered = Math.max(v, f.maxVersionEncountered);
+        if (v <= f.maxVersionSpecified) {
+          f.maxInHash = Math.max(v, f.maxInHash);
+          f.versionsHash += Hash.fmix64(v);
+          f.numVersions++;
+        }
       }
     }
-    
+
+    final double duration = timer.stop();
+    log.info("IndexFingerprint millis:" + duration + " result:" + f);
+
     return f;
   }
-  
-  
-  public static IndexFingerprint reduce(IndexFingerprint acc, IndexFingerprint f2) {
-    // acc should have maxVersionSpecified already set in it using IndexFingerprint(long maxVersionSpecified) constructor
-    acc.maxDoc = Math.max(acc.maxDoc, f2.maxDoc);
-    acc.numDocs += f2.numDocs;
-    acc.maxVersionEncountered = Math.max(acc.maxVersionEncountered, f2.maxVersionEncountered);
-    acc.maxInHash = Math.max(acc.maxInHash, f2.maxInHash);
-    acc.versionsHash += f2.versionsHash;
-    acc.numVersions += f2.numVersions;
-
-    return acc;
-  }
 
   /** returns 0 for equal, negative if f1 is less recent than f2, positive if more recent */
   public static int compare(IndexFingerprint f1, IndexFingerprint f2) {
@@ -218,5 +200,4 @@ public class IndexFingerprint implements MapSerializable {
   public String toString() {
     return toMap(new LinkedHashMap<>()).toString();
   }
-
 }
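
For reference, a minimal Java sketch of how the consolidated fingerprint API above can be driven: compute a local fingerprint up to a max version and compare it against a peer's. This is an illustration only (the helper class and method names are not part of the patch); it assumes a SolrCore handle and the peer's IndexFingerprint are already in hand.

    import java.io.IOException;

    import org.apache.solr.core.SolrCore;
    import org.apache.solr.update.IndexFingerprint;

    /** Illustrative helper, not part of the commit. */
    public class FingerprintCheck {

      /** Returns true if the peer's index is at least as recent as this core's, up to maxVersion. */
      public static boolean peerIsCaughtUp(SolrCore core, IndexFingerprint peer, long maxVersion) throws IOException {
        // getFingerprint opens a realtime searcher and computes (or reuses a cached)
        // fingerprint over all live docs whose _version_ is <= maxVersion.
        IndexFingerprint mine = IndexFingerprint.getFingerprint(core, maxVersion);
        // compare returns 0 for equal, negative if the first argument is less recent,
        // positive if it is more recent.
        return IndexFingerprint.compare(mine, peer) <= 0;
      }
    }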

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ffa5c4ba/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java
index ef21386..348532c 100644
--- a/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java
@@ -29,6 +29,7 @@ import java.util.stream.Collectors;
 import org.apache.commons.lang.RandomStringUtils;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.request.UpdateRequest;
 import org.apache.solr.cloud.ZkTestServer.LimitViolationAction;
 import org.apache.solr.common.SolrInputDocument;
@@ -36,8 +37,10 @@ import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.Slice;
+import org.apache.solr.common.cloud.Slice.State;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.core.Diagnostics;
 import org.apache.solr.handler.ReplicationHandler;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -194,6 +197,35 @@ public class LeaderFailureAfterFreshStartTest extends AbstractFullDistribZkTestB
   }
 
   
+  static void waitForNewLeader(CloudSolrClient cloudClient, String shardName, Replica oldLeader, int maxWaitInSecs)
+      throws Exception {
+    log.info("Will wait for a node to become leader for {} secs", maxWaitInSecs);
+    boolean waitForLeader = true;
+    int i = 0;
+    ZkStateReader zkStateReader = cloudClient.getZkStateReader();
+    zkStateReader.forceUpdateCollection(DEFAULT_COLLECTION);
+    
+    while(waitForLeader) {
+      ClusterState clusterState = zkStateReader.getClusterState();
+      DocCollection coll = clusterState.getCollection("collection1");
+      Slice slice = coll.getSlice(shardName);
+      if(slice.getLeader() != oldLeader && slice.getState() == State.ACTIVE) {
+        log.info("New leader got elected in {} secs", i);
+        break;
+      }
+      
+      if(i == maxWaitInSecs) {
+        Diagnostics.logThreadDumps("Could not find new leader in specified timeout");
+        zkStateReader.getZkClient().printLayoutToStdOut();
+        fail("Could not find new leader even after waiting for " + maxWaitInSecs + "secs");
+      }
+      
+      i++;
+      Thread.sleep(1000);
+    }
+  }
+    
+
 
   private void waitTillNodesActive() throws Exception {
     for (int i = 0; i < 60; i++) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ffa5c4ba/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java b/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java
index e00ea3c..3ded7d2 100644
--- a/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java
@@ -149,7 +149,7 @@ public class PeerSyncReplicationTest extends AbstractFullDistribZkTestBase {
       log.info("Now shutting down initial leader");
       forceNodeFailures(singletonList(initialLeaderJetty));
       log.info("Updating mappings from zk");
-      waitForNewLeader(cloudClient, "shard1", (Replica) initialLeaderJetty.client.info, 15);
+      LeaderFailureAfterFreshStartTest.waitForNewLeader(cloudClient, "shard1", (Replica) initialLeaderJetty.client.info, 15);
       updateMappingsFromZk(jettys, clients, true);
       assertEquals("PeerSynced node did not become leader", nodePeerSynced, shardToLeaderJetty.get("shard1"));
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ffa5c4ba/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java b/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java
index 60fed8b..64edd21 100644
--- a/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java
+++ b/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java
@@ -16,9 +16,6 @@
  */
 package org.apache.solr.update;
 
-import java.io.IOException;
-import java.util.Arrays;
-
 import org.apache.solr.BaseDistributedSearchTestCase;
 import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
 import org.apache.solr.client.solrj.SolrClient;
@@ -29,6 +26,9 @@ import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.StrUtils;
 import org.junit.Test;
 
+import java.io.IOException;
+import java.util.Arrays;
+
 import static org.apache.solr.update.processor.DistributedUpdateProcessor.DistribPhase;
 import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM;
 
@@ -145,20 +145,6 @@ public class PeerSyncTest extends BaseDistributedSearchTestCase {
     assertSync(client1, numVersions, true, shardsArr[0]);
     client0.commit(); client1.commit(); queryAndCompare(params("q", "*:*", "sort","_version_ desc"), client0, client1);
 
-    // Test PeerSync after replica misses delete
-    v = 2500;
-    add(client0, seenLeader, sdoc("id", "2500", "_version_", ++v));
-    add(client1, seenLeader, sdoc("id", "2500", "_version_", v));
-    client0.commit();
-    client1.commit();
-    del(client0, params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", Long.toString(-++v)), "2500");
-    add(client0, seenLeader, sdoc("id", "2501", "_version_", ++v));
-    add(client1, seenLeader, sdoc("id", "2501", "_version_", v));
-    // Sync should be able to delete the document
-    assertSync(client1, numVersions, true, shardsArr[0]);
-    client0.commit();
-    client1.commit();
-    queryAndCompare(params("q", "*:*", "sort", "_version_ desc"), client0, client1);
 
     //
     // Test that handling reorders work when applying docs retrieved from peer

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ffa5c4ba/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
index d04d996..03db71c 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
@@ -24,14 +24,11 @@ import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.commons.io.FileUtils;
 import org.apache.solr.BaseDistributedSearchTestCase;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.common.cloud.ClusterState;
-import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZkStateReader;
-import org.apache.solr.common.cloud.Slice.State;
 import org.apache.solr.core.Diagnostics;
 import org.apache.solr.core.MockDirectoryFactory;
 import org.apache.zookeeper.KeeperException;
@@ -225,34 +222,6 @@ public abstract class AbstractDistribZkTestBase extends BaseDistributedSearchTes
 
     log.info("Collection has disappeared - collection: " + collection);
   }
-  
-  static void waitForNewLeader(CloudSolrClient cloudClient, String shardName, Replica oldLeader, int maxWaitInSecs)
-      throws Exception {
-    log.info("Will wait for a node to become leader for {} secs", maxWaitInSecs);
-    boolean waitForLeader = true;
-    int i = 0;
-    ZkStateReader zkStateReader = cloudClient.getZkStateReader();
-    zkStateReader.forceUpdateCollection(DEFAULT_COLLECTION);
-    
-    while(waitForLeader) {
-      ClusterState clusterState = zkStateReader.getClusterState();
-      DocCollection coll = clusterState.getCollection("collection1");
-      Slice slice = coll.getSlice(shardName);
-      if(slice.getLeader() != oldLeader && slice.getState() == State.ACTIVE) {
-        log.info("New leader got elected in {} secs", i);
-        break;
-      }
-      
-      if(i == maxWaitInSecs) {
-        Diagnostics.logThreadDumps("Could not find new leader in specified timeout");
-        zkStateReader.getZkClient().printLayoutToStdOut();
-        fail("Could not find new leader even after waiting for " + maxWaitInSecs + "secs");
-      }
-      
-      i++;
-      Thread.sleep(1000);
-    }
-  }
 
   public static void verifyReplicaStatus(ZkStateReader reader, String collection, String shard, String coreNodeName, Replica.State expectedState) throws InterruptedException {
     int maxIterations = 100;


[34/50] [abbrv] lucene-solr:jira/solr-8542-v2: SOLR-9325: Remove unnecessary search/replace in installer script

Posted by cp...@apache.org.
SOLR-9325: Remove unnecessary search/replace in installer script


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c9cf0eff
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c9cf0eff
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c9cf0eff

Branch: refs/heads/jira/solr-8542-v2
Commit: c9cf0eff03763d151a04baccb5530445d5d5feb5
Parents: 91f58ac
Author: Jan Høydahl <ja...@apache.org>
Authored: Sat Oct 22 01:00:48 2016 +0200
Committer: Jan Høydahl <ja...@apache.org>
Committed: Sat Oct 22 01:00:48 2016 +0200

----------------------------------------------------------------------
 solr/bin/install_solr_service.sh | 2 --
 1 file changed, 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c9cf0eff/solr/bin/install_solr_service.sh
----------------------------------------------------------------------
diff --git a/solr/bin/install_solr_service.sh b/solr/bin/install_solr_service.sh
index 6b9df79..a23612f 100755
--- a/solr/bin/install_solr_service.sh
+++ b/solr/bin/install_solr_service.sh
@@ -344,8 +344,6 @@ if [ -f "$SOLR_VAR_DIR/log4j.properties" ]; then
   echo -e "\n$SOLR_VAR_DIR/log4j.properties already exists. Skipping install ...\n"
 else
   cp "$SOLR_INSTALL_DIR/server/resources/log4j.properties" "$SOLR_VAR_DIR/log4j.properties"
-  sed_expr="s#solr.log=.*#solr.log=\${solr.solr.home}/../logs#"
-  sed -i -e "$sed_expr" "$SOLR_VAR_DIR/log4j.properties"
 fi
 chown -R "$SOLR_USER:" "$SOLR_VAR_DIR"
 find "$SOLR_VAR_DIR" -type d -print0 | xargs -0 chmod 0750


[04/50] [abbrv] lucene-solr:jira/solr-8542-v2: Merge remote-tracking branch 'origin/master'

Posted by cp...@apache.org.
Merge remote-tracking branch 'origin/master'


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/fe17b4e2
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/fe17b4e2
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/fe17b4e2

Branch: refs/heads/jira/solr-8542-v2
Commit: fe17b4e2bb9c90bd3c39b01fe516fbe52d5ec914
Parents: bb907a2 a17e920
Author: Noble Paul <no...@gmail.com>
Authored: Tue Oct 18 19:45:06 2016 +0530
Committer: Noble Paul <no...@gmail.com>
Committed: Tue Oct 18 19:45:06 2016 +0530

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |  6 ++
 .../simpletext/SimpleTextPointsReader.java      | 17 +++++-
 .../lucene70/Lucene70DocValuesProducer.java     | 63 +++++++++-----------
 .../org/apache/lucene/util/bkd/BKDReader.java   | 32 +++++-----
 .../org/apache/lucene/util/bkd/BKDWriter.java   | 14 +++--
 .../apache/lucene/facet/FacetsCollector.java    | 42 ++++++++-----
 .../apache/lucene/facet/TestDrillDownQuery.java | 11 ++++
 7 files changed, 112 insertions(+), 73 deletions(-)
----------------------------------------------------------------------



[12/50] [abbrv] lucene-solr:jira/solr-8542-v2: fix typo in comment

Posted by cp...@apache.org.
fix typo in comment


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/731c5f93
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/731c5f93
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/731c5f93

Branch: refs/heads/jira/solr-8542-v2
Commit: 731c5f93166ad28c21f330eef94c9d8f451d11c7
Parents: 45ca4bd
Author: Mike McCandless <mi...@apache.org>
Authored: Wed Oct 19 09:47:15 2016 -0400
Committer: Mike McCandless <mi...@apache.org>
Committed: Wed Oct 19 09:47:47 2016 -0400

----------------------------------------------------------------------
 lucene/facet/src/java/org/apache/lucene/facet/DrillSideways.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/731c5f93/lucene/facet/src/java/org/apache/lucene/facet/DrillSideways.java
----------------------------------------------------------------------
diff --git a/lucene/facet/src/java/org/apache/lucene/facet/DrillSideways.java b/lucene/facet/src/java/org/apache/lucene/facet/DrillSideways.java
index cc5647e..57f0a32 100644
--- a/lucene/facet/src/java/org/apache/lucene/facet/DrillSideways.java
+++ b/lucene/facet/src/java/org/apache/lucene/facet/DrillSideways.java
@@ -157,7 +157,7 @@ public class DrillSideways {
     
     DrillSidewaysQuery dsq = new DrillSidewaysQuery(baseQuery, drillDownCollector, drillSidewaysCollectors, drillDownQueries, scoreSubDocsAtOnce());
     if (hitCollector.needsScores() == false) {
-      // this is a borrible hack in order to make sure IndexSearcher will not
+      // this is a horrible hack in order to make sure IndexSearcher will not
       // attempt to cache the DrillSidewaysQuery
       hitCollector = new FilterCollector(hitCollector) {
         @Override


[14/50] [abbrv] lucene-solr:jira/solr-8542-v2: SOLR-9417: Allow daemons to terminate when they finish iterating a topic

Posted by cp...@apache.org.
SOLR-9417: Allow daemons to terminate when they finish iterating a topic


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/f43742ac
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/f43742ac
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/f43742ac

Branch: refs/heads/jira/solr-8542-v2
Commit: f43742acc5148ea89a9a625818a7229d56e0558e
Parents: d03cc92
Author: Joel Bernstein <jb...@apache.org>
Authored: Wed Oct 19 13:16:01 2016 -0400
Committer: Joel Bernstein <jb...@apache.org>
Committed: Wed Oct 19 13:17:06 2016 -0400

----------------------------------------------------------------------
 .../org/apache/solr/handler/StreamHandler.java  |   4 +-
 .../client/solrj/io/stream/DaemonStream.java    |  44 ++++-
 .../solrj/io/stream/StreamExpressionTest.java   | 193 +++++++++++++++++--
 3 files changed, 213 insertions(+), 28 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f43742ac/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
index b9f30bc..3e841bd 100644
--- a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
@@ -19,6 +19,7 @@ package org.apache.solr.handler;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
@@ -97,7 +98,7 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
   private StreamFactory streamFactory = new StreamFactory();
   private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private String coreName;
-  private Map<String, DaemonStream> daemons = new HashMap<>();
+  private Map<String, DaemonStream> daemons = Collections.synchronizedMap(new HashMap());
 
   @Override
   public PermissionNameProvider.Name getPermissionName(AuthorizationContext request) {
@@ -245,6 +246,7 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
       if(daemons.containsKey(daemonStream.getId())) {
         daemons.remove(daemonStream.getId()).close();
       }
+      daemonStream.setDaemons(daemons);
       daemonStream.open();  //This will start the deamonStream
       daemons.put(daemonStream.getId(), daemonStream);
       rsp.add("result-set", new DaemonResponseStream("Deamon:"+daemonStream.getId()+" started on "+coreName));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f43742ac/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java
index 77648df..8214f9a 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java
@@ -52,6 +52,8 @@ public class DaemonStream extends TupleStream implements Expressible {
   private Exception exception;
   private long runInterval;
   private String id;
+  private Map<String, DaemonStream> daemons;
+  private boolean terminate;
   private boolean closed = false;
   private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
@@ -64,10 +66,13 @@ public class DaemonStream extends TupleStream implements Expressible {
     StreamExpressionNamedParameter idExpression = factory.getNamedOperand(expression, "id");
     StreamExpressionNamedParameter runExpression = factory.getNamedOperand(expression, "runInterval");
     StreamExpressionNamedParameter queueExpression = factory.getNamedOperand(expression, "queueSize");
+    StreamExpressionNamedParameter terminateExpression = factory.getNamedOperand(expression, "terminate");
+
 
     String id = null;
     long runInterval = 0L;
     int queueSize = 0;
+    boolean terminate = false;
 
     if(idExpression == null) {
       throw new IOException("Invalid expression id parameter expected");
@@ -82,24 +87,26 @@ public class DaemonStream extends TupleStream implements Expressible {
     }
 
     if(queueExpression != null) {
-       queueSize= Integer.parseInt(((StreamExpressionValue)queueExpression.getParameter()).getValue());
+       queueSize= Integer.parseInt(((StreamExpressionValue) queueExpression.getParameter()).getValue());
     }
 
-    // validate expression contains only what we want.
-    if(expression.getParameters().size() != streamExpressions.size() + 2 &&
-        expression.getParameters().size() != streamExpressions.size() + 3) {
-      throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - unknown operands found", expression));
+    if(terminateExpression != null) {
+      terminate = Boolean.parseBoolean(((StreamExpressionValue) terminateExpression.getParameter()).getValue());
     }
 
     if(1 != streamExpressions.size()){
       throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - expecting a single stream but found %d",expression, streamExpressions.size()));
     }
 
-    init(tupleStream, id, runInterval, queueSize);
+    init(tupleStream, id, runInterval, queueSize, terminate);
+  }
+
+  public DaemonStream(TupleStream tupleStream, String id, long runInterval, int queueSize, boolean terminate) {
+    init(tupleStream, id, runInterval, queueSize, terminate);
   }
 
   public DaemonStream(TupleStream tupleStream, String id, long runInterval, int queueSize) {
-    init(tupleStream, id, runInterval, queueSize);
+    this(tupleStream, id, runInterval, queueSize, false);
   }
 
   @Override
@@ -126,6 +133,7 @@ public class DaemonStream extends TupleStream implements Expressible {
     expression.addParameter(new StreamExpressionNamedParameter("id", id));
     expression.addParameter(new StreamExpressionNamedParameter("runInterval", Long.toString(runInterval)));
     expression.addParameter(new StreamExpressionNamedParameter("queueSize", Integer.toString(queueSize)));
+    expression.addParameter(new StreamExpressionNamedParameter("terminate", Boolean.toString(terminate)));
 
     return expression;
   }
@@ -148,10 +156,16 @@ public class DaemonStream extends TupleStream implements Expressible {
   }
 
   public void init(TupleStream tupleStream, String id, long runInterval, int queueSize) {
+    init(tupleStream, id, runInterval, queueSize, false);
+  }
+
+  public void init(TupleStream tupleStream, String id, long runInterval, int queueSize, boolean terminate) {
     this.tupleStream = tupleStream;
     this.id = id;
     this.runInterval = runInterval;
     this.queueSize = queueSize;
+    this.terminate = terminate;
+
     if(queueSize > 0) {
       queue = new ArrayBlockingQueue(queueSize);
       eatTuples = false;
@@ -228,6 +242,10 @@ public class DaemonStream extends TupleStream implements Expressible {
     return tuple;
   }
 
+  public void setDaemons(Map<String, DaemonStream> daemons) {
+    this.daemons = daemons;
+  }
+
   private synchronized void incrementIterations() {
     ++iterations;
   }
@@ -279,6 +297,18 @@ public class DaemonStream extends TupleStream implements Expressible {
                 errors = 0; // Reset errors on successful run.
                 if (tuple.fields.containsKey("sleepMillis")) {
                   this.sleepMillis = tuple.getLong("sleepMillis");
+
+                  if(terminate && sleepMillis > 0) {
+                    //TopicStream provides sleepMillis > 0 if the last run had no Tuples.
+                    //This means the topic queue is empty. Time to terminate.
+                    //Remove ourselves from the daemons map.
+                    if(daemons != null) {
+                      daemons.remove(id);
+                    }
+                    //Break out of the thread loop and end the run.
+                    break OUTER;
+                  }
+
                   this.runInterval = -1;
                 }
                 break INNER;
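
To illustrate the new terminate flag, here is a small, self-contained sketch (not part of the patch; the zkHost, collection and id values are placeholders) of a daemon wrapping a topic() stream so that it shuts itself down once the topic reports an empty run:

    import org.apache.solr.client.solrj.io.SolrClientCache;
    import org.apache.solr.client.solrj.io.Tuple;
    import org.apache.solr.client.solrj.io.stream.DaemonStream;
    import org.apache.solr.client.solrj.io.stream.StreamContext;
    import org.apache.solr.client.solrj.io.stream.TopicStream;
    import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;

    /** Illustrative driver, not part of the commit. */
    public class TerminatingDaemonExample {
      public static void main(String[] args) throws Exception {
        StreamFactory factory = new StreamFactory()
            .withCollectionZkHost("collection1", "localhost:9983")   // placeholder zkHost
            .withFunctionName("topic", TopicStream.class)
            .withFunctionName("daemon", DaemonStream.class);

        // terminate=true: once the inner topic() signals an empty run (sleepMillis > 0),
        // the daemon breaks out of its run loop (and, when running under /stream,
        // removes itself from the daemons map) instead of looping forever.
        DaemonStream daemon = (DaemonStream) factory.constructStream(
            "daemon(topic(collection1, collection1, q=\"*:*\", initialCheckpoint=0, id=\"topic1\", fl=\"id\"),"
            + " id=\"d1\", runInterval=\"1000\", terminate=\"true\", queueSize=\"50\")");

        StreamContext context = new StreamContext();
        context.setSolrClientCache(new SolrClientCache());
        daemon.setStreamContext(context);

        daemon.open();                            // starts the background thread
        try {
          Tuple tuple;
          while (!(tuple = daemon.read()).EOF) {  // drain queued tuples until the daemon terminates
            System.out.println(tuple.getLong("id"));
          }
        } finally {
          daemon.close();
        }
      }
    }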

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f43742ac/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
index 842f6a6..7b5777d 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
@@ -533,24 +533,24 @@ public class StreamExpressionTest extends SolrCloudTestCase {
 
     // Basic test desc
     expression = StreamExpressionParser.parse("top("
-                                              + "n=2,"
-                                              + "unique("
-                                              +   "search(" + COLLECTION + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f desc\"),"
-                                              +   "over=\"a_f\"),"
-                                              + "sort=\"a_f desc\")");
+        + "n=2,"
+        + "unique("
+        + "search(" + COLLECTION + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f desc\"),"
+        + "over=\"a_f\"),"
+        + "sort=\"a_f desc\")");
     stream = new RankStream(expression, factory);
     tuples = getTuples(stream);
     
     assert(tuples.size() == 2);
-    assertOrder(tuples, 4,3);
+    assertOrder(tuples, 4, 3);
     
     // full factory
     stream = factory.constructStream("top("
-                                    + "n=4,"
-                                    + "unique("
-                                    +   "search(" + COLLECTION + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"),"
-                                    +   "over=\"a_f\"),"
-                                    + "sort=\"a_f asc\")");
+        + "n=4,"
+        + "unique("
+        + "search(" + COLLECTION + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"),"
+        + "over=\"a_f\"),"
+        + "sort=\"a_f asc\")");
     tuples = getTuples(stream);
     
     assert(tuples.size() == 4);
@@ -827,7 +827,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
         .withFunctionName("parallel", ParallelStream.class)
         .withFunctionName("fetch", FetchStream.class);
 
-    stream = factory.constructStream("parallel("+COLLECTION+", workers=2, sort=\"a_f asc\", fetch("+COLLECTION+",  search(" + COLLECTION + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc\", partitionKeys=\"id\"), on=\"id=a_i\", batchSize=\"2\", fl=\"subject\"))");
+    stream = factory.constructStream("parallel(" + COLLECTION + ", workers=2, sort=\"a_f asc\", fetch(" + COLLECTION + ",  search(" + COLLECTION + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc\", partitionKeys=\"id\"), on=\"id=a_i\", batchSize=\"2\", fl=\"subject\"))");
     tuples = getTuples(stream);
 
     assert(tuples.size() == 10);
@@ -853,7 +853,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     assertTrue("blah blah blah 9".equals(t.getString("subject")));
 
 
-    stream = factory.constructStream("parallel("+COLLECTION+", workers=2, sort=\"a_f asc\", fetch("+COLLECTION+",  search(" + COLLECTION + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc\", partitionKeys=\"id\"), on=\"id=a_i\", batchSize=\"3\", fl=\"subject\"))");
+    stream = factory.constructStream("parallel(" + COLLECTION + ", workers=2, sort=\"a_f asc\", fetch(" + COLLECTION + ",  search(" + COLLECTION + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc\", partitionKeys=\"id\"), on=\"id=a_i\", batchSize=\"3\", fl=\"subject\"))");
     tuples = getTuples(stream);
 
     assert(tuples.size() == 10);
@@ -1003,6 +1003,45 @@ public class StreamExpressionTest extends SolrCloudTestCase {
 
   }
 
+
+  @Test
+  public void testTerminatingDaemonStream() throws Exception {
+
+    new UpdateRequest()
+        .add(id, "0", "a_s", "hello", "a_i", "0", "a_f", "1")
+        .add(id, "2", "a_s", "hello", "a_i", "2", "a_f", "2")
+        .add(id, "3", "a_s", "hello", "a_i", "3", "a_f", "3")
+        .add(id, "4", "a_s", "hello", "a_i", "4", "a_f", "4")
+        .add(id, "1", "a_s", "hello", "a_i", "1", "a_f", "5")
+        .add(id, "5", "a_s", "hello", "a_i", "10", "a_f", "6")
+        .add(id, "6", "a_s", "hello", "a_i", "11", "a_f", "7")
+        .add(id, "7", "a_s", "hello", "a_i", "12", "a_f", "8")
+        .add(id, "8", "a_s", "hello", "a_i", "13", "a_f", "9")
+        .add(id, "9", "a_s", "hello", "a_i", "14", "a_f", "10")
+        .commit(cluster.getSolrClient(), COLLECTION);
+
+    StreamFactory factory = new StreamFactory()
+        .withCollectionZkHost(COLLECTION, cluster.getZkServer().getZkAddress())
+        .withFunctionName("topic", TopicStream.class)
+        .withFunctionName("daemon", DaemonStream.class);
+
+    StreamExpression expression;
+    DaemonStream daemonStream;
+
+    SolrClientCache cache = new SolrClientCache();
+    StreamContext context = new StreamContext();
+    context.setSolrClientCache(cache);
+    expression = StreamExpressionParser.parse("daemon(topic("+COLLECTION+","+COLLECTION+", q=\"a_s:hello\", initialCheckpoint=0, id=\"topic1\", rows=2, fl=\"id\""
+        + "), id=test, runInterval=1000, terminate=true, queueSize=50)");
+    daemonStream = (DaemonStream)factory.constructStream(expression);
+    daemonStream.setStreamContext(context);
+
+    List<Tuple> tuples = getTuples(daemonStream);
+    assertTrue(tuples.size() == 10);
+    cache.close();
+  }
+
+
   @Test
   public void testRollupStream() throws Exception {
 
@@ -1367,7 +1406,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
 
 
     assert(tuples.size() == 9);
-    assertOrder(tuples, 0,1,2,3,4,7,6,8,9);
+    assertOrder(tuples, 0, 1, 2, 3, 4, 7, 6, 8, 9);
 
     //Test descending
 
@@ -1376,7 +1415,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     tuples = getTuples(pstream);
 
     assert(tuples.size() == 8);
-    assertOrder(tuples, 9,8,6,4,3,2,1,0);
+    assertOrder(tuples, 9, 8, 6, 4, 3, 2, 1, 0);
 
   }
 
@@ -1627,7 +1666,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     stream = new LeftOuterJoinStream(expression, factory);
     tuples = getTuples(stream);    
     assert(tuples.size() == 10);
-    assertOrder(tuples, 7,6,3,4,5,1,1,15,15,2);
+    assertOrder(tuples, 7, 6, 3, 4, 5, 1, 1, 15, 15, 2);
     
     // Results in both searches, no join matches
     expression = StreamExpressionParser.parse("leftOuterJoin("
@@ -1637,7 +1676,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     stream = new LeftOuterJoinStream(expression, factory);
     tuples = getTuples(stream);    
     assert(tuples.size() == 8);
-    assertOrder(tuples, 1,15,2,3,4,5,6,7);
+    assertOrder(tuples, 1, 15, 2, 3, 4, 5, 6, 7);
     
     // Differing field names
     expression = StreamExpressionParser.parse("leftOuterJoin("
@@ -1647,7 +1686,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     stream = new LeftOuterJoinStream(expression, factory);
     tuples = getTuples(stream);
     assert(tuples.size() == 10);
-    assertOrder(tuples, 1,1,15,15,2,3,4,5,6,7);
+    assertOrder(tuples, 1, 1, 15, 15, 2, 3, 4, 5, 6, 7);
 
   }
 
@@ -1764,7 +1803,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     stream = new OuterHashJoinStream(expression, factory);
     tuples = getTuples(stream);    
     assert(tuples.size() == 10);
-    assertOrder(tuples, 1,1,15,15,2,3,4,5,6,7);
+    assertOrder(tuples, 1, 1, 15, 15, 2, 3, 4, 5, 6, 7);
 
     // Basic desc
     expression = StreamExpressionParser.parse("outerHashJoin("
@@ -1794,7 +1833,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     stream = new OuterHashJoinStream(expression, factory);
     tuples = getTuples(stream);
     assert(tuples.size() == 10);
-    assertOrder(tuples, 1,1,15,15,2,3,4,5,6,7);
+    assertOrder(tuples, 1, 1, 15, 15, 2, 3, 4, 5, 6, 7);
   }
 
   @Test
@@ -3202,6 +3241,120 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     CollectionAdminRequest.deleteCollection("parallelDestinationCollection1").process(cluster.getSolrClient());
   }
 
+
+  @Test
+  public void testParallelTerminatingDaemonUpdateStream() throws Exception {
+
+    CollectionAdminRequest.createCollection("parallelDestinationCollection1", "conf", 2, 1).process(cluster.getSolrClient());
+    AbstractDistribZkTestBase.waitForRecoveriesToFinish("parallelDestinationCollection1", cluster.getSolrClient().getZkStateReader(),
+        false, true, TIMEOUT);
+
+    new UpdateRequest()
+        .add(id, "0", "a_s", "hello", "a_i", "0", "a_f", "0", "s_multi", "aaaa",  "s_multi", "bbbb",  "i_multi", "4", "i_multi", "7")
+        .add(id, "2", "a_s", "hello", "a_i", "2", "a_f", "0", "s_multi", "aaaa1", "s_multi", "bbbb1", "i_multi", "44", "i_multi", "77")
+        .add(id, "3", "a_s", "hello", "a_i", "3", "a_f", "3", "s_multi", "aaaa2", "s_multi", "bbbb2", "i_multi", "444", "i_multi", "777")
+        .add(id, "4", "a_s", "hello", "a_i", "4", "a_f", "4", "s_multi", "aaaa3", "s_multi", "bbbb3", "i_multi", "4444", "i_multi", "7777")
+        .add(id, "1", "a_s", "hello", "a_i", "1", "a_f", "1", "s_multi", "aaaa4", "s_multi", "bbbb4", "i_multi", "44444", "i_multi", "77777")
+        .commit(cluster.getSolrClient(), "collection1");
+
+    StreamExpression expression;
+    TupleStream stream;
+    Tuple t;
+
+    String zkHost = cluster.getZkServer().getZkAddress();
+    StreamFactory factory = new StreamFactory()
+        .withCollectionZkHost("collection1", cluster.getZkServer().getZkAddress())
+        .withCollectionZkHost("parallelDestinationCollection1", cluster.getZkServer().getZkAddress())
+        .withFunctionName("topic", TopicStream.class)
+        .withFunctionName("update", UpdateStream.class)
+        .withFunctionName("parallel", ParallelStream.class)
+        .withFunctionName("daemon", DaemonStream.class);
+
+    //Copy all docs to destinationCollection
+    String updateExpression = "daemon(update(parallelDestinationCollection1, batchSize=2, topic(collection1, collection1, q=\"a_s:hello\", fl=\"id,a_s,a_i,a_f,s_multi,i_multi\", partitionKeys=\"a_f\", initialCheckpoint=0, id=\"topic1\")), terminate=true, runInterval=\"1000\", id=\"test\")";
+    TupleStream parallelUpdateStream = factory.constructStream("parallel(collection1, " + updateExpression + ", workers=\"2\", zkHost=\""+zkHost+"\", sort=\"batchNumber asc\")");
+    List<Tuple> tuples = getTuples(parallelUpdateStream);
+    assert(tuples.size() == 2);
+
+
+    ModifiableSolrParams sParams = new ModifiableSolrParams(StreamingTest.mapParams(CommonParams.QT, "/stream", "action", "list"));
+
+    int workersComplete = 0;
+
+    //Daemons should terminate after the topic is completed
+    //Loop through all shards and wait for the daemons to be gone from the listing.
+    for(JettySolrRunner jetty : cluster.getJettySolrRunners()) {
+      INNER:
+      while(true) {
+        SolrStream solrStream = new SolrStream(jetty.getBaseUrl().toString() + "/collection1", sParams);
+        solrStream.open();
+        Tuple tupleResponse = solrStream.read();
+        if (tupleResponse.EOF) {
+          solrStream.close();
+          ++workersComplete;
+          break INNER;
+        } else {
+          solrStream.close();
+          Thread.sleep(1000);
+        }
+      }
+    }
+
+    assertEquals(cluster.getJettySolrRunners().size(), workersComplete);
+
+    cluster.getSolrClient().commit("parallelDestinationCollection1");
+
+    //Ensure that destinationCollection actually has the new docs.
+    expression = StreamExpressionParser.parse("search(parallelDestinationCollection1, q=*:*, fl=\"id,a_s,a_i,a_f,s_multi,i_multi\", sort=\"a_i asc\")");
+    stream = new CloudSolrStream(expression, factory);
+    tuples = getTuples(stream);
+    assertEquals(5, tuples.size());
+
+    Tuple tuple = tuples.get(0);
+    assert(tuple.getLong("id") == 0);
+    assert(tuple.get("a_s").equals("hello"));
+    assert(tuple.getLong("a_i") == 0);
+    assert(tuple.getDouble("a_f") == 0.0);
+    assertList(tuple.getStrings("s_multi"), "aaaa", "bbbb");
+    assertList(tuple.getLongs("i_multi"), Long.parseLong("4"), Long.parseLong("7"));
+
+    tuple = tuples.get(1);
+    assert(tuple.getLong("id") == 1);
+    assert(tuple.get("a_s").equals("hello"));
+    assert(tuple.getLong("a_i") == 1);
+    assert(tuple.getDouble("a_f") == 1.0);
+    assertList(tuple.getStrings("s_multi"), "aaaa4", "bbbb4");
+    assertList(tuple.getLongs("i_multi"), Long.parseLong("44444"), Long.parseLong("77777"));
+
+    tuple = tuples.get(2);
+    assert(tuple.getLong("id") == 2);
+    assert(tuple.get("a_s").equals("hello"));
+    assert(tuple.getLong("a_i") == 2);
+    assert(tuple.getDouble("a_f") == 0.0);
+    assertList(tuple.getStrings("s_multi"), "aaaa1", "bbbb1");
+    assertList(tuple.getLongs("i_multi"), Long.parseLong("44"), Long.parseLong("77"));
+
+    tuple = tuples.get(3);
+    assert(tuple.getLong("id") == 3);
+    assert(tuple.get("a_s").equals("hello"));
+    assert(tuple.getLong("a_i") == 3);
+    assert(tuple.getDouble("a_f") == 3.0);
+    assertList(tuple.getStrings("s_multi"), "aaaa2", "bbbb2");
+    assertList(tuple.getLongs("i_multi"), Long.parseLong("444"), Long.parseLong("777"));
+
+    tuple = tuples.get(4);
+    assert(tuple.getLong("id") == 4);
+    assert(tuple.get("a_s").equals("hello"));
+    assert(tuple.getLong("a_i") == 4);
+    assert(tuple.getDouble("a_f") == 4.0);
+    assertList(tuple.getStrings("s_multi"), "aaaa3", "bbbb3");
+    assertList(tuple.getLongs("i_multi"), Long.parseLong("4444"), Long.parseLong("7777"));
+
+    CollectionAdminRequest.deleteCollection("parallelDestinationCollection1").process(cluster.getSolrClient());
+  }
+
+
+
   ////////////////////////////////////////////
   @Test
   public void testCommitStream() throws Exception {


[48/50] [abbrv] lucene-solr:jira/solr-8542-v2: Merge commit '67f3f43' into jira/solr-8542-v2

Posted by cp...@apache.org.
Merge commit '67f3f43' into jira/solr-8542-v2


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/88058099
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/88058099
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/88058099

Branch: refs/heads/jira/solr-8542-v2
Commit: 880580993eec9903758b75aaec143177c1430024
Parents: bfa05b8 67f3f43
Author: Christine Poerschke <cp...@apache.org>
Authored: Mon Oct 24 13:03:19 2016 -0500
Committer: Christine Poerschke <cp...@apache.org>
Committed: Mon Oct 24 13:03:19 2016 -0500

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |   25 +
 .../analysis/core/DecimalDigitFilter.java       |    2 +-
 .../lucene/analysis/hunspell/Stemmer.java       |    2 +-
 .../analysis/core/TestDecimalDigitFilter.java   |  149 +-
 .../analysis/ja/JapaneseNumberFilter.java       |   11 +
 .../lucene/analysis/ja/TestFactories.java       |  203 ++
 .../lucene54/Lucene54DocValuesProducer.java     |   14 +-
 .../lucene/codecs/lucene60/Lucene60Codec.java   |    2 +-
 .../lucene50/TestLucene50SegmentInfoFormat.java |   40 +
 .../lucene/codecs/lucene60/Lucene60RWCodec.java |   38 +
 .../lucene/benchmark/byTask/tasks/ReadTask.java |   15 +-
 .../blocktreeords/OrdsIntersectTermsEnum.java   |    2 +-
 .../codecs/memory/DirectPostingsFormat.java     |    2 +-
 .../lucene/codecs/memory/FSTOrdTermsReader.java |    2 +-
 .../lucene/codecs/memory/FSTTermsReader.java    |    2 +-
 .../codecs/simpletext/SimpleTextBKDReader.java  |    1 -
 .../simpletext/SimpleTextPointsReader.java      |   81 +-
 .../simpletext/SimpleTextPointsWriter.java      |   10 +-
 .../apache/lucene/codecs/DocValuesConsumer.java |   45 +-
 .../lucene/codecs/MutablePointValues.java       |   42 +
 .../lucene/codecs/MutablePointsReader.java      |   41 -
 .../org/apache/lucene/codecs/PointsFormat.java  |   35 +-
 .../org/apache/lucene/codecs/PointsReader.java  |    5 +-
 .../org/apache/lucene/codecs/PointsWriter.java  |  204 +-
 .../codecs/blocktree/IntersectTermsEnum.java    |    2 +-
 .../codecs/lucene60/Lucene60PointsReader.java   |   84 +-
 .../codecs/lucene60/Lucene60PointsWriter.java   |   28 +-
 .../lucene/codecs/lucene70/IndexedDISI.java     |  269 +++
 .../lucene70/Lucene70DocValuesConsumer.java     | 1001 +++-----
 .../lucene70/Lucene70DocValuesFormat.java       |  123 +-
 .../lucene70/Lucene70DocValuesProducer.java     | 2229 ++++++------------
 .../codecs/lucene70/Lucene70NormsConsumer.java  |    8 +-
 .../codecs/lucene70/Lucene70NormsFormat.java    |    4 +-
 .../codecs/lucene70/Lucene70NormsProducer.java  |   12 +-
 .../lucene/codecs/lucene70/SparseDISI.java      |  114 -
 .../org/apache/lucene/document/BinaryPoint.java |    3 +-
 .../java/org/apache/lucene/document/Field.java  |   18 +-
 .../apache/lucene/index/AutomatonTermsEnum.java |    6 +-
 .../org/apache/lucene/index/CheckIndex.java     |   19 +-
 .../org/apache/lucene/index/CodecReader.java    |   12 +
 .../lucene/index/EmptyDocValuesProducer.java    |    4 +-
 .../java/org/apache/lucene/index/FieldInfo.java |    4 +
 .../apache/lucene/index/FilterCodecReader.java  |    5 -
 .../apache/lucene/index/FilterLeafReader.java   |    4 +-
 .../org/apache/lucene/index/LeafReader.java     |    5 +-
 .../lucene/index/LegacySortedSetDocValues.java  |    4 +-
 .../apache/lucene/index/MergeReaderWrapper.java |    4 +-
 .../apache/lucene/index/ParallelLeafReader.java |   97 +-
 .../org/apache/lucene/index/PointValues.java    |   64 +-
 .../apache/lucene/index/PointValuesWriter.java  |   66 +-
 .../org/apache/lucene/index/SegmentReader.java  |    8 +-
 .../index/SingletonSortedSetDocValues.java      |    2 +-
 .../lucene/index/SlowCodecReaderWrapper.java    |   46 +-
 .../apache/lucene/index/SortedDocValues.java    |    2 +-
 .../apache/lucene/index/SortedSetDocValues.java |    2 +-
 .../apache/lucene/index/SortingLeafReader.java  |   33 +-
 .../lucene/search/DisjunctionMaxQuery.java      |    2 +-
 .../lucene/search/DisjunctionMaxScorer.java     |    2 +-
 .../lucene/search/DocValuesRewriteMethod.java   |    2 +-
 .../org/apache/lucene/search/LRUQueryCache.java |   14 +-
 .../apache/lucene/search/PointInSetQuery.java   |   24 +-
 .../apache/lucene/search/PointRangeQuery.java   |   28 +-
 .../org/apache/lucene/util/DocIdSetBuilder.java |    2 +-
 .../lucene/util/automaton/ByteRunAutomaton.java |    2 +-
 .../util/automaton/CharacterRunAutomaton.java   |    4 +-
 .../util/automaton/CompiledAutomaton.java       |    2 +-
 .../lucene/util/automaton/RunAutomaton.java     |   15 +-
 .../org/apache/lucene/util/bkd/BKDReader.java   |   13 +-
 .../org/apache/lucene/util/bkd/BKDWriter.java   |   24 +-
 .../util/bkd/MutablePointsReaderUtils.java      |   10 +-
 .../util/packed/DirectMonotonicReader.java      |    2 -
 .../lucene50/TestLucene50FieldInfoFormat.java   |   33 -
 .../lucene50/TestLucene60FieldInfoFormat.java   |   33 +
 .../lucene/codecs/lucene70/TestIndexedDISI.java |  223 ++
 .../lucene70/TestLucene70DocValuesFormat.java   |  116 +-
 .../lucene/codecs/lucene70/TestSparseDISI.java  |   94 -
 .../org/apache/lucene/index/Test2BPoints.java   |    4 +-
 .../apache/lucene/index/TestBagOfPositions.java |    4 +-
 .../apache/lucene/index/TestPointValues.java    |   25 +-
 .../org/apache/lucene/index/TestTermsEnum.java  |    4 +-
 .../lucene/search/TestDisjunctionMaxQuery.java  |   16 +
 .../apache/lucene/util/TestDocIdSetBuilder.java |   14 +-
 .../lucene/util/automaton/TestUTF32ToUTF8.java  |    4 +-
 .../util/bkd/TestMutablePointsReaderUtils.java  |   33 +-
 .../search/highlight/TermVectorLeafReader.java  |    2 +-
 .../search/vectorhighlight/FieldQuery.java      |    7 +
 .../FastVectorHighlighterTest.java              |   38 +
 .../lucene/search/join/BlockJoinSelector.java   |    4 +-
 .../join/PointInSetIncludingScoreQuery.java     |   10 +-
 .../apache/lucene/index/memory/MemoryIndex.java |   91 +-
 .../lucene/index/memory/TestMemoryIndex.java    |    6 +-
 .../queryparser/simple/SimpleQueryParser.java   |    4 +
 .../simple/TestSimpleQueryParser.java           |    7 +
 .../org/apache/lucene/document/LatLonPoint.java |    9 +-
 .../document/LatLonPointDistanceQuery.java      |    4 +-
 .../document/LatLonPointInPolygonQuery.java     |    4 +-
 .../apache/lucene/document/RangeFieldQuery.java |    8 +-
 .../spatial3d/PointInGeo3DShapeQuery.java       |    4 +-
 .../apache/lucene/spatial3d/TestGeo3DPoint.java |    4 +-
 .../analysis/BaseTokenStreamTestCase.java       |    3 +-
 .../apache/lucene/analysis/MockTokenizer.java   |    6 +-
 .../codecs/asserting/AssertingPointsFormat.java |  139 +-
 .../codecs/cranky/CrankyPointsFormat.java       |  110 +-
 .../lucene/index/AssertingLeafReader.java       |  165 +-
 .../lucene/index/BaseNormsFormatTestCase.java   |  114 +-
 .../lucene/index/BasePointsFormatTestCase.java  |   49 +-
 .../org/apache/lucene/index/RandomCodec.java    |    9 +-
 .../org/apache/lucene/search/QueryUtils.java    |    2 +-
 .../org/apache/lucene/util/LuceneTestCase.java  |    4 +-
 solr/CHANGES.txt                                |   36 +
 solr/bin/solr                                   |   28 +-
 solr/bin/solr.cmd                               |   41 +-
 solr/bin/solr.in.cmd                            |    3 +
 solr/bin/solr.in.sh                             |    5 +-
 .../apache/solr/cloud/CreateCollectionCmd.java  |    1 +
 .../org/apache/solr/cloud/ZkController.java     |   23 +-
 .../org/apache/solr/core/CoreContainer.java     |   26 +-
 .../java/org/apache/solr/core/SolrConfig.java   |   30 +-
 .../src/java/org/apache/solr/core/SolrCore.java |   83 +
 .../java/org/apache/solr/core/ZkContainer.java  |    4 +-
 .../core/snapshots/SolrSnapshotManager.java     |  130 +-
 .../org/apache/solr/handler/IndexFetcher.java   |   16 +-
 .../solr/handler/MoreLikeThisHandler.java       |    2 +-
 .../org/apache/solr/handler/RestoreCore.java    |   16 +-
 .../org/apache/solr/handler/SQLHandler.java     |   18 +-
 .../org/apache/solr/handler/StreamHandler.java  |   54 +-
 .../solr/handler/admin/CoreAdminOperation.java  |    4 +-
 .../solr/handler/admin/DeleteSnapshotOp.java    |   37 +-
 .../solr/handler/component/QueryComponent.java  |    6 +-
 .../solr/handler/component/ResponseBuilder.java |    4 +-
 .../solr/index/SlowCompositeReaderWrapper.java  |    2 +-
 .../org/apache/solr/schema/CurrencyField.java   |    6 +-
 .../java/org/apache/solr/schema/EnumField.java  |    4 +-
 .../java/org/apache/solr/schema/FieldType.java  |   38 +-
 .../java/org/apache/solr/schema/LatLonType.java |    6 +-
 .../java/org/apache/solr/schema/PointType.java  |    6 +-
 .../apache/solr/schema/PreAnalyzedField.java    |    4 +-
 .../org/apache/solr/schema/SchemaField.java     |   63 +-
 .../org/apache/solr/schema/TrieDoubleField.java |    1 +
 .../java/org/apache/solr/schema/TrieField.java  |    3 +-
 .../org/apache/solr/schema/TrieIntField.java    |    1 +
 .../org/apache/solr/schema/TrieLongField.java   |    1 +
 .../apache/solr/search/LuceneQParserPlugin.java |    8 +-
 .../java/org/apache/solr/search/QParser.java    |   10 +
 .../apache/solr/search/facet/FacetBucket.java   |  189 ++
 .../apache/solr/search/facet/FacetModule.java   |  166 --
 .../search/facet/FacetRequestSortedMerger.java  |   25 +-
 .../solr/security/PKIAuthenticationPlugin.java  |   18 +-
 .../org/apache/solr/servlet/HttpSolrCall.java   |    3 +-
 .../apache/solr/servlet/SolrDispatchFilter.java |    1 +
 .../solr/servlet/StartupLoggingUtils.java       |    9 +
 .../apache/solr/uninverting/FieldCacheImpl.java |   22 +-
 .../org/apache/solr/update/SolrIndexWriter.java |   14 +-
 .../src/java/org/apache/solr/util/SolrCLI.java  |  215 +-
 .../cdcr-source-disabled/conf/schema.xml        |   29 +
 .../cdcr-source-disabled/conf/solrconfig.xml    |   60 +
 .../configsets/cdcr-source-disabled/schema.xml  |   29 -
 .../cdcr-source-disabled/solrconfig.xml         |   60 -
 .../solr/configsets/cdcr-source/conf/schema.xml |   29 +
 .../configsets/cdcr-source/conf/solrconfig.xml  |   76 +
 .../solr/configsets/cdcr-source/schema.xml      |   29 -
 .../solr/configsets/cdcr-source/solrconfig.xml  |   76 -
 .../solr/configsets/cdcr-target/conf/schema.xml |   29 +
 .../configsets/cdcr-target/conf/solrconfig.xml  |   63 +
 .../solr/configsets/cdcr-target/schema.xml      |   29 -
 .../solr/configsets/cdcr-target/solrconfig.xml  |   63 -
 .../solr/HelloWorldSolrCloudTestCase.java       |   94 +
 .../solr/client/solrj/ConnectionReuseTest.java  |  198 --
 .../client/solrj/impl/ConnectionReuseTest.java  |  196 ++
 .../solr/cloud/BaseCdcrDistributedZkTest.java   |    1 +
 .../apache/solr/cloud/CdcrBootstrapTest.java    |   76 +-
 ...ConcurrentDeleteAndCreateCollectionTest.java |   12 +-
 .../cloud/DistribJoinFromCollectionTest.java    |   25 +-
 .../solr/cloud/TestAuthenticationFramework.java |   45 +-
 .../solr/cloud/TestCloudDeleteByQuery.java      |   15 +-
 .../solr/cloud/TestCloudPseudoReturnFields.java |   21 +-
 .../apache/solr/cloud/TestConfigSetsAPI.java    |   13 +-
 .../cloud/TestConfigSetsAPIExclusivity.java     |   10 +-
 .../solr/cloud/TestConfigSetsAPIZkFailure.java  |    2 +-
 .../solr/cloud/TestMiniSolrCloudCluster.java    |   54 +-
 .../solr/cloud/TestMiniSolrCloudClusterSSL.java |   50 +-
 .../apache/solr/cloud/TestRandomFlRTGCloud.java |   40 +-
 .../solr/cloud/TestRequestForwarding.java       |    4 +-
 .../cloud/TestSolrCloudWithKerberosAlt.java     |    3 +-
 .../TestSolrCloudWithSecureImpersonation.java   |    8 +-
 .../TestStressCloudBlindAtomicUpdates.java      |   35 +-
 .../cloud/TestTolerantUpdateProcessorCloud.java |   23 +-
 .../TestTolerantUpdateProcessorRandomCloud.java |   29 +-
 .../org/apache/solr/cloud/rule/RulesTest.java   |   13 +-
 .../solr/core/BlobRepositoryCloudTest.java      |   12 +-
 .../test/org/apache/solr/core/HelloStream.java  |  100 +
 .../org/apache/solr/core/TestCodecSupport.java  |    2 +-
 .../org/apache/solr/core/TestCustomStream.java  |   94 +
 .../core/snapshots/TestSolrCoreSnapshots.java   |  127 +-
 .../solr/handler/PingRequestHandlerTest.java    |   12 +-
 .../apache/solr/handler/TestReqParamsAPI.java   |    4 +-
 .../solr/handler/TestSQLHandlerNonCloud.java    |   92 +
 .../handler/component/SearchHandlerTest.java    |   15 +-
 .../TestSubQueryTransformerDistrib.java         |   18 +-
 .../solr/schema/TestManagedSchemaAPI.java       |    3 +-
 .../test/org/apache/solr/search/TestDocSet.java |    2 +-
 .../apache/solr/search/TestIndexSearcher.java   |    6 +-
 .../search/facet/TestJsonFacetRefinement.java   |  214 ++
 .../search/join/BlockJoinFacetDistribTest.java  |   13 +-
 .../solr/search/stats/TestDistribIDF.java       |    7 +-
 solr/server/resources/log4j.properties          |    2 +-
 .../solrj/impl/ConcurrentUpdateSolrClient.java  |    3 +-
 .../solr/client/solrj/impl/HttpClientUtil.java  |   15 +-
 .../solr/client/solrj/impl/HttpSolrClient.java  |    6 +-
 .../impl/SolrHttpClientContextBuilder.java      |   12 +-
 .../client/solrj/io/stream/FetchStream.java     |  314 +++
 .../solrj/request/CollectionAdminRequest.java   |   14 +
 .../solr/common/params/CoreAdminParams.java     |    5 +
 .../client/solrj/SolrSchemalessExampleTest.java |   16 +-
 .../solrj/embedded/SolrExampleJettyTest.java    |    6 +-
 .../solrj/impl/HttpSolrClientConPoolTest.java   |  189 ++
 .../impl/HttpSolrClientSSLAuthConPoolTest.java  |   40 +
 .../solrj/io/stream/StreamExpressionTest.java   |  167 ++
 .../cloud/TestCollectionStateWatchers.java      |    4 +-
 .../apache/solr/cloud/MiniSolrCloudCluster.java |   45 +-
 .../org/apache/solr/util/RestTestHarness.java   |   14 +-
 221 files changed, 6220 insertions(+), 5081 deletions(-)
----------------------------------------------------------------------



[30/50] [abbrv] lucene-solr:jira/solr-8542-v2: SOLR-9546: Eliminate unnecessary boxing/unboxing going on in SolrParams

Posted by cp...@apache.org.
SOLR-9546: Eliminate unnecessary boxing/unboxing going on in SolrParams


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/ccbafdc4
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/ccbafdc4
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/ccbafdc4

Branch: refs/heads/jira/solr-8542-v2
Commit: ccbafdc403fb66e4becfe1b934957f6247b07a7a
Parents: bc0116a
Author: Noble Paul <no...@apache.org>
Authored: Fri Oct 21 18:58:33 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Fri Oct 21 18:58:33 2016 +0530

----------------------------------------------------------------------
 solr/CHANGES.txt                                |   2 +
 .../org/apache/solr/cloud/ZkController.java     |   2 +-
 .../apache/solr/handler/DumpRequestHandler.java |   6 +-
 .../apache/solr/handler/ReplicationHandler.java |   2 +-
 .../solr/response/BinaryResponseWriter.java     |   3 +-
 .../solr/response/JSONResponseWriter.java       |   3 +-
 .../apache/solr/search/HashQParserPlugin.java   |   4 +-
 .../TextLogisticRegressionQParserPlugin.java    |   2 +-
 .../apache/solr/search/mlt/CloudMLTQParser.java |  29 ++---
 .../solr/search/mlt/SimpleMLTQParser.java       |  33 ++---
 .../apache/solr/common/params/SolrParams.java   | 124 +++++++++++++++----
 11 files changed, 133 insertions(+), 77 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ccbafdc4/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index b4dcf4c..7228559 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -220,6 +220,8 @@ Optimizations
 * SOLR-9566: Don't put replicas into recovery when first creating a Collection
   (Alan Woodward)
 
+* SOLR-9546: Eliminate unnecessary boxing/unboxing going on in SolrParams (Pushkar Raste, noble)
+
 Other Changes
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ccbafdc4/solr/core/src/java/org/apache/solr/cloud/ZkController.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkController.java b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
index 9b0a90e..c0a8d55 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ZkController.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
@@ -1887,7 +1887,7 @@ public class ZkController {
         elect.setup(context);
         electionContexts.put(contextKey, context);
         
-        elect.retryElection(context, params.getBool(REJOIN_AT_HEAD_PROP));
+        elect.retryElection(context, params.getBool(REJOIN_AT_HEAD_PROP, false));
       }
     } catch (Exception e) {
       throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to rejoin election", e);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ccbafdc4/solr/core/src/java/org/apache/solr/handler/DumpRequestHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/DumpRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/DumpRequestHandler.java
index 9b0f959..ecafb52 100644
--- a/solr/core/src/java/org/apache/solr/handler/DumpRequestHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/DumpRequestHandler.java
@@ -56,13 +56,15 @@ public class DumpRequestHandler extends RequestHandlerBase
       }
     }
 
-    if(Boolean.TRUE.equals( req.getParams().getBool("getdefaults"))){
+    if(req.getParams().getBool("getdefaults", false)){
       NamedList def = (NamedList) initArgs.get(PluginInfo.DEFAULTS);
       rsp.add("getdefaults", def);
     }
 
 
-    if(Boolean.TRUE.equals( req.getParams().getBool("initArgs"))) rsp.add("initArgs", initArgs);
+    if(req.getParams().getBool("initArgs", false)) {
+      rsp.add("initArgs", initArgs);
+    }
         
     // Write the streams...
     if( req.getContentStreams() != null ) {
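
For clarity, a small self-contained sketch (not part of the patch) of the pattern this commit standardizes on: asking SolrParams for a primitive boolean with an explicit default, instead of a nullable Boolean that has to be boxed, null-checked and unboxed at every call site:

    import org.apache.solr.common.params.ModifiableSolrParams;

    /** Illustrative comparison, not part of the commit. */
    public class BoxingExample {
      public static void main(String[] args) {
        ModifiableSolrParams params = new ModifiableSolrParams();
        params.set("initArgs", "true");

        // Old pattern: getBool(name) returns a nullable Boolean, so callers
        // box, null-check and unbox.
        Boolean boxed = params.getBool("getdefaults");
        boolean viaBoxing = Boolean.TRUE.equals(boxed);

        // New pattern: getBool(name, default) returns a primitive boolean directly.
        boolean viaPrimitive = params.getBool("getdefaults", false);

        System.out.println(viaBoxing + " " + viaPrimitive);   // prints: false false
      }
    }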

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ccbafdc4/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
index 84e1ba2..08b6f39 100644
--- a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
@@ -1436,7 +1436,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
       sLen = params.get(LEN);
       compress = params.get(COMPRESSION);
       useChecksum = params.getBool(CHECKSUM, false);
-      indexGen = params.getLong(GENERATION, null);
+      indexGen = params.getLong(GENERATION);
       if (useChecksum) {
         checksum = new Adler32();
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ccbafdc4/solr/core/src/java/org/apache/solr/response/BinaryResponseWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/response/BinaryResponseWriter.java b/solr/core/src/java/org/apache/solr/response/BinaryResponseWriter.java
index 9634e63..11c6074 100644
--- a/solr/core/src/java/org/apache/solr/response/BinaryResponseWriter.java
+++ b/solr/core/src/java/org/apache/solr/response/BinaryResponseWriter.java
@@ -47,8 +47,7 @@ public class BinaryResponseWriter implements BinaryQueryResponseWriter {
   @Override
   public void write(OutputStream out, SolrQueryRequest req, SolrQueryResponse response) throws IOException {
     Resolver resolver = new Resolver(req, response.getReturnFields());
-    Boolean omitHeader = req.getParams().getBool(CommonParams.OMIT_HEADER);
-    if (omitHeader != null && omitHeader) response.removeResponseHeader();
+    if (req.getParams().getBool(CommonParams.OMIT_HEADER, false)) response.removeResponseHeader();
     new JavaBinCodec(resolver).setWritableDocFields(resolver).marshal(response.getValues(), out);
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ccbafdc4/solr/core/src/java/org/apache/solr/response/JSONResponseWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/response/JSONResponseWriter.java b/solr/core/src/java/org/apache/solr/response/JSONResponseWriter.java
index 522030f..cd6648b 100644
--- a/solr/core/src/java/org/apache/solr/response/JSONResponseWriter.java
+++ b/solr/core/src/java/org/apache/solr/response/JSONResponseWriter.java
@@ -92,8 +92,7 @@ class JSONWriter extends TextResponseWriter {
     if(wrapperFunction!=null) {
         writer.write(wrapperFunction + "(");
     }
-    Boolean omitHeader = req.getParams().getBool(CommonParams.OMIT_HEADER);
-    if(omitHeader != null && omitHeader) rsp.removeResponseHeader();
+    if(req.getParams().getBool(CommonParams.OMIT_HEADER, false)) rsp.removeResponseHeader();
     writeNamedList(null, rsp.getValues());
     if(wrapperFunction!=null) {
         writer.write(')');

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ccbafdc4/solr/core/src/java/org/apache/solr/search/HashQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/HashQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/HashQParserPlugin.java
index df493f0..3e0fc22 100644
--- a/solr/core/src/java/org/apache/solr/search/HashQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/HashQParserPlugin.java
@@ -66,8 +66,8 @@ public class HashQParserPlugin extends QParserPlugin {
     }
 
     public Query parse() {
-      int workers = localParams.getInt("workers");
-      int worker = localParams.getInt("worker");
+      int workers = localParams.getInt("workers", 0);
+      int worker = localParams.getInt("worker", 0);
       String keys = params.get("partitionKeys");
       keys = keys.replace(" ", "");
       return new HashQuery(keys, workers, worker);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ccbafdc4/solr/core/src/java/org/apache/solr/search/TextLogisticRegressionQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/TextLogisticRegressionQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/TextLogisticRegressionQParserPlugin.java
index e1d3b7b..c1b8906 100644
--- a/solr/core/src/java/org/apache/solr/search/TextLogisticRegressionQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/TextLogisticRegressionQParserPlugin.java
@@ -74,7 +74,7 @@ public class TextLogisticRegressionQParserPlugin extends QParserPlugin {
       String[] terms = params.get("terms").split(",");
       String ws = params.get("weights");
       String dfsStr = params.get("idfs");
-      int iteration = params.getInt("iteration");
+      int iteration = params.getInt("iteration", 0);
       String outcome = params.get("outcome");
       int positiveLabel = params.getInt("positiveLabel", 1);
       double threshold = params.getDouble("threshold", 0.5);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ccbafdc4/solr/core/src/java/org/apache/solr/search/mlt/CloudMLTQParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/mlt/CloudMLTQParser.java b/solr/core/src/java/org/apache/solr/search/mlt/CloudMLTQParser.java
index 0f85feb..9ff5a3c 100644
--- a/solr/core/src/java/org/apache/solr/search/mlt/CloudMLTQParser.java
+++ b/solr/core/src/java/org/apache/solr/search/mlt/CloudMLTQParser.java
@@ -69,29 +69,16 @@ public class CloudMLTQParser extends QParser {
     Map<String,Float> boostFields = new HashMap<>();
     MoreLikeThis mlt = new MoreLikeThis(req.getSearcher().getIndexReader());
     
-    if(localParams.getInt("mintf") != null)
-      mlt.setMinTermFreq(localParams.getInt("mintf"));
-
-    mlt.setMinDocFreq(localParams.getInt("mindf", 0));
-
-    if(localParams.get("minwl") != null)
-      mlt.setMinWordLen(localParams.getInt("minwl"));
-
-    if(localParams.get("maxwl") != null)
-      mlt.setMaxWordLen(localParams.getInt("maxwl"));
-
-    if(localParams.get("maxqt") != null)
-      mlt.setMaxQueryTerms(localParams.getInt("maxqt"));
-
-    if(localParams.get("maxntp") != null)
-      mlt.setMaxNumTokensParsed(localParams.getInt("maxntp"));
-    
-    if(localParams.get("maxdf") != null) {
-      mlt.setMaxDocFreq(localParams.getInt("maxdf"));
-    }
+    mlt.setMinTermFreq(localParams.getInt("mintf", MoreLikeThis.DEFAULT_MIN_TERM_FREQ));
+    mlt.setMinDocFreq(localParams.getInt("mindf", MoreLikeThis.DEFAULT_MIN_DOC_FREQ));
+    mlt.setMinWordLen(localParams.getInt("minwl", MoreLikeThis.DEFAULT_MIN_WORD_LENGTH));
+    mlt.setMaxWordLen(localParams.getInt("maxwl", MoreLikeThis.DEFAULT_MAX_WORD_LENGTH));
+    mlt.setMaxQueryTerms(localParams.getInt("maxqt", MoreLikeThis.DEFAULT_MAX_QUERY_TERMS));
+    mlt.setMaxNumTokensParsed(localParams.getInt("maxntp", MoreLikeThis.DEFAULT_MAX_NUM_TOKENS_PARSED));
+    mlt.setMaxDocFreq(localParams.getInt("maxdf", MoreLikeThis.DEFAULT_MAX_DOC_FREQ));
 
     if(localParams.get("boost") != null) {
-      mlt.setBoost(localParams.getBool("boost"));
+      mlt.setBoost(localParams.getBool("boost", false));
       boostFields = SolrPluginUtils.parseFieldBoosts(qf);
     }
 

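For context, a small sketch (not part of this commit) of how these local params reach the MLT query parser from a SolrJ client; the field name "lowerfilt", the mintf/mindf values, and the seed document id "17" are illustrative assumptions:

    import org.apache.solr.client.solrj.SolrQuery;

    public class MltQueryDemo {                        // hypothetical demo class
      public static void main(String[] args) {
        // The MLT query parser takes the uniqueKey value of the seed document as the query body;
        // mintf, mindf, maxdf and boost are the local params the parser now reads with explicit defaults.
        SolrQuery query = new SolrQuery("{!mlt qf=lowerfilt mintf=2 mindf=3 boost=true}17");
        System.out.println(query.getQuery());
      }
    }
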
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ccbafdc4/solr/core/src/java/org/apache/solr/search/mlt/SimpleMLTQParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/mlt/SimpleMLTQParser.java b/solr/core/src/java/org/apache/solr/search/mlt/SimpleMLTQParser.java
index da3a487..50803df 100644
--- a/solr/core/src/java/org/apache/solr/search/mlt/SimpleMLTQParser.java
+++ b/solr/core/src/java/org/apache/solr/search/mlt/SimpleMLTQParser.java
@@ -69,30 +69,17 @@ public class SimpleMLTQParser extends QParser {
       ScoreDoc[] scoreDocs = td.scoreDocs;
       MoreLikeThis mlt = new MoreLikeThis(req.getSearcher().getIndexReader());
       
-      if(localParams.getInt("mintf") != null)
-        mlt.setMinTermFreq(localParams.getInt("mintf"));
-      
-      if(localParams.getInt("mindf") != null)
-      mlt.setMinDocFreq(localParams.getInt("mindf"));
-      
-      if(localParams.get("minwl") != null)
-        mlt.setMinWordLen(localParams.getInt("minwl"));
-
-      if(localParams.get("maxwl") != null)
-        mlt.setMaxWordLen(localParams.getInt("maxwl"));
-
-      if(localParams.get("maxqt") != null)
-        mlt.setMaxQueryTerms(localParams.getInt("maxqt"));
-
-      if(localParams.get("maxntp") != null)
-        mlt.setMaxNumTokensParsed(localParams.getInt("maxntp"));
-
-      if(localParams.get("maxdf") != null) {
-        mlt.setMaxDocFreq(localParams.getInt("maxdf"));
-      }
-
+      mlt.setMinTermFreq(localParams.getInt("mintf", MoreLikeThis.DEFAULT_MIN_TERM_FREQ));
+      mlt.setMinDocFreq(localParams.getInt("mindf", MoreLikeThis.DEFAULT_MIN_DOC_FREQ));
+      mlt.setMinWordLen(localParams.getInt("minwl", MoreLikeThis.DEFAULT_MIN_WORD_LENGTH));
+      mlt.setMaxWordLen(localParams.getInt("maxwl", MoreLikeThis.DEFAULT_MAX_WORD_LENGTH));
+      mlt.setMaxQueryTerms(localParams.getInt("maxqt", MoreLikeThis.DEFAULT_MAX_QUERY_TERMS));
+      mlt.setMaxNumTokensParsed(localParams.getInt("maxntp", MoreLikeThis.DEFAULT_MAX_NUM_TOKENS_PARSED));
+      mlt.setMaxDocFreq(localParams.getInt("maxdf", MoreLikeThis.DEFAULT_MAX_DOC_FREQ));
+
+      // what happens if value is explicitly set to false?
       if(localParams.get("boost") != null) {
-        mlt.setBoost(localParams.getBool("boost"));
+        mlt.setBoost(localParams.getBool("boost", false));
         boostFields = SolrPluginUtils.parseFieldBoosts(qf);
       }
       

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ccbafdc4/solr/solrj/src/java/org/apache/solr/common/params/SolrParams.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/SolrParams.java b/solr/solrj/src/java/org/apache/solr/common/params/SolrParams.java
index 0b74c14..e884a5b 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/SolrParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/SolrParams.java
@@ -92,11 +92,24 @@ public abstract class SolrParams implements Serializable, MapSerializable {
     return val!=null ? val : getParams(param);
   }
 
-  /** Returns the Boolean value of the param, or null if not set */
+  /** 
+   * Returns the Boolean value of the param, or null if not set. 
+   * Use this method only when you want to be explicit 
+   * about absence of a value (<code>null</code>) vs the default value <code>false</code>.  
+   * @see #getBool(String, boolean) 
+   * @see #getPrimitiveBool(String) 
+   *  
+   **/
+  
   public Boolean getBool(String param) {
     String val = get(param);
     return val==null ? null : StrUtils.parseBool(val);
   }
+  
+  /** Returns the boolean value of the param, or <code>false</code> if not set */
+  public boolean getPrimitiveBool(String param) {
+    return getBool(param, false);
+  }
 
   /** Returns the boolean value of the param, or def if not set */
   public boolean getBool(String param, boolean def) {
@@ -104,21 +117,46 @@ public abstract class SolrParams implements Serializable, MapSerializable {
     return val==null ? def : StrUtils.parseBool(val);
   }
 
-  /** Returns the Boolean value of the field param,
-      or the value for param, or null if neither is set. */
+  /** 
+   * Returns the Boolean value of the field param,
+   * or the value for param, or null if neither is set. 
+   * Use this method only when you want to be explicit 
+   * about absence of a value (<code>null</code>) vs the default value <code>false</code>.  
+   * @see #getFieldBool(String, String, boolean) 
+   * @see #getPrimitiveFieldBool(String, String)    
+   **/
   public Boolean getFieldBool(String field, String param) {
     String val = getFieldParam(field, param);
     return val==null ? null : StrUtils.parseBool(val);
   }
+  
+  /**
+   * Returns the boolean value of the field param, or
+   * the value for param or 
+   * the default value of boolean - <code>false</code> 
+   */
+  public boolean getPrimitiveFieldBool(String field, String param) {
+    return getFieldBool(field, param, false);
+  }
 
-  /** Returns the boolean value of the field param,
-  or the value for param, or def if neither is set. */
+  /** 
+   * Returns the boolean value of the field param,
+   * or the value for param, or def if neither is set. 
+   * 
+   * */
   public boolean getFieldBool(String field, String param, boolean def) {
     String val = getFieldParam(field, param);
     return val==null ? def : StrUtils.parseBool(val);
   }
 
-  /** Returns the Integer value of the param, or null if not set */
+  /** 
+   * Returns the Integer value of the param, or null if not set. 
+   * Use this method only when you want to be explicit 
+   * about absence of a value (<code>null</code>) vs the default value for int -
+   * zero (<code>0</code>).  
+   * @see #getInt(String, int) 
+   * @see #getPrimitiveInt(String) 
+   * */
   public Integer getInt(String param) {
     String val = get(param);
     try {
@@ -128,30 +166,33 @@ public abstract class SolrParams implements Serializable, MapSerializable {
       throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, ex.getMessage(), ex );
     }
   }
-
-  /** Returns the Long value of the param, or null if not set */
-  public Long getLong(String param, Long def) {
-    String val = get(param);
-    try {
-      return val== null ? def : Long.parseLong(val);
-    }
-    catch( Exception ex ) {
-      throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, ex.getMessage(), ex );
-    }
+  
+  /**
+   * Returns the int value of the param or 
+   * default value for int - zero (<code>0</code>) if not set. 
+   */
+  public int getPrimitiveInt(String param) {
+    return getInt(param, 0);
   }
 
   /** Returns the int value of the param, or def if not set */
   public int getInt(String param, int def) {
     String val = get(param);
     try {
-      return val==null ? def : Integer.parseInt(val);
+      return val == null ? def : Integer.parseInt(val);
     }
     catch( Exception ex ) {
       throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, ex.getMessage(), ex );
     }
   }
 
-  /** Returns the Long value of the param, or null if not set */
+  /** 
+   * Returns the Long value of the param, or null if not set. 
+   * Use this method only when you want to be explicit 
+   * about absence of a value (<code>null</code>) vs the default value zero (<code>0</code>).  
+   * @see #getLong(String, long) 
+   *
+   **/
   public Long getLong(String param) {
     String val = get(param);
     try {
@@ -173,8 +214,13 @@ public abstract class SolrParams implements Serializable, MapSerializable {
 
 
   /**
+   * Use this method only when you want to be explicit 
+   * about absence of a value (<code>null</code>) vs the default value zero (<code>0</code>).
+   * 
    * @return The int value of the field param, or the value for param
    * or <code>null</code> if neither is set.
+   *   
+   * @see #getFieldInt(String, String, int) 
    **/
   public Integer getFieldInt(String field, String param) {
     String val = getFieldParam(field, param);
@@ -199,7 +245,12 @@ public abstract class SolrParams implements Serializable, MapSerializable {
   }
 
 
-  /** Returns the Float value of the param, or null if not set */
+  /** 
+   * Returns the Float value of the param, or null if not set. 
+   * Use this method only when you want to be explicit 
+   * about absence of a value (<code>null</code>) vs the default value zero (<code>0.0f</code>).
+   * @see #getFloat(String, float)
+   **/
   public Float getFloat(String param) {
     String val = get(param);
     try {
@@ -221,7 +272,13 @@ public abstract class SolrParams implements Serializable, MapSerializable {
     }
   }
 
-  /** Returns the Float value of the param, or null if not set */
+  /** 
+   * Returns the Double value of the param, or null if not set. 
+   * Use this method only when you want to be explicit 
+   * about absence of a value (<code>null</code>) vs the default value zero (<code>0.0d</code>).
+   * @see #getDouble(String, double)
+   *
+   **/
   public Double getDouble(String param) {
     String val = get(param);
     try {
@@ -244,7 +301,15 @@ public abstract class SolrParams implements Serializable, MapSerializable {
   }
 
 
-  /** Returns the float value of the field param. */
+  /** 
+   * Returns the float value of the field param. 
+   * Use this method only when you want to be explicit 
+   * about absence of a value (<code>null</code>) vs the default value zero (<code>0.0f</code>).
+   * 
+   * @see #getFieldFloat(String, String, float)
+   * @see #getPrimitiveFieldFloat(String, String)
+   * 
+   **/
   public Float getFieldFloat(String field, String param) {
     String val = getFieldParam(field, param);
     try {
@@ -254,6 +319,15 @@ public abstract class SolrParams implements Serializable, MapSerializable {
       throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, ex.getMessage(), ex );
     }
   }
+  
+  /**
+   * Returns the float value of the field param or
+   * the value for param or 
+   * the default value for float - zero (<code>0.0f</code>)   
+   */
+  public float getPrimitiveFieldFloat(String field, String param) {
+    return getFieldFloat(field, param, 0.0f);
+  }
 
   /** Returns the float value of the field param,
   or the value for param, or def if neither is set. */
@@ -267,7 +341,13 @@ public abstract class SolrParams implements Serializable, MapSerializable {
     }
   }
 
-  /** Returns the float value of the field param. */
+  /** 
+   * Returns the Double value of the field param, or null if not set. 
+   * Use this method only when you want to be explicit 
+   * about absence of a value (<code>null</code>) vs the default value zero (<code>0.0d</code>).
+   * @see #getFieldDouble(String, String, double)
+   *
+   **/
   public Double getFieldDouble(String field, String param) {
     String val = getFieldParam(field, param);
     try {

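For reference, a minimal usage sketch (not part of this commit) contrasting the nullable accessors with the new primitive variants; the parameter names and values are illustrative assumptions:

    import org.apache.solr.common.params.ModifiableSolrParams;

    public class SolrParamsDemo {                      // hypothetical demo class
      public static void main(String[] args) {
        ModifiableSolrParams params = new ModifiableSolrParams();
        params.set("debug", "true");

        Boolean debug   = params.getBool("debug");           // Boolean: null when the param is absent
        boolean debugOn = params.getPrimitiveBool("debug");  // boolean: false when the param is absent
        int rows        = params.getInt("rows", 10);         // primitive with an explicit default
        Long gen        = params.getLong("generation");      // Long: null, since "generation" was never set
        System.out.println(debug + " " + debugOn + " " + rows + " " + gen);
      }
    }

The nullable getters remain for callers that need to distinguish "not set" from the type's default; the new getPrimitive* variants avoid the boxing that the CHANGES entry refers to.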

[22/50] [abbrv] lucene-solr:jira/solr-8542-v2: LUCENE-7496: Better toString for SweetSpotSimilarity

Posted by cp...@apache.org.
LUCENE-7496: Better toString for SweetSpotSimilarity


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c4b4830a
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c4b4830a
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c4b4830a

Branch: refs/heads/jira/solr-8542-v2
Commit: c4b4830ac1c984e54e23c374ec7b83e598c7fc4b
Parents: 14b6d93
Author: Jan Høydahl <ja...@apache.org>
Authored: Thu Oct 20 14:07:11 2016 +0200
Committer: Jan Høydahl <ja...@apache.org>
Committed: Thu Oct 20 14:07:11 2016 +0200

----------------------------------------------------------------------
 lucene/CHANGES.txt                                  |  2 ++
 .../org/apache/lucene/misc/SweetSpotSimilarity.java | 16 ++++++++++++++++
 2 files changed, 18 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c4b4830a/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 17e0b49..2bd4c28 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -109,6 +109,8 @@ Improvements
 * LUCENE-7439: FuzzyQuery now matches all terms within the specified
   edit distance, even if they are short terms (Mike McCandless)
 
+* LUCENE-7496: Better toString for SweetSpotSimilarity (janhoy)
+
 Optimizations
 
 * LUCENE-7501: BKDReader should not store the split dimension explicitly in the

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c4b4830a/lucene/misc/src/java/org/apache/lucene/misc/SweetSpotSimilarity.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/java/org/apache/lucene/misc/SweetSpotSimilarity.java b/lucene/misc/src/java/org/apache/lucene/misc/SweetSpotSimilarity.java
index ce26080..7eeeae0 100644
--- a/lucene/misc/src/java/org/apache/lucene/misc/SweetSpotSimilarity.java
+++ b/lucene/misc/src/java/org/apache/lucene/misc/SweetSpotSimilarity.java
@@ -223,4 +223,20 @@ public class SweetSpotSimilarity extends ClassicSimilarity {
     
   }
 
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append("SweetSpotSimilarity")
+        .append("(")
+        .append("ln_min="+ln_min+", ")
+        .append("ln_max=").append(ln_max).append(", ")
+        .append("ln_steep=").append(ln_steep).append(", ")
+        .append("tf_base=").append(tf_base).append(", ")
+        .append("tf_min=").append(tf_min).append(", ")
+        .append("tf_hyper_min=").append(tf_hyper_min).append(", ")
+        .append("tf_hyper_max=").append(tf_hyper_max).append(", ")
+        .append("tf_hyper_base=").append(tf_hyper_base).append(", ")
+        .append("tf_hyper_xoffset=").append(tf_hyper_xoffset)
+        .append(")");
+    return sb.toString();
+  }
 }

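A quick sketch (not part of this commit) of where the new toString is useful, assuming the default SweetSpotSimilarity configuration:

    import org.apache.lucene.misc.SweetSpotSimilarity;

    public class SweetSpotToStringDemo {               // hypothetical demo class
      public static void main(String[] args) {
        SweetSpotSimilarity sim = new SweetSpotSimilarity();
        // Prints the configured parameters, e.g.
        // SweetSpotSimilarity(ln_min=..., ln_max=..., ln_steep=..., tf_base=..., ...)
        System.out.println(sim);
      }
    }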

[40/50] [abbrv] lucene-solr:jira/solr-8542-v2: LUCENE-7462: Give doc values APIs an `advanceExact` method.

Posted by cp...@apache.org.
LUCENE-7462: Give doc values APIs an `advanceExact` method.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/9aca4c9d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/9aca4c9d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/9aca4c9d

Branch: refs/heads/jira/solr-8542-v2
Commit: 9aca4c9d56089a9ac89df5fd93be76a4fe822448
Parents: 9b49c72
Author: Adrien Grand <jp...@gmail.com>
Authored: Thu Oct 20 14:07:10 2016 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Mon Oct 24 10:51:23 2016 +0200

----------------------------------------------------------------------
 .../codecs/lucene53/Lucene53NormsProducer.java  |   6 +
 .../lucene54/Lucene54DocValuesProducer.java     |  65 +++-
 .../lucene54/TestLucene54DocValuesFormat.java   |   5 +-
 .../simpletext/SimpleTextDocValuesReader.java   |  95 +++++-
 .../simpletext/SimpleTextDocValuesWriter.java   |   9 +
 .../apache/lucene/codecs/DocValuesConsumer.java |  25 ++
 .../org/apache/lucene/codecs/NormsConsumer.java |   5 +
 .../lucene/codecs/lucene70/IndexedDISI.java     |  88 +++--
 .../lucene70/Lucene70DocValuesProducer.java     |  62 ++++
 .../codecs/lucene70/Lucene70NormsProducer.java  |  11 +
 .../apache/lucene/index/BinaryDocValues.java    |   5 +-
 .../lucene/index/BinaryDocValuesWriter.java     |   5 +
 .../org/apache/lucene/index/CheckIndex.java     |  81 ++++-
 .../java/org/apache/lucene/index/DocValues.java |  95 +++---
 .../apache/lucene/index/DocValuesIterator.java  |  33 ++
 .../lucene/index/FilterBinaryDocValues.java     |   5 +
 .../lucene/index/FilterNumericDocValues.java    |   5 +
 .../index/LegacyBinaryDocValuesWrapper.java     |   8 +
 .../index/LegacyNumericDocValuesWrapper.java    |   9 +
 .../index/LegacySortedDocValuesWrapper.java     |   9 +
 .../LegacySortedNumericDocValuesWrapper.java    |   9 +
 .../index/LegacySortedSetDocValuesWrapper.java  |  10 +
 .../org/apache/lucene/index/MultiDocValues.java | 125 +++++++
 .../apache/lucene/index/NormValuesWriter.java   |   5 +
 .../apache/lucene/index/NumericDocValues.java   |   7 +-
 .../lucene/index/NumericDocValuesWriter.java    |   5 +
 .../apache/lucene/index/ReadersAndUpdates.java  |  10 +
 .../index/SingletonSortedNumericDocValues.java  |  24 +-
 .../index/SingletonSortedSetDocValues.java      |  18 +-
 .../apache/lucene/index/SortedDocValues.java    |   3 +
 .../lucene/index/SortedDocValuesWriter.java     |   5 +
 .../lucene/index/SortedNumericDocValues.java    |   6 +-
 .../index/SortedNumericDocValuesWriter.java     |   5 +
 .../apache/lucene/index/SortedSetDocValues.java |   5 +-
 .../lucene/index/SortedSetDocValuesWriter.java  |   5 +
 .../apache/lucene/index/SortingLeafReader.java  |  32 ++
 .../apache/lucene/search/FieldComparator.java   |  40 +--
 .../lucene/search/SortedNumericSelector.java    |  18 +
 .../apache/lucene/search/SortedSetSelector.java |  36 ++
 .../search/similarities/BM25Similarity.java     |   8 +-
 .../search/similarities/SimilarityBase.java     |   6 +-
 .../search/similarities/TFIDFSimilarity.java    |   8 +-
 .../lucene/codecs/lucene70/TestIndexedDISI.java |  28 +-
 .../lucene70/TestLucene70DocValuesFormat.java   |   4 +-
 .../SortedSetDocValuesFacetCounts.java          |  15 +-
 .../lucene/search/join/BlockJoinSelector.java   | 104 +++++-
 .../search/join/GenericTermsCollector.java      |   7 +
 .../search/join/TestBlockJoinSelector.java      |  12 +
 .../apache/lucene/index/memory/MemoryIndex.java |   6 +
 .../search/TestDiversifiedTopDocsCollector.java |   9 +
 .../lucene/index/AssertingLeafReader.java       |  89 ++++-
 .../index/BaseDocValuesFormatTestCase.java      | 331 ++++++++++++-------
 .../index/BaseIndexFileFormatTestCase.java      |  12 +
 .../lucene/index/BaseNormsFormatTestCase.java   | 101 +++---
 .../apache/solr/request/DocValuesFacets.java    |  20 +-
 .../request/PerSegmentSingleValuedFaceting.java |  10 +-
 .../apache/solr/search/SolrIndexSearcher.java   |   6 +-
 .../facet/FacetFieldProcessorByArrayDV.java     |  30 +-
 .../apache/solr/uninverting/FieldCacheImpl.java |  18 +
 59 files changed, 1405 insertions(+), 413 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene53/Lucene53NormsProducer.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene53/Lucene53NormsProducer.java b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene53/Lucene53NormsProducer.java
index a97cb5a..718fcd6 100644
--- a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene53/Lucene53NormsProducer.java
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene53/Lucene53NormsProducer.java
@@ -222,6 +222,12 @@ class Lucene53NormsProducer extends NormsProducer {
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      docID = target;
+      return true;
+    }
+
+    @Override
     public long cost() {
       // TODO
       return 0;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesProducer.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesProducer.java b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesProducer.java
index 1f785fe..f1c169c 100644
--- a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesProducer.java
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesProducer.java
@@ -477,6 +477,12 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
           }
 
           @Override
+          public boolean advanceExact(int target) throws IOException {
+            docID = target;
+            return true;
+          }
+
+          @Override
           public long cost() {
             // TODO
             return 0;
@@ -524,6 +530,13 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
       }
 
       @Override
+      public boolean advanceExact(int target) throws IOException {
+        doc = target;
+        value = values.get(doc);
+        return value != 0 || docsWithField.get(doc);
+      }
+
+      @Override
       public long cost() {
         return maxDoc;
       }
@@ -696,6 +709,16 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      if (advance(target) == target) {
+        return true;
+      }
+      --index;
+      doc = target;
+      return false;
+    }
+
+    @Override
     public long longValue() {
       assert index >= 0;
       assert index < docIDsLength;
@@ -891,6 +914,11 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
         }
 
         @Override
+        public boolean advanceExact(int target) throws IOException {
+          return sparseValues.advanceExact(target);
+        }
+
+        @Override
         public long cost() {
           return sparseValues.cost();
         }
@@ -933,7 +961,14 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
           return nextDoc();
         }
       }
-          
+
+      @Override
+      public boolean advanceExact(int target) throws IOException {
+        docID = target;
+        ord = (int) ordinals.get(target);
+        return ord != -1;
+      }
+
       @Override
       public int ordValue() {
         return ord;
@@ -1017,6 +1052,11 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
           }
 
           @Override
+          public boolean advanceExact(int target) throws IOException {
+            return sparseValues.advanceExact(target);
+          }
+
+          @Override
           public long cost() {
             return sparseValues.cost();
           }
@@ -1061,6 +1101,12 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
         }
 
         @Override
+        public boolean advanceExact(int target) throws IOException {
+          docID = target;
+          return docsWithField.get(docID);
+        }
+
+        @Override
         public long cost() {
           // TODO
           return 0;
@@ -1122,6 +1168,14 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
         }
         
         @Override
+        public boolean advanceExact(int target) throws IOException {
+          docID = target;
+          startOffset = ordIndex.get(docID);
+          endOffset = ordIndex.get(docID+1L);
+          return endOffset > startOffset;
+        }
+        
+        @Override
         public long cost() {
           // TODO
           return 0;
@@ -1185,6 +1239,15 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
         }
         
         @Override
+        public boolean advanceExact(int target) throws IOException {
+          docID = target;
+          int ord = (int) ordinals.get(docID);
+          startOffset = offsets[ord];
+          endOffset = offsets[ord+1];
+          return endOffset > startOffset;
+        }
+        
+        @Override
         public long cost() {
           // TODO
           return 0;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene54/TestLucene54DocValuesFormat.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene54/TestLucene54DocValuesFormat.java b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene54/TestLucene54DocValuesFormat.java
index c6ca201..b231716 100644
--- a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene54/TestLucene54DocValuesFormat.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene54/TestLucene54DocValuesFormat.java
@@ -106,7 +106,7 @@ public class TestLucene54DocValuesFormat extends BaseCompressingDocValuesFormatT
   public void testSortedVariableLengthBigVsStoredFields() throws Exception {
     int numIterations = atLeast(1);
     for (int i = 0; i < numIterations; i++) {
-      doTestSortedVsStoredFields(atLeast(300), 1, 32766);
+      doTestSortedVsStoredFields(atLeast(300), 1d, 1, 32766);
     }
   }
   
@@ -114,7 +114,7 @@ public class TestLucene54DocValuesFormat extends BaseCompressingDocValuesFormatT
   public void testSortedVariableLengthManyVsStoredFields() throws Exception {
     int numIterations = atLeast(1);
     for (int i = 0; i < numIterations; i++) {
-      doTestSortedVsStoredFields(TestUtil.nextInt(random(), 1024, 2049), 1, 500);
+      doTestSortedVsStoredFields(TestUtil.nextInt(random(), 1024, 2049), 1d, 1, 500);
     }
   }
   
@@ -201,6 +201,7 @@ public class TestLucene54DocValuesFormat extends BaseCompressingDocValuesFormatT
     }
 
     final IndexReader indexReader = writer.getReader();
+    TestUtil.checkReader(indexReader);
     writer.close();
 
     for (LeafReaderContext context : indexReader.leaves()) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesReader.java
----------------------------------------------------------------------
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesReader.java b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesReader.java
index adf5e42..09f97ab 100644
--- a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesReader.java
+++ b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesReader.java
@@ -144,7 +144,7 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
     if (values == null) {
       return null;
     } else {
-      DocIdSetIterator docsWithField = getNumericDocsWithField(fieldInfo);
+      DocValuesIterator docsWithField = getNumericDocsWithField(fieldInfo);
       return new NumericDocValues() {
         
         @Override
@@ -168,6 +168,11 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
         }
         
         @Override
+        public boolean advanceExact(int target) throws IOException {
+          return docsWithField.advanceExact(target);
+        }
+        
+        @Override
         public long longValue() throws IOException {
           return values.apply(docsWithField.docID());
         }
@@ -214,12 +219,16 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
       }
     };
   }
-  
-  private DocIdSetIterator getNumericDocsWithField(FieldInfo fieldInfo) throws IOException {
+
+  private static abstract class DocValuesIterator extends DocIdSetIterator {
+    abstract boolean advanceExact(int target) throws IOException;
+  }
+
+  private DocValuesIterator getNumericDocsWithField(FieldInfo fieldInfo) throws IOException {
     final OneField field = fields.get(fieldInfo.name);
     final IndexInput in = data.clone();
     final BytesRefBuilder scratch = new BytesRefBuilder();
-    return new DocIdSetIterator() {
+    return new DocValuesIterator() {
       
       int doc = -1;
       
@@ -250,6 +259,15 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
         }
         return doc = NO_MORE_DOCS;
       }
+
+      @Override
+      boolean advanceExact(int target) throws IOException {
+        this.doc = target;
+        in.seek(field.dataStartFilePointer + (1+field.pattern.length()+2)*target);
+        SimpleTextUtil.readLine(in, scratch); // data
+        SimpleTextUtil.readLine(in, scratch); // 'T' or 'F'
+        return scratch.byteAt(0) == (byte) 'T';
+      }
     };
   }
   
@@ -265,7 +283,7 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
     final BytesRefBuilder scratch = new BytesRefBuilder();
     final DecimalFormat decoder = new DecimalFormat(field.pattern, new DecimalFormatSymbols(Locale.ROOT));
 
-    DocIdSetIterator docsWithField = getBinaryDocsWithField(fieldInfo);
+    DocValuesIterator docsWithField = getBinaryDocsWithField(fieldInfo);
     
     IntFunction<BytesRef> values = new IntFunction<BytesRef>() {
       final BytesRefBuilder term = new BytesRefBuilder();
@@ -317,19 +335,24 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
       }
       
       @Override
+      public boolean advanceExact(int target) throws IOException {
+        return docsWithField.advanceExact(target);
+      }
+      
+      @Override
       public BytesRef binaryValue() throws IOException {
         return values.apply(docsWithField.docID());
       }
     };
   }
 
-  private DocIdSetIterator getBinaryDocsWithField(FieldInfo fieldInfo) throws IOException {
+  private DocValuesIterator getBinaryDocsWithField(FieldInfo fieldInfo) throws IOException {
     final OneField field = fields.get(fieldInfo.name);
     final IndexInput in = data.clone();
     final BytesRefBuilder scratch = new BytesRefBuilder();
     final DecimalFormat decoder = new DecimalFormat(field.pattern, new DecimalFormatSymbols(Locale.ROOT));
 
-    return new DocIdSetIterator() {
+    return new DocValuesIterator() {
       
       int doc = -1;
       
@@ -371,6 +394,26 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
         }
         return doc = NO_MORE_DOCS;
       }
+
+      @Override
+      boolean advanceExact(int target) throws IOException {
+        this.doc = target;
+        in.seek(field.dataStartFilePointer + (9+field.pattern.length() + field.maxLength+2)*target);
+        SimpleTextUtil.readLine(in, scratch);
+        assert StringHelper.startsWith(scratch.get(), LENGTH);
+        int len;
+        try {
+          len = decoder.parse(new String(scratch.bytes(), LENGTH.length, scratch.length() - LENGTH.length, StandardCharsets.UTF_8)).intValue();
+        } catch (ParseException pe) {
+          throw new CorruptIndexException("failed to parse int length", in, pe);
+        }
+        // skip past bytes
+        byte bytes[] = new byte[len];
+        in.readBytes(bytes, 0, len);
+        SimpleTextUtil.readLine(in, scratch); // newline
+        SimpleTextUtil.readLine(in, scratch); // 'T' or 'F'
+        return scratch.byteAt(0) == (byte) 'T';
+      }
     };
   }
 
@@ -424,7 +467,20 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
         }
         return doc = NO_MORE_DOCS;
       }
-      
+
+      @Override
+      public boolean advanceExact(int target) throws IOException {
+        this.doc = target;
+        in.seek(field.dataStartFilePointer + field.numValues * (9 + field.pattern.length() + field.maxLength) + target * (1 + field.ordPattern.length()));
+        SimpleTextUtil.readLine(in, scratch);
+        try {
+          ord = (int) ordDecoder.parse(scratch.get().utf8ToString()).longValue()-1;
+        } catch (ParseException pe) {
+          throw new CorruptIndexException("failed to parse ord", in, pe);
+        }
+        return ord >= 0;
+      }
+
       @Override
       public int ordValue() {
         return ord;
@@ -488,6 +544,15 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
         return doc;
       }
       
+      @Override
+      public boolean advanceExact(int target) throws IOException {
+        if (binary.advanceExact(target)) {
+          setCurrentDoc();
+          return true;
+        }
+        return false;
+      }
+      
       long values[];
       int index;
       
@@ -570,6 +635,20 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
       }
       
       @Override
+      public boolean advanceExact(int target) throws IOException {
+        in.seek(field.dataStartFilePointer + field.numValues * (9 + field.pattern.length() + field.maxLength) + target * (1 + field.ordPattern.length()));
+        SimpleTextUtil.readLine(in, scratch);
+        String ordList = scratch.get().utf8ToString().trim();
+        doc = target;
+        if (ordList.isEmpty() == false) {
+          currentOrds = ordList.split(",");
+          currentIndex = 0;
+          return true;
+        }
+        return false;
+      }
+      
+      @Override
       public long nextOrd() throws IOException {
         if (currentIndex == currentOrds.length) {
           return NO_MORE_ORDS;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesWriter.java
----------------------------------------------------------------------
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesWriter.java b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesWriter.java
index 2649ae6..8c6bdde 100644
--- a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesWriter.java
+++ b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesWriter.java
@@ -342,6 +342,15 @@ class SimpleTextDocValuesWriter extends DocValuesConsumer {
             return doc;
           }
 
+          @Override
+          public boolean advanceExact(int target) throws IOException {
+            if (values.advanceExact(target)) {
+              setCurrentDoc();
+              return true;
+            }
+            return false;
+          }
+          
           final StringBuilder builder = new StringBuilder();
           BytesRef binaryValue;
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/codecs/DocValuesConsumer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/DocValuesConsumer.java b/lucene/core/src/java/org/apache/lucene/codecs/DocValuesConsumer.java
index e04d5b9..e61724f 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/DocValuesConsumer.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/DocValuesConsumer.java
@@ -228,6 +228,11 @@ public abstract class DocValuesConsumer implements Closeable {
                           }
 
                           @Override
+                          public boolean advanceExact(int target) throws IOException {
+                            throw new UnsupportedOperationException();
+                          }
+
+                          @Override
                           public long cost() {
                             return finalCost;
                           }
@@ -320,6 +325,11 @@ public abstract class DocValuesConsumer implements Closeable {
                          }
 
                          @Override
+                         public boolean advanceExact(int target) throws IOException {
+                           throw new UnsupportedOperationException();
+                         }
+
+                         @Override
                          public long cost() {
                            return finalCost;
                          }
@@ -417,6 +427,11 @@ public abstract class DocValuesConsumer implements Closeable {
                                 }
 
                                 @Override
+                                public boolean advanceExact(int target) throws IOException {
+                                  throw new UnsupportedOperationException();
+                                }
+
+                                @Override
                                 public int docValueCount() {
                                   return currentSub.values.docValueCount();
                                 }
@@ -575,6 +590,11 @@ public abstract class DocValuesConsumer implements Closeable {
                          }
 
                          @Override
+                         public boolean advanceExact(int target) throws IOException {
+                           throw new UnsupportedOperationException();
+                         }
+
+                         @Override
                          public long cost() {
                            return finalCost;
                          }
@@ -732,6 +752,11 @@ public abstract class DocValuesConsumer implements Closeable {
                             }
 
                             @Override
+                            public boolean advanceExact(int target) throws IOException {
+                              throw new UnsupportedOperationException();
+                            }
+
+                            @Override
                             public long nextOrd() throws IOException {
                               long subOrd = currentSub.values.nextOrd();
                               if (subOrd == NO_MORE_ORDS) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/codecs/NormsConsumer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/NormsConsumer.java b/lucene/core/src/java/org/apache/lucene/codecs/NormsConsumer.java
index 3a6ce22..51abb69 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/NormsConsumer.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/NormsConsumer.java
@@ -158,6 +158,11 @@ public abstract class NormsConsumer implements Closeable {
                           }
 
                           @Override
+                          public boolean advanceExact(int target) throws IOException {
+                            throw new UnsupportedOperationException();
+                          }
+
+                          @Override
                           public long cost() {
                             return 0;
                           }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/codecs/lucene70/IndexedDISI.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/IndexedDISI.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene70/IndexedDISI.java
index 3ea3141..24eaf7a 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/IndexedDISI.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene70/IndexedDISI.java
@@ -112,6 +112,9 @@ final class IndexedDISI extends DocIdSetIterator {
   private int doc = -1;
   private int index = -1;
 
+  // SPARSE variables
+  boolean exists;
+
   // DENSE variables
   private long word;
   private int wordIndex = -1;
@@ -129,7 +132,7 @@ final class IndexedDISI extends DocIdSetIterator {
   @Override
   public int advance(int target) throws IOException {
     final int targetBlock = target & 0xFFFF0000;
-    if (block != targetBlock) {
+    if (block < targetBlock) {
       advanceBlock(targetBlock);
     }
     if (block == targetBlock) {
@@ -138,7 +141,19 @@ final class IndexedDISI extends DocIdSetIterator {
       }
       readBlockHeader();
     }
-    return doc = method.readFirstDoc(this);
+    boolean found = method.advanceWithinBlock(this, block);
+    assert found;
+    return doc;
+  }
+
+  public boolean advanceExact(int target) throws IOException {
+    final int targetBlock = target & 0xFFFF0000;
+    if (block < targetBlock) {
+      advanceBlock(targetBlock);
+    }
+    boolean found = block == targetBlock && method.advanceExactWithinBlock(this, target);
+    this.doc = target;
+    return found;
   }
 
   private void advanceBlock(int targetBlock) throws IOException {
@@ -186,11 +201,6 @@ final class IndexedDISI extends DocIdSetIterator {
   enum Method {
     SPARSE {
       @Override
-      int readFirstDoc(IndexedDISI disi) throws IOException {
-        disi.index++;
-        return disi.block | Short.toUnsignedInt(disi.slice.readShort());
-      }
-      @Override
       boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException {
         final int targetInBlock = target & 0xFFFF;
         // TODO: binary search
@@ -199,23 +209,37 @@ final class IndexedDISI extends DocIdSetIterator {
           disi.index++;
           if (doc >= targetInBlock) {
             disi.doc = disi.block | doc;
+            disi.exists = true;
             return true;
           }
         }
         return false;
       }
-    },
-    DENSE {
       @Override
-      int readFirstDoc(IndexedDISI disi) throws IOException {
-        do {
-          disi.word = disi.slice.readLong();
-          disi.wordIndex++;
-        } while (disi.word == 0L);
-        disi.index = disi.numberOfOnes;
-        disi.numberOfOnes += Long.bitCount(disi.word);
-        return disi.block | (disi.wordIndex << 6) | Long.numberOfTrailingZeros(disi.word);
+      boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException {
+        final int targetInBlock = target & 0xFFFF;
+        // TODO: binary search
+        if (target == disi.doc) {
+          return disi.exists;
+        }
+        for (; disi.index < disi.nextBlockIndex;) {
+          int doc = Short.toUnsignedInt(disi.slice.readShort());
+          disi.index++;
+          if (doc >= targetInBlock) {
+            if (doc != targetInBlock) {
+              disi.index--;
+              disi.slice.seek(disi.slice.getFilePointer() - Short.BYTES);
+              break;
+            }
+            disi.exists = true;
+            return true;
+          }
+        }
+        disi.exists = false;
+        return false;
       }
+    },
+    DENSE {
       @Override
       boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException {
         final int targetInBlock = target & 0xFFFF;
@@ -244,26 +268,42 @@ final class IndexedDISI extends DocIdSetIterator {
         }
         return false;
       }
-    },
-    ALL {
       @Override
-      int readFirstDoc(IndexedDISI disi) {
-        return disi.block;
+      boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException {
+        final int targetInBlock = target & 0xFFFF;
+        final int targetWordIndex = targetInBlock >>> 6;
+        for (int i = disi.wordIndex + 1; i <= targetWordIndex; ++i) {
+          disi.word = disi.slice.readLong();
+          disi.numberOfOnes += Long.bitCount(disi.word);
+        }
+        disi.wordIndex = targetWordIndex;
+
+        long leftBits = disi.word >>> target;
+        disi.index = disi.numberOfOnes - Long.bitCount(leftBits);
+        return (leftBits & 1L) != 0;
       }
+    },
+    ALL {
       @Override
       boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException {
         disi.doc = target;
         disi.index = target - disi.gap;
         return true;
       }
+      @Override
+      boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException {
+        disi.index = target - disi.gap;
+        return true;
+      }
     };
 
-    /** Read the first document of the current block. */
-    abstract int readFirstDoc(IndexedDISI disi) throws IOException;
-
     /** Advance to the first doc from the block that is equal to or greater than {@code target}.
      *  Return true if there is such a doc and false otherwise. */
     abstract boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException;
+
+    /** Advance the iterator exactly to the position corresponding to the given {@code target}
+     * and return whether this document exists. */
+    abstract boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException;
   }
 
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70DocValuesProducer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70DocValuesProducer.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70DocValuesProducer.java
index 637c8ee..19815ba 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70DocValuesProducer.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70DocValuesProducer.java
@@ -375,6 +375,12 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
         }
 
         @Override
+        public boolean advanceExact(int target) {
+          doc = target;
+          return true;
+        }
+
+        @Override
         public long cost() {
           return maxDoc;
         }
@@ -392,6 +398,11 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
         }
 
         @Override
+        public boolean advanceExact(int target) throws IOException {
+          return disi.advanceExact(target);
+        }
+
+        @Override
         public int nextDoc() throws IOException {
           return disi.nextDoc();
         }
@@ -521,6 +532,12 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
         }
 
         @Override
+        public boolean advanceExact(int target) throws IOException {
+          doc = target;
+          return true;
+        }
+
+        @Override
         public BytesRef binaryValue() throws IOException {
           return bytesRefs.get(doc);
         }
@@ -551,6 +568,11 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
         }
 
         @Override
+        public boolean advanceExact(int target) throws IOException {
+          return disi.advanceExact(target);
+        }
+
+        @Override
         public BytesRef binaryValue() throws IOException {
           return bytesRefs.get(disi.index());
         }
@@ -616,6 +638,12 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
         }
 
         @Override
+        public boolean advanceExact(int target) {
+          doc = target;
+          return true;
+        }
+
+        @Override
         public int ordValue() {
           return (int) ords.get(doc);
         }
@@ -646,6 +674,11 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
         }
 
         @Override
+        public boolean advanceExact(int target) throws IOException {
+          return disi.advanceExact(target);
+        }
+
+        @Override
         public int ordValue() {
           return (int) ords.get(disi.index());
         }
@@ -960,6 +993,15 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
         }
 
         @Override
+        public boolean advanceExact(int target) throws IOException {
+          start = addresses.get(target);
+          end = addresses.get(target + 1L);
+          count = (int) (end - start);
+          doc = target;
+          return true;
+        }
+
+        @Override
         public long nextValue() throws IOException {
           return values.get(start++);
         }
@@ -1001,6 +1043,12 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
         }
 
         @Override
+        public boolean advanceExact(int target) throws IOException {
+          set = false;
+          return disi.advanceExact(target);
+        }
+
+        @Override
         public long nextValue() throws IOException {
           set();
           return values.get(start++);
@@ -1073,6 +1121,14 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
         }
 
         @Override
+        public boolean advanceExact(int target) throws IOException {
+          start = addresses.get(target);
+          end = addresses.get(target + 1L);
+          doc = target;
+          return true;
+        }
+
+        @Override
         public long nextOrd() throws IOException {
           if (start == end) {
             return NO_MORE_ORDS;
@@ -1113,6 +1169,12 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
         }
 
         @Override
+        public boolean advanceExact(int target) throws IOException {
+          set = false;
+          return disi.advanceExact(target);
+        }
+
+        @Override
         public long nextOrd() throws IOException {
           if (set == false) {
             final int index = disi.index();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70NormsProducer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70NormsProducer.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70NormsProducer.java
index e3f6f79..c97f1c3 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70NormsProducer.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70NormsProducer.java
@@ -160,6 +160,12 @@ final class Lucene70NormsProducer extends NormsProducer {
         }
 
         @Override
+        public boolean advanceExact(int target) throws IOException {
+          this.doc = target;
+          return true;
+        }
+
+        @Override
         public long cost() {
           return maxDoc;
         }
@@ -177,6 +183,11 @@ final class Lucene70NormsProducer extends NormsProducer {
         }
 
         @Override
+        public boolean advanceExact(int target) throws IOException {
+          return disi.advanceExact(target);
+        }
+
+        @Override
         public int nextDoc() throws IOException {
           return disi.nextDoc();
         }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/BinaryDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/BinaryDocValues.java b/lucene/core/src/java/org/apache/lucene/index/BinaryDocValues.java
index 66397e4..6d23cf1 100644
--- a/lucene/core/src/java/org/apache/lucene/index/BinaryDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/BinaryDocValues.java
@@ -19,13 +19,12 @@ package org.apache.lucene.index;
 
 import java.io.IOException;
 
-import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.util.BytesRef;
 
 /**
  * A per-document binary value.
  */
-public abstract class BinaryDocValues extends DocIdSetIterator {
+public abstract class BinaryDocValues extends DocValuesIterator {
   
   /** Sole constructor. (For invocation by subclass 
    *  constructors, typically implicit.) */
@@ -33,6 +32,8 @@ public abstract class BinaryDocValues extends DocIdSetIterator {
 
   /**
    * Returns the binary value for the current document ID.
+   * It is illegal to call this method after {@link #advanceExact(int)}
+   * returned {@code false}.
    * @return binary value
    */
   public abstract BytesRef binaryValue() throws IOException;

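For context, a minimal consumer-side sketch of the contract documented above: binaryValue() may only be read when advanceExact reported that the document has a value. The reader, field name and document id are assumptions of this example, not part of the change above:

    import java.io.IOException;
    import org.apache.lucene.index.BinaryDocValues;
    import org.apache.lucene.index.LeafReader;
    import org.apache.lucene.util.BytesRef;

    final class ReadBinaryValueSketch {
      /** Returns the value of {@code field} for {@code docId}, or null when missing. */
      static BytesRef valueOrNull(LeafReader leaf, String field, int docId) throws IOException {
        BinaryDocValues values = leaf.getBinaryDocValues(field);
        if (values == null || values.advanceExact(docId) == false) {
          return null;                // field absent, or this document has no value
        }
        return values.binaryValue();  // legal: advanceExact returned true
      }
    }
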
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/BinaryDocValuesWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/BinaryDocValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/BinaryDocValuesWriter.java
index ff2e67c..9611a03 100644
--- a/lucene/core/src/java/org/apache/lucene/index/BinaryDocValuesWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/BinaryDocValuesWriter.java
@@ -154,6 +154,11 @@ class BinaryDocValuesWriter extends DocValuesWriter {
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      throw new UnsupportedOperationException();
+    }
+
+    @Override
     public long cost() {
       return docsWithField.cost();
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
index aec7537..f3d3562 100644
--- a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
+++ b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
@@ -2062,13 +2062,83 @@ public final class CheckIndex implements Closeable {
     return status;
   }
 
+  @FunctionalInterface
+  private static interface DocValuesIteratorSupplier {
+    DocValuesIterator get(FieldInfo fi) throws IOException;
+  }
+
+  private static void checkDVIterator(FieldInfo fi, int maxDoc, DocValuesIteratorSupplier producer) throws IOException {
+    String field = fi.name;
+
+    // Check advance
+    DocValuesIterator it1 = producer.get(fi);
+    DocValuesIterator it2 = producer.get(fi);
+    int i = 0;
+    for (int doc = it1.nextDoc(); ; doc = it1.nextDoc()) {
+
+      if (i++ % 10 == 1) {
+        int doc2 = it2.advance(doc - 1);
+        if (doc2 < doc - 1) {
+          throw new RuntimeException("dv iterator field=" + field + ": doc=" + (doc-1) + " went backwards (got: " + doc2 + ")");
+        }
+        if (doc2 == doc - 1) {
+          doc2 = it2.nextDoc();
+        }
+        if (doc2 != doc) {
+          throw new RuntimeException("dv iterator field=" + field + ": doc=" + doc + " was not found through advance() (got: " + doc2 + ")");
+        }
+        if (it2.docID() != doc) {
+          throw new RuntimeException("dv iterator field=" + field + ": doc=" + doc + " reports wrong doc ID (got: " + it2.docID() + ")");
+        }
+      }
+
+      if (doc == NO_MORE_DOCS) {
+        break;
+      }
+    }
+
+    // Check advanceExact
+    it1 = producer.get(fi);
+    it2 = producer.get(fi);
+    i = 0;
+    int lastDoc = -1;
+    for (int doc = it1.nextDoc(); doc != NO_MORE_DOCS ; doc = it1.nextDoc()) {
+
+      if (i++ % 13 == 1) {
+        boolean found = it2.advanceExact(doc - 1);
+        if ((doc - 1 == lastDoc) != found) {
+          throw new RuntimeException("dv iterator field=" + field + ": doc=" + (doc-1) + " disagrees about whether document exists (got: " + found + ")");
+        }
+        if (it2.docID() != doc - 1) {
+          throw new RuntimeException("dv iterator field=" + field + ": doc=" + (doc-1) + " reports wrong doc ID (got: " + it2.docID() + ")");
+        }
+        
+        boolean found2 = it2.advanceExact(doc - 1);
+        if (found != found2) {
+          throw new RuntimeException("dv iterator field=" + field + ": doc=" + (doc-1) + " has unstable advanceExact");
+        }
+
+        if (i % 1 == 0) {
+          int doc2 = it2.nextDoc();
+          if (doc != doc2) {
+            throw new RuntimeException("dv iterator field=" + field + ": doc=" + doc + " was not found through nextDoc() (got: " + doc2 + ")");
+          }
+          if (it2.docID() != doc) {
+            throw new RuntimeException("dv iterator field=" + field + ": doc=" + doc + " reports wrong doc ID (got: " + it2.docID() + ")");
+          }
+        }
+      }
+
+      lastDoc = doc;
+    }
+  }
+
   private static void checkBinaryDocValues(String fieldName, int maxDoc, BinaryDocValues bdv) throws IOException {
     int doc;
     if (bdv.docID() != -1) {
       throw new RuntimeException("binary dv iterator for field: " + fieldName + " should start at docID=-1, but got " + bdv.docID());
     }
     // TODO: we could add stats to DVs, e.g. total doc count w/ a value for this field
-    // TODO: check advance too
     while ((doc = bdv.nextDoc()) != NO_MORE_DOCS) {
       BytesRef value = bdv.binaryValue();
       value.isValid();
@@ -2083,7 +2153,6 @@ public final class CheckIndex implements Closeable {
     FixedBitSet seenOrds = new FixedBitSet(dv.getValueCount());
     int maxOrd2 = -1;
     int docID;
-    // TODO: check advance too
     while ((docID = dv.nextDoc()) != NO_MORE_DOCS) {
       int ord = dv.ordValue();
       if (ord == -1) {
@@ -2119,7 +2188,6 @@ public final class CheckIndex implements Closeable {
     LongBitSet seenOrds = new LongBitSet(dv.getValueCount());
     long maxOrd2 = -1;
     int docID;
-    // TODO: check advance too
     while ((docID = dv.nextDoc()) != NO_MORE_DOCS) {
       long lastOrd = -1;
       long ord;
@@ -2164,7 +2232,6 @@ public final class CheckIndex implements Closeable {
     if (ndv.docID() != -1) {
       throw new RuntimeException("dv iterator for field: " + fieldName + " should start at docID=-1, but got " + ndv.docID());
     }
-    // TODO: check advance too
     while (true) {
       int docID = ndv.nextDoc();
       if (docID == NO_MORE_DOCS) {
@@ -2191,7 +2258,6 @@ public final class CheckIndex implements Closeable {
       throw new RuntimeException("dv iterator for field: " + fieldName + " should start at docID=-1, but got " + ndv.docID());
     }
     // TODO: we could add stats to DVs, e.g. total doc count w/ a value for this field
-    // TODO: check advance too
     while ((doc = ndv.nextDoc()) != NO_MORE_DOCS) {
       ndv.longValue();
     }
@@ -2201,23 +2267,28 @@ public final class CheckIndex implements Closeable {
     switch(fi.getDocValuesType()) {
       case SORTED:
         status.totalSortedFields++;
+        checkDVIterator(fi, maxDoc, dvReader::getSorted);
         checkBinaryDocValues(fi.name, maxDoc, dvReader.getSorted(fi));
         checkSortedDocValues(fi.name, maxDoc, dvReader.getSorted(fi));
         break;
       case SORTED_NUMERIC:
         status.totalSortedNumericFields++;
+        checkDVIterator(fi, maxDoc, dvReader::getSortedNumeric);
         checkSortedNumericDocValues(fi.name, maxDoc, dvReader.getSortedNumeric(fi));
         break;
       case SORTED_SET:
         status.totalSortedSetFields++;
+        checkDVIterator(fi, maxDoc, dvReader::getSortedSet);
         checkSortedSetDocValues(fi.name, maxDoc, dvReader.getSortedSet(fi));
         break;
       case BINARY:
         status.totalBinaryFields++;
+        checkDVIterator(fi, maxDoc, dvReader::getBinary);
         checkBinaryDocValues(fi.name, maxDoc, dvReader.getBinary(fi));
         break;
       case NUMERIC:
         status.totalNumericFields++;
+        checkDVIterator(fi, maxDoc, dvReader::getNumeric);
         checkNumericDocValues(fi.name, dvReader.getNumeric(fi));
         break;
       default:

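The new checkDVIterator above cross-checks advance() and advanceExact() against nextDoc() on two fresh iterators. The property it enforces, restated as a stand-alone sketch (the NumericDocValues instance and the expected hasValue flag are supplied by the caller and are assumptions of this note):

    import java.io.IOException;
    import org.apache.lucene.index.NumericDocValues;

    final class AdvanceExactPropertySketch {
      static void check(NumericDocValues dv, int target, boolean hasValue) throws IOException {
        boolean found = dv.advanceExact(target);
        if (dv.docID() != target) {
          throw new AssertionError("advanceExact must leave docID() on " + target + " but got " + dv.docID());
        }
        if (found != hasValue) {
          throw new AssertionError("advanceExact disagrees about whether doc " + target + " has a value");
        }
      }
    }
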
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/DocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/DocValues.java b/lucene/core/src/java/org/apache/lucene/index/DocValues.java
index 3377f45..b25d484 100644
--- a/lucene/core/src/java/org/apache/lucene/index/DocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/DocValues.java
@@ -35,26 +35,27 @@ public final class DocValues {
    */
   public static final BinaryDocValues emptyBinary() {
     return new BinaryDocValues() {
-      private boolean exhausted = false;
+      private int doc = -1;
       
       @Override
       public int advance(int target) {
-        assert exhausted == false;
-        assert target >= 0;
-        exhausted = true;
-        return NO_MORE_DOCS;
+        return doc = NO_MORE_DOCS;
+      }
+      
+      @Override
+      public boolean advanceExact(int target) throws IOException {
+        doc = target;
+        return false;
       }
       
       @Override
       public int docID() {
-        return exhausted ? NO_MORE_DOCS : -1;
+        return doc;
       }
       
       @Override
       public int nextDoc() {
-        assert exhausted == false;
-        exhausted = true;
-        return NO_MORE_DOCS;
+        return doc = NO_MORE_DOCS;
       }
       
       @Override
@@ -75,26 +76,27 @@ public final class DocValues {
    */
   public static final NumericDocValues emptyNumeric() {
     return new NumericDocValues() {
-      private boolean exhausted = false;
+      private int doc = -1;
       
       @Override
       public int advance(int target) {
-        assert exhausted == false;
-        assert target >= 0;
-        exhausted = true;
-        return NO_MORE_DOCS;
+        return doc = NO_MORE_DOCS;
+      }
+      
+      @Override
+      public boolean advanceExact(int target) throws IOException {
+        doc = target;
+        return false;
       }
       
       @Override
       public int docID() {
-        return exhausted ? NO_MORE_DOCS : -1;
+        return doc;
       }
       
       @Override
       public int nextDoc() {
-        assert exhausted == false;
-        exhausted = true;
-        return NO_MORE_DOCS;
+        return doc = NO_MORE_DOCS;
       }
       
       @Override
@@ -140,26 +142,27 @@ public final class DocValues {
     final BytesRef empty = new BytesRef();
     return new SortedDocValues() {
       
-      private boolean exhausted = false;
+      private int doc = -1;
       
       @Override
       public int advance(int target) {
-        assert exhausted == false;
-        assert target >= 0;
-        exhausted = true;
-        return NO_MORE_DOCS;
+        return doc = NO_MORE_DOCS;
+      }
+      
+      @Override
+      public boolean advanceExact(int target) throws IOException {
+        doc = target;
+        return false;
       }
       
       @Override
       public int docID() {
-        return exhausted ? NO_MORE_DOCS : -1;
+        return doc;
       }
       
       @Override
       public int nextDoc() {
-        assert exhausted == false;
-        exhausted = true;
-        return NO_MORE_DOCS;
+        return doc = NO_MORE_DOCS;
       }
       
       @Override
@@ -191,26 +194,27 @@ public final class DocValues {
   public static final SortedNumericDocValues emptySortedNumeric(int maxDoc) {
     return new SortedNumericDocValues() {
       
-      private boolean exhausted = false;
+      private int doc = -1;
       
       @Override
       public int advance(int target) {
-        assert exhausted == false;
-        assert target >= 0;
-        exhausted = true;
-        return NO_MORE_DOCS;
+        return doc = NO_MORE_DOCS;
+      }
+      
+      @Override
+      public boolean advanceExact(int target) throws IOException {
+        doc = target;
+        return false;
       }
       
       @Override
       public int docID() {
-        return exhausted ? NO_MORE_DOCS : -1;
+        return doc;
       }
       
       @Override
       public int nextDoc() {
-        assert exhausted == false;
-        exhausted = true;
-        return NO_MORE_DOCS;
+        return doc = NO_MORE_DOCS;
       }
       
       @Override
@@ -237,26 +241,27 @@ public final class DocValues {
     final BytesRef empty = new BytesRef();
     return new SortedSetDocValues() {
       
-      private boolean exhausted = false;
+      private int doc = -1;
       
       @Override
       public int advance(int target) {
-        assert exhausted == false;
-        assert target >= 0;
-        exhausted = true;
-        return NO_MORE_DOCS;
+        return doc = NO_MORE_DOCS;
+      }
+      
+      @Override
+      public boolean advanceExact(int target) throws IOException {
+        doc = target;
+        return false;
       }
       
       @Override
       public int docID() {
-        return exhausted ? NO_MORE_DOCS : -1;
+        return doc;
       }
       
       @Override
       public int nextDoc() {
-        assert exhausted == false;
-        exhausted = true;
-        return NO_MORE_DOCS;
+        return doc = NO_MORE_DOCS;
       }
       
       @Override

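The empty singletons above now track a position instead of a bare exhausted flag: advanceExact records the target (so docID() stays consistent) but reports that no document has a value. A tiny sketch of the resulting behaviour:

    import java.io.IOException;
    import org.apache.lucene.index.DocValues;
    import org.apache.lucene.index.NumericDocValues;

    final class EmptyNumericSketch {
      public static void main(String[] args) throws IOException {
        NumericDocValues empty = DocValues.emptyNumeric();
        System.out.println(empty.advanceExact(42));  // false: no document has a value
        System.out.println(empty.docID());           // 42: still positioned on the target
      }
    }
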
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/DocValuesIterator.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/DocValuesIterator.java b/lucene/core/src/java/org/apache/lucene/index/DocValuesIterator.java
new file mode 100644
index 0000000..d53e26a
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/index/DocValuesIterator.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.index;
+
+import java.io.IOException;
+
+import org.apache.lucene.search.DocIdSetIterator;
+
+abstract class DocValuesIterator extends DocIdSetIterator {
+
+  /** Advance the iterator to exactly {@code target} and return whether
+   *  {@code target} has a value.
+   *  {@code target} must be greater than or equal to the current
+   *  {@link #docID() doc ID} and must be a valid doc ID, ie. &ge; 0 and
+   *  &lt; {@code maxDoc}.
+   *  After this method returns, {@link #docID()} returns {@code target}. */
+  public abstract boolean advanceExact(int target) throws IOException;
+
+}

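DocValuesIterator itself is package-private, but every public doc-values class now extends it, so the call pattern below is available everywhere. A minimal sketch of the intended usage, assuming document ids are visited in non-decreasing order (a requirement of the contract above); the field access and default value are assumptions of this note:

    import java.io.IOException;
    import org.apache.lucene.index.NumericDocValues;

    final class AdvanceExactUsageSketch {
      static long valueOr(NumericDocValues values, int docId, long missing) throws IOException {
        if (values.advanceExact(docId)) {
          return values.longValue();  // legal only after advanceExact returned true
        }
        return missing;               // document has no value for this field
      }
    }
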
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/FilterBinaryDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/FilterBinaryDocValues.java b/lucene/core/src/java/org/apache/lucene/index/FilterBinaryDocValues.java
index 66c4323..650ad04 100644
--- a/lucene/core/src/java/org/apache/lucene/index/FilterBinaryDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/FilterBinaryDocValues.java
@@ -50,6 +50,11 @@ public abstract class FilterBinaryDocValues extends BinaryDocValues {
   }
   
   @Override
+  public boolean advanceExact(int target) throws IOException {
+    return in.advanceExact(target);
+  }
+  
+  @Override
   public long cost() {
     return in.cost();
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/FilterNumericDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/FilterNumericDocValues.java b/lucene/core/src/java/org/apache/lucene/index/FilterNumericDocValues.java
index 0058fa6..bd00cf2 100644
--- a/lucene/core/src/java/org/apache/lucene/index/FilterNumericDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/FilterNumericDocValues.java
@@ -48,6 +48,11 @@ public abstract class FilterNumericDocValues extends NumericDocValues {
   }
   
   @Override
+  public boolean advanceExact(int target) throws IOException {
+    return in.advanceExact(target);
+  }
+  
+  @Override
   public long cost() {
     return in.cost();
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/LegacyBinaryDocValuesWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/LegacyBinaryDocValuesWrapper.java b/lucene/core/src/java/org/apache/lucene/index/LegacyBinaryDocValuesWrapper.java
index 13bc207..919b1ff 100644
--- a/lucene/core/src/java/org/apache/lucene/index/LegacyBinaryDocValuesWrapper.java
+++ b/lucene/core/src/java/org/apache/lucene/index/LegacyBinaryDocValuesWrapper.java
@@ -17,6 +17,8 @@
 
 package org.apache.lucene.index;
 
+import java.io.IOException;
+
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 
@@ -71,6 +73,12 @@ public final class LegacyBinaryDocValuesWrapper extends BinaryDocValues {
   }
 
   @Override
+  public boolean advanceExact(int target) throws IOException {
+    docID = target;
+    return docsWithField.get(target);
+  }
+
+  @Override
   public long cost() {
     return 0;
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/LegacyNumericDocValuesWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/LegacyNumericDocValuesWrapper.java b/lucene/core/src/java/org/apache/lucene/index/LegacyNumericDocValuesWrapper.java
index a72efe8..aaccc05 100644
--- a/lucene/core/src/java/org/apache/lucene/index/LegacyNumericDocValuesWrapper.java
+++ b/lucene/core/src/java/org/apache/lucene/index/LegacyNumericDocValuesWrapper.java
@@ -17,6 +17,8 @@
 
 package org.apache.lucene.index;
 
+import java.io.IOException;
+
 import org.apache.lucene.util.Bits;
 
 /**
@@ -70,6 +72,13 @@ public final class LegacyNumericDocValuesWrapper extends NumericDocValues {
   }
 
   @Override
+  public boolean advanceExact(int target) throws IOException {
+    docID = target;
+    value = values.get(docID);
+    return value != 0 || docsWithField.get(docID);
+  }
+
+  @Override
   public long cost() {
     // TODO
     return 0;

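A note on the wrapper's check above: the legacy numeric API returns 0 for documents without a value, so a non-zero value already proves presence and the docsWithField bits only need to be consulted to tell "missing" apart from "explicitly 0". The same test, isolated as a sketch:

    import org.apache.lucene.util.Bits;

    final class LegacyHasValueSketch {
      static boolean hasValue(long legacyValue, Bits docsWithField, int docID) {
        // legacyValue is what the legacy get(docID) call returned.
        return legacyValue != 0 || docsWithField.get(docID);
      }
    }
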
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/LegacySortedDocValuesWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/LegacySortedDocValuesWrapper.java b/lucene/core/src/java/org/apache/lucene/index/LegacySortedDocValuesWrapper.java
index d8ef2f4..ecc114b 100644
--- a/lucene/core/src/java/org/apache/lucene/index/LegacySortedDocValuesWrapper.java
+++ b/lucene/core/src/java/org/apache/lucene/index/LegacySortedDocValuesWrapper.java
@@ -17,6 +17,8 @@
 
 package org.apache.lucene.index;
 
+import java.io.IOException;
+
 import org.apache.lucene.util.BytesRef;
 
 /**
@@ -71,6 +73,13 @@ public final class LegacySortedDocValuesWrapper extends SortedDocValues {
   }
 
   @Override
+  public boolean advanceExact(int target) throws IOException {
+    docID = target;
+    ord = values.getOrd(docID);
+    return ord != -1;
+  }
+
+  @Override
   public long cost() {
     return 0;
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/LegacySortedNumericDocValuesWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/LegacySortedNumericDocValuesWrapper.java b/lucene/core/src/java/org/apache/lucene/index/LegacySortedNumericDocValuesWrapper.java
index bf3c6cd..cfb61e3 100644
--- a/lucene/core/src/java/org/apache/lucene/index/LegacySortedNumericDocValuesWrapper.java
+++ b/lucene/core/src/java/org/apache/lucene/index/LegacySortedNumericDocValuesWrapper.java
@@ -17,6 +17,8 @@
 
 package org.apache.lucene.index;
 
+import java.io.IOException;
+
 /**
  * Wraps a {@link LegacySortedNumericDocValues} into a {@link SortedNumericDocValues}.
  *
@@ -72,6 +74,13 @@ public final class LegacySortedNumericDocValuesWrapper extends SortedNumericDocV
   }
 
   @Override
+  public boolean advanceExact(int target) throws IOException {
+    docID = target;
+    values.setDocument(docID);
+    return values.count() != 0;
+  }
+
+  @Override
   public long cost() {
     return 0;
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/LegacySortedSetDocValuesWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/LegacySortedSetDocValuesWrapper.java b/lucene/core/src/java/org/apache/lucene/index/LegacySortedSetDocValuesWrapper.java
index 45d12d2..0e96e02 100644
--- a/lucene/core/src/java/org/apache/lucene/index/LegacySortedSetDocValuesWrapper.java
+++ b/lucene/core/src/java/org/apache/lucene/index/LegacySortedSetDocValuesWrapper.java
@@ -17,6 +17,8 @@
 
 package org.apache.lucene.index;
 
+import java.io.IOException;
+
 import org.apache.lucene.util.BytesRef;
 
 /**
@@ -72,6 +74,14 @@ public final class LegacySortedSetDocValuesWrapper extends SortedSetDocValues {
   }
 
   @Override
+  public boolean advanceExact(int target) throws IOException {
+    docID = target;
+    values.setDocument(docID);
+    ord = values.nextOrd();
+    return ord != NO_MORE_ORDS;
+  }
+
+  @Override
   public long cost() {
     return 0;
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java b/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java
index aeb49c5..51d684d 100644
--- a/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java
@@ -139,6 +139,27 @@ public class MultiDocValues {
       }
 
       @Override
+      public boolean advanceExact(int targetDocID) throws IOException {
+        if (targetDocID <= docID) {
+          throw new IllegalArgumentException("can only advance beyond current document: on docID=" + docID + " but targetDocID=" + targetDocID);
+        }
+        int readerIndex = ReaderUtil.subIndex(targetDocID, leaves);
+        if (readerIndex >= nextLeaf) {
+          if (readerIndex == leaves.size()) {
+            throw new IllegalArgumentException("Out of range: " + targetDocID);
+          }
+          currentLeaf = leaves.get(readerIndex);
+          currentValues = currentLeaf.reader().getNormValues(field);
+          nextLeaf = readerIndex+1;
+        }
+        docID = targetDocID;
+        if (currentValues == null) {
+          return false;
+        }
+        return currentValues.advanceExact(targetDocID - currentLeaf.docBase);
+      }
+
+      @Override
       public long longValue() throws IOException {
         return currentValues.longValue();
       }
@@ -244,6 +265,26 @@ public class MultiDocValues {
       }
 
       @Override
+      public boolean advanceExact(int targetDocID) throws IOException {
+        if (targetDocID <= docID) {
+          throw new IllegalArgumentException("can only advance beyond current document: on docID=" + docID + " but targetDocID=" + targetDocID);
+        }
+        int readerIndex = ReaderUtil.subIndex(targetDocID, leaves);
+        if (readerIndex >= nextLeaf) {
+          if (readerIndex == leaves.size()) {
+            throw new IllegalArgumentException("Out of range: " + targetDocID);
+          }
+          currentLeaf = leaves.get(readerIndex);
+          currentValues = currentLeaf.reader().getNumericDocValues(field);
+          nextLeaf = readerIndex+1;
+        }
+        docID = targetDocID;
+        if (currentValues == null) {
+          return false;
+        }
+        return currentValues.advanceExact(targetDocID - currentLeaf.docBase);
+      }
+      @Override
       public long longValue() throws IOException {
         return currentValues.longValue();
       }
@@ -348,6 +389,27 @@ public class MultiDocValues {
       }
 
       @Override
+      public boolean advanceExact(int targetDocID) throws IOException {
+        if (targetDocID <= docID) {
+          throw new IllegalArgumentException("can only advance beyond current document: on docID=" + docID + " but targetDocID=" + targetDocID);
+        }
+        int readerIndex = ReaderUtil.subIndex(targetDocID, leaves);
+        if (readerIndex >= nextLeaf) {
+          if (readerIndex == leaves.size()) {
+            throw new IllegalArgumentException("Out of range: " + targetDocID);
+          }
+          currentLeaf = leaves.get(readerIndex);
+          currentValues = currentLeaf.reader().getBinaryDocValues(field);
+          nextLeaf = readerIndex+1;
+        }
+        docID = targetDocID;
+        if (currentValues == null) {
+          return false;
+        }
+        return currentValues.advanceExact(targetDocID - currentLeaf.docBase);
+      }
+
+      @Override
       public BytesRef binaryValue() throws IOException {
         return currentValues.binaryValue();
       }
@@ -462,6 +524,27 @@ public class MultiDocValues {
       }
 
       @Override
+      public boolean advanceExact(int targetDocID) throws IOException {
+        if (targetDocID <= docID) {
+          throw new IllegalArgumentException("can only advance beyond current document: on docID=" + docID + " but targetDocID=" + targetDocID);
+        }
+        int readerIndex = ReaderUtil.subIndex(targetDocID, leaves);
+        if (readerIndex >= nextLeaf) {
+          if (readerIndex == leaves.size()) {
+            throw new IllegalArgumentException("Out of range: " + targetDocID);
+          }
+          currentLeaf = leaves.get(readerIndex);
+          currentValues = values[readerIndex];
+          nextLeaf = readerIndex+1;
+        }
+        docID = targetDocID;
+        if (currentValues == null) {
+          return false;
+        }
+        return currentValues.advanceExact(targetDocID - currentLeaf.docBase);
+      }
+
+      @Override
       public long cost() {
         return finalTotalCost;
       }
@@ -923,6 +1006,27 @@ public class MultiDocValues {
     }
     
     @Override
+    public boolean advanceExact(int targetDocID) throws IOException {
+      if (targetDocID <= docID) {
+        throw new IllegalArgumentException("can only advance beyond current document: on docID=" + docID + " but targetDocID=" + targetDocID);
+      }
+      int readerIndex = ReaderUtil.subIndex(targetDocID, docStarts);
+      if (readerIndex >= nextLeaf) {
+        if (readerIndex == values.length) {
+          throw new IllegalArgumentException("Out of range: " + targetDocID);
+        }
+        currentDocStart = docStarts[readerIndex];
+        currentValues = values[readerIndex];
+        nextLeaf = readerIndex+1;
+      }
+      docID = targetDocID;
+      if (currentValues == null) {
+        return false;
+      }
+      return currentValues.advanceExact(targetDocID - currentDocStart);
+    }
+    
+    @Override
     public int ordValue() {
       return (int) mapping.getGlobalOrds(nextLeaf-1).get(currentValues.ordValue());
     }
@@ -1029,6 +1133,27 @@ public class MultiDocValues {
     }
 
     @Override
+    public boolean advanceExact(int targetDocID) throws IOException {
+      if (targetDocID < docID) {
+        throw new IllegalArgumentException("can only advance beyond current document: on docID=" + docID + " but targetDocID=" + targetDocID);
+      }
+      int readerIndex = ReaderUtil.subIndex(targetDocID, docStarts);
+      if (readerIndex >= nextLeaf) {
+        if (readerIndex == values.length) {
+          throw new IllegalArgumentException("Out of range: " + targetDocID);
+        }
+        currentDocStart = docStarts[readerIndex];
+        currentValues = values[readerIndex];
+        nextLeaf = readerIndex+1;
+      }
+      docID = targetDocID;
+      if (currentValues == null) {
+        return false;
+      }
+      return currentValues.advanceExact(targetDocID - currentDocStart);
+    }
+
+    @Override
     public long nextOrd() throws IOException {
       long segmentOrd = currentValues.nextOrd();
       if (segmentOrd == NO_MORE_ORDS) {

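Each composite-reader view above resolves the leaf that owns the target document and then calls advanceExact with a leaf-local id. The same dispatch, written against the public reader API as a sketch (reader, field and globalDocId are assumptions of this note):

    import java.io.IOException;
    import java.util.List;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.index.NumericDocValues;
    import org.apache.lucene.index.ReaderUtil;

    final class PerLeafLookupSketch {
      static Long valueOrNull(IndexReader reader, String field, int globalDocId) throws IOException {
        List<LeafReaderContext> leaves = reader.leaves();
        int idx = ReaderUtil.subIndex(globalDocId, leaves);     // leaf that owns this doc
        LeafReaderContext leaf = leaves.get(idx);
        NumericDocValues values = leaf.reader().getNumericDocValues(field);
        if (values == null || values.advanceExact(globalDocId - leaf.docBase) == false) {
          return null;                                          // no values in this leaf, or doc has none
        }
        return values.longValue();
      }
    }
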
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/NormValuesWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/NormValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/NormValuesWriter.java
index 46b8c1c..b0d05e4 100644
--- a/lucene/core/src/java/org/apache/lucene/index/NormValuesWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/NormValuesWriter.java
@@ -133,6 +133,11 @@ class NormValuesWriter {
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      throw new UnsupportedOperationException();
+    }
+
+    @Override
     public long cost() {
       return docsWithField.cost();
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/NumericDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/NumericDocValues.java b/lucene/core/src/java/org/apache/lucene/index/NumericDocValues.java
index 5ae2e47..29b9918 100644
--- a/lucene/core/src/java/org/apache/lucene/index/NumericDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/NumericDocValues.java
@@ -19,12 +19,10 @@ package org.apache.lucene.index;
 
 import java.io.IOException;
 
-import org.apache.lucene.search.DocIdSetIterator;
-
 /**
  * A per-document numeric value.
  */
-public abstract class NumericDocValues extends DocIdSetIterator {
+public abstract class NumericDocValues extends DocValuesIterator {
   
   /** Sole constructor. (For invocation by subclass 
    *  constructors, typically implicit.) */
@@ -32,7 +30,10 @@ public abstract class NumericDocValues extends DocIdSetIterator {
 
   /**
    * Returns the numeric value for the current document ID.
+   * It is illegal to call this method after {@link #advanceExact(int)}
+   * returned {@code false}.
    * @return numeric value
    */
   public abstract long longValue() throws IOException;
+
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/NumericDocValuesWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/NumericDocValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/NumericDocValuesWriter.java
index adfa706..24a7010 100644
--- a/lucene/core/src/java/org/apache/lucene/index/NumericDocValuesWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/NumericDocValuesWriter.java
@@ -119,6 +119,11 @@ class NumericDocValuesWriter extends DocValuesWriter {
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      throw new UnsupportedOperationException();
+    }
+
+    @Override
     public long cost() {
       return docsWithField.cost();
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java b/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java
index 894c81a..3cd465c 100644
--- a/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java
+++ b/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java
@@ -360,6 +360,11 @@ class ReadersAndUpdates {
                 }
 
                 @Override
+                public boolean advanceExact(int target) throws IOException {
+                  throw new UnsupportedOperationException();
+                }
+
+                @Override
                 public long cost() {
                   // TODO
                   return 0;
@@ -462,6 +467,11 @@ class ReadersAndUpdates {
                 }
 
                 @Override
+                public boolean advanceExact(int target) throws IOException {
+                  throw new UnsupportedOperationException();
+                }
+
+                @Override
                 public long cost() {
                   return currentValues.cost();
                 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/SingletonSortedNumericDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SingletonSortedNumericDocValues.java b/lucene/core/src/java/org/apache/lucene/index/SingletonSortedNumericDocValues.java
index d95f0c0..5dbdec8 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SingletonSortedNumericDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SingletonSortedNumericDocValues.java
@@ -27,7 +27,6 @@ import java.io.IOException;
  */
 final class SingletonSortedNumericDocValues extends SortedNumericDocValues {
   private final NumericDocValues in;
-  private long value;
   
   public SingletonSortedNumericDocValues(NumericDocValues in) {
     if (in.docID() != -1) {
@@ -51,30 +50,27 @@ final class SingletonSortedNumericDocValues extends SortedNumericDocValues {
 
   @Override
   public int nextDoc() throws IOException {
-    int docID = in.nextDoc();
-    if (docID != NO_MORE_DOCS) {
-      value = in.longValue();
-    }
-    return docID;
+    return in.nextDoc();
   }
   
   @Override
   public int advance(int target) throws IOException {
-    int docID = in.advance(target);
-    if (docID != NO_MORE_DOCS) {
-      value = in.longValue();
-    }
-    return docID;
+    return in.advance(target);
+  }
+
+  @Override
+  public boolean advanceExact(int target) throws IOException {
+    return in.advanceExact(target);
   }
-      
+
   @Override
   public long cost() {
     return in.cost();
   }
   
   @Override
-  public long nextValue() {
-    return value;
+  public long nextValue() throws IOException {
+    return in.longValue();
   }
 
   @Override

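With advanceExact in place the singleton view no longer needs to cache the wrapped value on every positioning call; nextValue() simply reads it on demand. A small sketch of the view in use, assuming a single-valued NumericDocValues obtained elsewhere and wrapped via DocValues.singleton:

    import java.io.IOException;
    import org.apache.lucene.index.DocValues;
    import org.apache.lucene.index.NumericDocValues;
    import org.apache.lucene.index.SortedNumericDocValues;

    final class SingletonViewSketch {
      static long firstValueOr(NumericDocValues single, int docId, long missing) throws IOException {
        SortedNumericDocValues multi = DocValues.singleton(single);
        if (multi.advanceExact(docId)) {
          return multi.nextValue();   // exactly one value per positioned document
        }
        return missing;
      }
    }
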
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/SingletonSortedSetDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SingletonSortedSetDocValues.java b/lucene/core/src/java/org/apache/lucene/index/SingletonSortedSetDocValues.java
index cc7360e..f16cdf1 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SingletonSortedSetDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SingletonSortedSetDocValues.java
@@ -29,7 +29,6 @@ import org.apache.lucene.util.BytesRef;
  */
 final class SingletonSortedSetDocValues extends SortedSetDocValues {
   private final SortedDocValues in;
-  private long currentOrd;
   private long ord;
   
   /** Creates a multi-valued view over the provided SortedDocValues */
@@ -55,8 +54,8 @@ final class SingletonSortedSetDocValues extends SortedSetDocValues {
 
   @Override
   public long nextOrd() {
-    long v = currentOrd;
-    currentOrd = NO_MORE_ORDS;
+    long v = ord;
+    ord = NO_MORE_ORDS;
     return v;
   }
 
@@ -64,7 +63,7 @@ final class SingletonSortedSetDocValues extends SortedSetDocValues {
   public int nextDoc() throws IOException {
     int docID = in.nextDoc();
     if (docID != NO_MORE_DOCS) {
-      currentOrd = ord = in.ordValue();
+      ord = in.ordValue();
     }
     return docID;
   }
@@ -73,12 +72,21 @@ final class SingletonSortedSetDocValues extends SortedSetDocValues {
   public int advance(int target) throws IOException {
     int docID = in.advance(target);
     if (docID != NO_MORE_DOCS) {
-      currentOrd = ord = in.ordValue();
+      ord = in.ordValue();
     }
     return docID;
   }
 
   @Override
+  public boolean advanceExact(int target) throws IOException {
+    if (in.advanceExact(target)) {
+      ord = in.ordValue();
+      return true;
+    }
+    return false;
+  }
+
+  @Override
   public BytesRef lookupOrd(long ord) throws IOException {
     // cast is ok: single-valued cannot exceed Integer.MAX_VALUE
     return in.lookupOrd((int) ord);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/SortedDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortedDocValues.java b/lucene/core/src/java/org/apache/lucene/index/SortedDocValues.java
index 7ff084f..e2d7dfd 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SortedDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SortedDocValues.java
@@ -40,6 +40,8 @@ public abstract class SortedDocValues extends BinaryDocValues {
 
   /**
    * Returns the ordinal for the current docID.
+   * It is illegal to call this method after {@link #advanceExact(int)}
+   * returned {@code false}.
    * @return ordinal for the document: this is dense, starts at 0, then
    *         increments by 1 for the next value in sorted order.
    */
@@ -107,4 +109,5 @@ public abstract class SortedDocValues extends BinaryDocValues {
   public TermsEnum termsEnum() throws IOException {
     return new SortedDocValuesTermsEnum(this);
   }
+
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesWriter.java
index 885ee89..7e43e49 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesWriter.java
@@ -166,6 +166,11 @@ class SortedDocValuesWriter extends DocValuesWriter {
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      throw new UnsupportedOperationException();
+    }
+
+    @Override
     public long cost() {
       return docsWithField.cost();
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValues.java b/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValues.java
index 8c11495..a76b46d 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValues.java
@@ -18,14 +18,12 @@ package org.apache.lucene.index;
 
 import java.io.IOException;
 
-import org.apache.lucene.search.DocIdSetIterator;
-
 
 /**
  * A list of per-document numeric values, sorted 
  * according to {@link Long#compare(long, long)}.
  */
-public abstract class SortedNumericDocValues extends DocIdSetIterator {
+public abstract class SortedNumericDocValues extends DocValuesIterator {
   
   /** Sole constructor. (For invocation by subclass 
    *  constructors, typically implicit.) */
@@ -40,6 +38,8 @@ public abstract class SortedNumericDocValues extends DocIdSetIterator {
   /** 
    * Retrieves the number of values for the current document.  This must always
    * be greater than zero.
+   * It is illegal to call this method after {@link #advanceExact(int)}
+   * returned {@code false}.
    */
   public abstract int docValueCount();
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValuesWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValuesWriter.java
index e154547..3f50623 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValuesWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValuesWriter.java
@@ -155,6 +155,11 @@ class SortedNumericDocValuesWriter extends DocValuesWriter {
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      throw new UnsupportedOperationException();
+    }
+
+    @Override
     public int docValueCount() {
       return valueCount;
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValues.java b/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValues.java
index 439843b..6d02c25 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValues.java
@@ -19,7 +19,6 @@ package org.apache.lucene.index;
 
 import java.io.IOException;
 
-import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.util.BytesRef;
 
 /**
@@ -30,7 +29,7 @@ import org.apache.lucene.util.BytesRef;
  * dictionary value (ordinal) can be retrieved for each document. Ordinals
  * are dense and in increasing sorted order.
  */
-public abstract class SortedSetDocValues extends DocIdSetIterator {
+public abstract class SortedSetDocValues extends DocValuesIterator {
   
   /** Sole constructor. (For invocation by subclass 
    * constructors, typically implicit.) */
@@ -43,6 +42,8 @@ public abstract class SortedSetDocValues extends DocIdSetIterator {
 
   /** 
    * Returns the next ordinal for the current document.
+   * It is illegal to call this method after {@link #advanceExact(int)}
+   * returned {@code false}.
    * @return next ordinal for the document, or {@link #NO_MORE_ORDS}. 
    *         ordinals are dense, start at 0, then increment by 1 for 
    *         the next value in sorted order. 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesWriter.java
index e7d915f..35157d4 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesWriter.java
@@ -226,6 +226,11 @@ class SortedSetDocValuesWriter extends DocValuesWriter {
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      throw new UnsupportedOperationException();
+    }
+
+    @Override
     public long getValueCount() {
       return ordMap.length;
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/index/SortingLeafReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortingLeafReader.java b/lucene/core/src/java/org/apache/lucene/index/SortingLeafReader.java
index 8139ed1..4fb5027 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SortingLeafReader.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SortingLeafReader.java
@@ -186,6 +186,12 @@ class SortingLeafReader extends FilterLeafReader {
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      docID = target;
+      return dvs.docsWithField.get(target);
+    }
+
+    @Override
     public BytesRef binaryValue() {
       return dvs.values[docID];
     }
@@ -255,6 +261,12 @@ class SortingLeafReader extends FilterLeafReader {
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      docID = target;
+      return dvs.docsWithField.get(target);
+    }
+
+    @Override
     public long longValue() {
       return dvs.values[docID];
     }
@@ -395,6 +407,12 @@ class SortingLeafReader extends FilterLeafReader {
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      docID = target;
+      return ords[target] != -1;
+    }
+
+    @Override
     public int ordValue() {
       return ords[docID];
     }
@@ -468,6 +486,13 @@ class SortingLeafReader extends FilterLeafReader {
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      docID = target;
+      ordUpto = 0;
+      return ords[docID] != null;
+    }
+
+    @Override
     public long nextOrd() {
       if (ordUpto == ords[docID].length) {
         return NO_MORE_ORDS;
@@ -539,6 +564,13 @@ class SortingLeafReader extends FilterLeafReader {
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      docID = target;
+      upto = 0;
+      return values[docID] != null;
+    }
+
+    @Override
     public long nextValue() {
       if (upto == values[docID].length) {
         throw new AssertionError();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/search/FieldComparator.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/FieldComparator.java b/lucene/core/src/java/org/apache/lucene/search/FieldComparator.java
index b6c17c0..8216201 100644
--- a/lucene/core/src/java/org/apache/lucene/search/FieldComparator.java
+++ b/lucene/core/src/java/org/apache/lucene/search/FieldComparator.java
@@ -178,11 +178,7 @@ public abstract class FieldComparator<T> {
     }
 
     private double getValueForDoc(int doc) throws IOException {
-      int curDocID = currentReaderValues.docID();
-      if (doc > curDocID) {
-        curDocID = currentReaderValues.advance(doc);
-      }
-      if (doc == curDocID) {
+      if (currentReaderValues.advanceExact(doc)) {
         return Double.longBitsToDouble(currentReaderValues.longValue());
       } else {
         return missingValue;
@@ -242,11 +238,7 @@ public abstract class FieldComparator<T> {
     }
     
     private float getValueForDoc(int doc) throws IOException {
-      int curDocID = currentReaderValues.docID();
-      if (doc > curDocID) {
-        curDocID = currentReaderValues.advance(doc);
-      }
-      if (doc == curDocID) {
+      if (currentReaderValues.advanceExact(doc)) {
         return Float.intBitsToFloat((int) currentReaderValues.longValue());
       } else {
         return missingValue;
@@ -308,11 +300,7 @@ public abstract class FieldComparator<T> {
     }
 
     private int getValueForDoc(int doc) throws IOException {
-      int curDocID = currentReaderValues.docID();
-      if (doc > curDocID) {
-        curDocID = currentReaderValues.advance(doc);
-      }
-      if (doc == curDocID) {
+      if (currentReaderValues.advanceExact(doc)) {
         return (int) currentReaderValues.longValue();
       } else {
         return missingValue;
@@ -372,11 +360,7 @@ public abstract class FieldComparator<T> {
     }
 
     private long getValueForDoc(int doc) throws IOException {
-      int curDocID = currentReaderValues.docID();
-      if (doc > curDocID) {
-        curDocID = currentReaderValues.advance(doc);
-      }
-      if (doc == curDocID) {
+      if (currentReaderValues.advanceExact(doc)) {
         return currentReaderValues.longValue();
       } else {
         return missingValue;
@@ -656,15 +640,11 @@ public abstract class FieldComparator<T> {
     }
 
     private int getOrdForDoc(int doc) throws IOException {
-      int curDocID = termsIndex.docID();
-      if (doc > curDocID) {
-        if (termsIndex.advance(doc) == doc) {
-          return termsIndex.ordValue();
-        }
-      } else if (doc == curDocID) {
+      if (termsIndex.advanceExact(doc)) {
         return termsIndex.ordValue();
+      } else {
+        return -1;
       }
-      return -1;
     }
 
     @Override
@@ -864,11 +844,7 @@ public abstract class FieldComparator<T> {
     }
 
     private BytesRef getValueForDoc(int doc) throws IOException {
-      int curDocID = docTerms.docID();
-      if (doc > curDocID) {
-        curDocID = docTerms.advance(doc);
-      }
-      if (doc == curDocID) {
+      if (docTerms.advanceExact(doc)) {
         return docTerms.binaryValue();
       } else {
         return null;

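The comparator changes above all follow the same shape: the manual docID()/advance() positioning is collapsed into a single advanceExact call. Side by side as a sketch (any NumericDocValues works here):

    import java.io.IOException;
    import org.apache.lucene.index.NumericDocValues;

    final class BeforeAfterSketch {
      // Old pattern: inspect docID(), maybe advance(), then compare.
      static boolean hasValueOld(NumericDocValues values, int doc) throws IOException {
        int cur = values.docID();
        if (doc > cur) {
          cur = values.advance(doc);
        }
        return doc == cur;
      }

      // New pattern: one call, and the iterator ends up positioned on doc.
      static boolean hasValueNew(NumericDocValues values, int doc) throws IOException {
        return values.advanceExact(doc);
      }
    }
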
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/search/SortedNumericSelector.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/SortedNumericSelector.java b/lucene/core/src/java/org/apache/lucene/search/SortedNumericSelector.java
index 43e97e7..705454e 100644
--- a/lucene/core/src/java/org/apache/lucene/search/SortedNumericSelector.java
+++ b/lucene/core/src/java/org/apache/lucene/search/SortedNumericSelector.java
@@ -132,6 +132,15 @@ public class SortedNumericSelector {
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      if (in.advanceExact(target)) {
+        value = in.nextValue();
+        return true;
+      }
+      return false;
+    }
+
+    @Override
     public long cost() {
       return in.cost();
     }
@@ -182,6 +191,15 @@ public class SortedNumericSelector {
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      if (in.advanceExact(target)) {
+        setValue();
+        return true;
+      }
+      return false;
+    }
+
+    @Override
     public long cost() {
       return in.cost();
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aca4c9d/lucene/core/src/java/org/apache/lucene/search/SortedSetSelector.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/SortedSetSelector.java b/lucene/core/src/java/org/apache/lucene/search/SortedSetSelector.java
index f10dbf7..2d6c351 100644
--- a/lucene/core/src/java/org/apache/lucene/search/SortedSetSelector.java
+++ b/lucene/core/src/java/org/apache/lucene/search/SortedSetSelector.java
@@ -118,6 +118,15 @@ public class SortedSetSelector {
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      if (in.advanceExact(target)) {
+        setOrd();
+        return true;
+      }
+      return false;
+    }
+
+    @Override
     public long cost() {
       return in.cost();
     }
@@ -180,6 +189,15 @@ public class SortedSetSelector {
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      if (in.advanceExact(target)) {
+        setOrd();
+        return true;
+      }
+      return false;
+    }
+
+    @Override
     public long cost() {
       return in.cost();
     }
@@ -249,6 +267,15 @@ public class SortedSetSelector {
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      if (in.advanceExact(target)) {
+        setOrd();
+        return true;
+      }
+      return false;
+    }
+
+    @Override
     public long cost() {
       return in.cost();
     }
@@ -330,6 +357,15 @@ public class SortedSetSelector {
     }
 
     @Override
+    public boolean advanceExact(int target) throws IOException {
+      if (in.advanceExact(target)) {
+        setOrd();
+        return true;
+      }
+      return false;
+    }
+
+    @Override
     public long cost() {
       return in.cost();
     }