Posted to commits@lucene.apache.org by ab...@apache.org on 2017/03/23 11:49:48 UTC

[01/46] lucene-solr:jira/solr-9959: LUCENE-7742: fix places where we were unboxing and then re-boxing according to FindBugs

Repository: lucene-solr
Updated Branches:
  refs/heads/jira/solr-9959 8e0c23096 -> d7772e731


LUCENE-7742: fix places where we were unboxing and then re-boxing according to FindBugs
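
For context, the boxing round-trip FindBugs flags looks like the LatLonDocValuesField change in this patch: the value is cast to Long, re-boxed through Long.valueOf(), and then unboxed again on assignment to a primitive. A minimal standalone sketch (the fieldsData value here is illustrative, not from the patch):

    Object fieldsData = Long.valueOf(42L);           // value held as a boxed Long

    // before: the Long is auto-unboxed for Long.valueOf(long), boxed again by
    // valueOf, and unboxed once more when assigned to the primitive
    long before = Long.valueOf((Long) fieldsData);

    // after: a single cast-and-unbox is enough
    long after = (Long) fieldsData;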


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/716d43ec
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/716d43ec
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/716d43ec

Branch: refs/heads/jira/solr-9959
Commit: 716d43eca936e3ea5325a0ee0cceb087a4ff32e5
Parents: 124b505
Author: Mike McCandless <mi...@apache.org>
Authored: Wed Mar 15 06:03:54 2017 -0400
Committer: Mike McCandless <mi...@apache.org>
Committed: Wed Mar 15 06:03:54 2017 -0400

----------------------------------------------------------------------
 lucene/CHANGES.txt                                              | 3 +++
 .../java/org/apache/lucene/document/LatLonDocValuesField.java   | 2 +-
 .../java/org/apache/lucene/spatial3d/Geo3DDocValuesField.java   | 2 +-
 .../update/processor/TikaLanguageIdentifierUpdateProcessor.java | 5 +++--
 solr/core/src/java/org/apache/solr/core/RequestParams.java      | 2 +-
 .../src/java/org/apache/solr/handler/sql/SolrEnumerator.java    | 4 ++--
 solr/core/src/java/org/apache/solr/schema/NumericFieldType.java | 4 ++--
 solr/core/src/java/org/apache/solr/search/Grouping.java         | 2 +-
 .../core/src/java/org/apache/solr/search/SolrIndexSearcher.java | 2 +-
 .../responseprocessor/SearchGroupShardResponseProcessor.java    | 2 +-
 .../org/apache/solr/security/AutorizationEditOperation.java     | 2 +-
 .../test/org/apache/solr/search/TestCollapseQParserPlugin.java  | 2 +-
 .../test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java  | 2 +-
 .../org/apache/solr/client/solrj/io/eval/EqualsEvaluator.java   | 2 +-
 .../src/java/org/apache/solr/common/cloud/DocCollection.java    | 2 +-
 .../src/test/org/apache/solr/common/params/SolrParamTest.java   | 4 ++--
 16 files changed, 23 insertions(+), 19 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/716d43ec/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index e14ab53..62f4763 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -256,6 +256,9 @@ Optimizations
 * LUCENE-7699: Query parsers now use span queries to produce more efficient
   phrase queries for multi-token synonyms. (Matt Webber via Jim Ferenczi)
 
+* LUCENE-7742: Fix places where we were unboxing and then re-boxing
+  according to FindBugs (Daniel Jelinski via Mike McCandless)
+
 Build
 
 * LUCENE-7653: Update randomizedtesting to version 2.5.0. (Dawid Weiss)

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/716d43ec/lucene/sandbox/src/java/org/apache/lucene/document/LatLonDocValuesField.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonDocValuesField.java b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonDocValuesField.java
index 08a7da7..10e90b4 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonDocValuesField.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonDocValuesField.java
@@ -106,7 +106,7 @@ public class LatLonDocValuesField extends Field {
     result.append(name);
     result.append(':');
 
-    long currentValue = Long.valueOf((Long)fieldsData);
+    long currentValue = (Long)fieldsData;
     result.append(decodeLatitude((int)(currentValue >> 32)));
     result.append(',');
     result.append(decodeLongitude((int)(currentValue & 0xFFFFFFFF)));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/716d43ec/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/Geo3DDocValuesField.java
----------------------------------------------------------------------
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/Geo3DDocValuesField.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/Geo3DDocValuesField.java
index 551fa5a..dc9df35 100644
--- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/Geo3DDocValuesField.java
+++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/Geo3DDocValuesField.java
@@ -305,7 +305,7 @@ public class Geo3DDocValuesField extends Field {
     result.append(name);
     result.append(':');
 
-    long currentValue = Long.valueOf((Long)fieldsData);
+    long currentValue = (Long)fieldsData;
     
     result.append(decodeXValue(currentValue));
     result.append(',');

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/716d43ec/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java
----------------------------------------------------------------------
diff --git a/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java b/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java
index 3d3fa41..836a3bf 100644
--- a/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java
+++ b/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java
@@ -56,8 +56,9 @@ public class TikaLanguageIdentifierUpdateProcessor extends LanguageIdentifierUpd
       // FIXME: Hack - we get the distance from toString and calculate our own certainty score
       Double distance = Double.parseDouble(tikaSimilarityPattern.matcher(identifier.toString()).replaceFirst("$1"));
       // This formula gives: 0.02 => 0.8, 0.1 => 0.5 which is a better sweetspot than isReasonablyCertain()
-      Double certainty = 1 - (5 * distance); 
-      certainty = (certainty < 0) ? 0 : certainty;
+      Double certainty = 1 - (5 * distance);
+      if (certainty < 0)
+        certainty = 0d;
       DetectedLanguage language = new DetectedLanguage(identifier.getLanguage(), certainty);
       languages.add(language);
       log.debug("Language detected as "+language+" with a certainty of "+language.getCertainty()+" (Tika distance="+identifier.toString()+")");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/716d43ec/solr/core/src/java/org/apache/solr/core/RequestParams.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/RequestParams.java b/solr/core/src/java/org/apache/solr/core/RequestParams.java
index ff0d36c..fbb2555 100644
--- a/solr/core/src/java/org/apache/solr/core/RequestParams.java
+++ b/solr/core/src/java/org/apache/solr/core/RequestParams.java
@@ -222,7 +222,7 @@ public class RequestParams implements MapSerializable {
     }
 
     public Long getVersion() {
-      return meta == null ? 0l : (Long) meta.get("v");
+      return meta == null ? Long.valueOf(0l) : (Long) meta.get("v");
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/716d43ec/solr/core/src/java/org/apache/solr/handler/sql/SolrEnumerator.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/sql/SolrEnumerator.java b/solr/core/src/java/org/apache/solr/handler/sql/SolrEnumerator.java
index 6f9dddf..be6046c 100644
--- a/solr/core/src/java/org/apache/solr/handler/sql/SolrEnumerator.java
+++ b/solr/core/src/java/org/apache/solr/handler/sql/SolrEnumerator.java
@@ -103,10 +103,10 @@ class SolrEnumerator implements Enumerator<Object> {
   private Object getRealVal(Object val) {
     // Check if Double is really a Long
     if(val instanceof Double) {
-      Double doubleVal = (double) val;
+      double doubleVal = (double) val;
       //make sure that double has no decimals and fits within Long
       if(doubleVal % 1 == 0 && doubleVal >= Long.MIN_VALUE && doubleVal <= Long.MAX_VALUE) {
-        return doubleVal.longValue();
+        return (long)doubleVal;
       }
       return doubleVal;
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/716d43ec/solr/core/src/java/org/apache/solr/schema/NumericFieldType.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/NumericFieldType.java b/solr/core/src/java/org/apache/solr/schema/NumericFieldType.java
index 5801766..6cda9ca 100644
--- a/solr/core/src/java/org/apache/solr/schema/NumericFieldType.java
+++ b/solr/core/src/java/org/apache/solr/schema/NumericFieldType.java
@@ -114,10 +114,10 @@ public abstract class NumericFieldType extends PrimitiveFieldType {
       if ((minVal == null || minVal.doubleValue() < 0d || minBits == minusZeroBits) &&
           (maxVal != null && (maxVal.doubleValue() < 0d || maxBits == minusZeroBits))) {
         query = numericDocValuesRangeQuery
-            (fieldName, maxBits, (min == null ? negativeInfinityBits : minBits), maxInclusive, minInclusive, false);
+            (fieldName, maxBits, (min == null ? Long.valueOf(negativeInfinityBits) : minBits), maxInclusive, minInclusive, false);
       } else { // If both max and min are positive, then issue range query
         query = numericDocValuesRangeQuery
-            (fieldName, minBits, (max == null ? positiveInfinityBits : maxBits), minInclusive, maxInclusive, false);
+            (fieldName, minBits, (max == null ? Long.valueOf(positiveInfinityBits) : maxBits), minInclusive, maxInclusive, false);
       }
     }
     return query;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/716d43ec/solr/core/src/java/org/apache/solr/search/Grouping.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/Grouping.java b/solr/core/src/java/org/apache/solr/search/Grouping.java
index 327e5bb..302383a 100644
--- a/solr/core/src/java/org/apache/solr/search/Grouping.java
+++ b/solr/core/src/java/org/apache/solr/search/Grouping.java
@@ -601,7 +601,7 @@ public class Grouping {
       groupResult.add("matches", matches);
       if (totalCount == TotalCount.grouped) {
         Integer totalNrOfGroups = getNumberOfGroups();
-        groupResult.add("ngroups", totalNrOfGroups == null ? 0 : totalNrOfGroups);
+        groupResult.add("ngroups", totalNrOfGroups == null ? Integer.valueOf(0) : totalNrOfGroups);
       }
       maxMatches = Math.max(maxMatches, matches);
       return groupResult;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/716d43ec/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index a7ee433..19e47d0 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@ -847,7 +847,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
                 newVal = val.intValue();
                 break;
               case LONG:
-                newVal = val.longValue();
+                newVal = val;
                 break;
               case FLOAT:
                 newVal = Float.intBitsToFloat(val.intValue());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/716d43ec/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java
index 1645b1e..a12cad1 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java
@@ -119,7 +119,7 @@ public class SearchGroupShardResponseProcessor implements ShardResponseProcessor
         if (groupCount != null) {
           Integer existingGroupCount = rb.mergedGroupCounts.get(field);
           // Assuming groups don't cross shard boundary...
-          rb.mergedGroupCounts.put(field, existingGroupCount != null ? existingGroupCount + groupCount : groupCount);
+          rb.mergedGroupCounts.put(field, existingGroupCount != null ? Integer.valueOf(existingGroupCount + groupCount) : groupCount);
         }
 
         final Collection<SearchGroup<BytesRef>> searchGroups = firstPhaseCommandResult.getSearchGroups();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/716d43ec/solr/core/src/java/org/apache/solr/security/AutorizationEditOperation.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/security/AutorizationEditOperation.java b/solr/core/src/java/org/apache/solr/security/AutorizationEditOperation.java
index 88c7987..6a5230c 100644
--- a/solr/core/src/java/org/apache/solr/security/AutorizationEditOperation.java
+++ b/solr/core/src/java/org/apache/solr/security/AutorizationEditOperation.java
@@ -83,7 +83,7 @@ enum AutorizationEditOperation {
       boolean indexSatisfied = index == null;
       for (int i = 0; i < permissions.size(); i++) {
         Map perm = permissions.get(i);
-        Integer thisIdx = (int) perm.get("index");
+        Integer thisIdx = (Integer) perm.get("index");
         if (thisIdx.equals(beforeIdx)) {
           beforeSatisfied = true;
           permissionsCopy.add(dataMap);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/716d43ec/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java
index 2fca452..ea4114a 100644
--- a/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java
+++ b/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java
@@ -336,7 +336,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
 
     if(boostedResults.size() == controlResults.size()) {
       for(int i=0; i<boostedResults.size(); i++) {
-        if(!boostedResults.get(i).equals(controlResults.get(i).intValue())) {
+        if(!boostedResults.get(i).equals(controlResults.get(i))) {
           throw new Exception("boosted results do not match control results, boostedResults size:"+boostedResults.toString()+", controlResults size:"+controlResults.toString());
         }
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/716d43ec/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
index 7a4fa86..4538e90 100644
--- a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
+++ b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
@@ -342,7 +342,7 @@ public class TestInPlaceUpdatesDistrib extends AbstractFullDistribZkTestBase {
     SolrDocumentList results = LEADER.query(params).getResults();
     assertEquals(numDocs, results.size());
     for (SolrDocument doc : results) {
-      luceneDocids.add((int) doc.get("[docid]"));
+      luceneDocids.add((Integer) doc.get("[docid]"));
       valuesList.add((Float) doc.get("inplace_updatable_float"));
     }
     log.info("Initial results: "+results);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/716d43ec/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EqualsEvaluator.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EqualsEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EqualsEvaluator.java
index 051a2de..c467a16 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EqualsEvaluator.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EqualsEvaluator.java
@@ -86,7 +86,7 @@ public class EqualsEvaluator extends BooleanEvaluator {
       return new BooleanChecker(){
         @Override
         public boolean test(Object left, Object right) {
-          return (boolean)left.equals((boolean)right);
+          return (boolean)left == (boolean)right;
         }
       };
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/716d43ec/solr/solrj/src/java/org/apache/solr/common/cloud/DocCollection.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/DocCollection.java b/solr/solrj/src/java/org/apache/solr/common/cloud/DocCollection.java
index bf0f04f..d89b2f6 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/DocCollection.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/DocCollection.java
@@ -85,7 +85,7 @@ public class DocCollection extends ZkNodeProps implements Iterable<Slice> {
     this.replicationFactor = (Integer) verifyProp(props, REPLICATION_FACTOR);
     this.maxShardsPerNode = (Integer) verifyProp(props, MAX_SHARDS_PER_NODE);
     Boolean autoAddReplicas = (Boolean) verifyProp(props, AUTO_ADD_REPLICAS);
-    this.autoAddReplicas = autoAddReplicas == null ? false : autoAddReplicas;
+    this.autoAddReplicas = autoAddReplicas == null ? Boolean.FALSE : autoAddReplicas;
     Integer realtimeReplicas = (Integer) verifyProp(props, REALTIME_REPLICAS);
     this.realtimeReplicas = realtimeReplicas == null ? -1 : realtimeReplicas;
     if (this.realtimeReplicas != -1 && this.realtimeReplicas != 1) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/716d43ec/solr/solrj/src/test/org/apache/solr/common/params/SolrParamTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/common/params/SolrParamTest.java b/solr/solrj/src/test/org/apache/solr/common/params/SolrParamTest.java
index 48237c5..80f9036 100644
--- a/solr/solrj/src/test/org/apache/solr/common/params/SolrParamTest.java
+++ b/solr/solrj/src/test/org/apache/solr/common/params/SolrParamTest.java
@@ -213,10 +213,10 @@ public class SolrParamTest extends LuceneTestCase {
     
     // Get things with defaults
     assertEquals( pstr                  , params.get(          "xxx", pstr   ) );
-    assertEquals( pbool.booleanValue()  , params.getBool(      "xxx", pbool   ) );
+    assertEquals( pbool                 , params.getBool(      "xxx", pbool   ) );
     assertEquals( pint.intValue()       , params.getInt(       "xxx", pint   ) );
     assertEquals( pfloat.floatValue()   , params.getFloat(     "xxx", pfloat  ), 0.1);
-    assertEquals( pbool.booleanValue()  , params.getFieldBool( "xxx", "bool", pbool ) );
+    assertEquals( pbool                 , params.getFieldBool( "xxx", "bool", pbool ) );
     assertEquals( pint.intValue()       , params.getFieldInt(  "xxx", "int", pint  ) );
     assertEquals( pfloat.floatValue()   , params.getFieldFloat("xxx", "float", pfloat  ), 0.1);
     assertEquals( pstr                  , params.getFieldParam("xxx", "str", pstr  ) );


[37/46] lucene-solr:jira/solr-9959: SOLR-7452: json facet API, refine/skip through buckets already visited

Posted by ab...@apache.org.
SOLR-7452: json facet API, refine/skip through buckets already visited
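
The per-shard refinement info that refineFacets() now consumes carries separate bucket lists: "_l" holds leaf buckets to refine fully, while "_s" holds buckets that were already visited and are only skipped through, each entry being a pair of the bucket value and the facet info for its sub-facets (a third key, "_m", is read the same way for missing buckets). A sketch of the shape, adapted from the comment block added in FacetFieldProcessor.java; the facet names and bucket values are illustrative:

    {
      "cat1"  : { "_l": ["A"] },                              // refine leaf bucket "A" fully
      "qfacet": { "cat2": { "_l": ["A"] } },                  // refinement reached through a query facet
      "all"   : { "_s": [ [ "all", { "cat3": { "_l": ["A"] } } ] ] }
                                                              // skip through bucket "all", refining only
                                                              // its nested cat3 facet
    }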


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/6786089b
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/6786089b
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/6786089b

Branch: refs/heads/jira/solr-9959
Commit: 6786089b0bc8be50287c2527874ca4503114addd
Parents: 4171ef7
Author: yonik <yo...@apache.org>
Authored: Tue Mar 21 08:42:33 2017 -0400
Committer: yonik <yo...@apache.org>
Committed: Tue Mar 21 08:42:47 2017 -0400

----------------------------------------------------------------------
 .../solr/search/facet/FacetFieldProcessor.java  | 52 +++++++++++++++-----
 .../FacetFieldProcessorByEnumTermsStream.java   |  2 +-
 .../apache/solr/search/facet/FacetModule.java   |  1 +
 .../solr/search/facet/FacetProcessor.java       | 10 ++--
 .../apache/solr/search/facet/FacetQuery.java    |  2 +-
 .../apache/solr/search/facet/FacetRange.java    |  4 +-
 .../search/facet/TestJsonFacetRefinement.java   | 22 +++++----
 7 files changed, 62 insertions(+), 31 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6786089b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
index fb44f62..1ba252e 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
@@ -19,6 +19,7 @@ package org.apache.solr.search.facet;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
@@ -311,7 +312,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
     if (freq.missing) {
       // TODO: it would be more efficient to build up a missing DocSet if we need it here anyway.
       SimpleOrderedMap<Object> missingBucket = new SimpleOrderedMap<>();
-      fillBucket(missingBucket, getFieldMissingQuery(fcontext.searcher, freq.field), null, false);
+      fillBucket(missingBucket, getFieldMissingQuery(fcontext.searcher, freq.field), null, false, null);
       res.add("missing", missingBucket);
     }
 
@@ -379,7 +380,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
       }
     }
 
-    processSubs(target, filter, subDomain, false);
+    processSubs(target, filter, subDomain, false, null);
   }
 
   @Override
@@ -513,31 +514,43 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
   }
 
 
+  /*
+   "qfacet":{"cat2":{"_l":["A"]}},
+   "all":{"_s":[[
+     "all",
+     {"cat3":{"_l":["A"]}}]]},
+   "cat1":{"_l":["A"]}}}
+
+   */
+
+  static <T> List<T> asList(Object list) {
+    return list != null ? (List<T>)list : Collections.EMPTY_LIST;
+  }
 
   protected SimpleOrderedMap<Object> refineFacets() throws IOException {
-    List leaves = (List)fcontext.facetInfo.get("_l");
+    List leaves = asList(fcontext.facetInfo.get("_l"));
+    List<List> skip = asList(fcontext.facetInfo.get("_s"));
+    List<List> missing = asList(fcontext.facetInfo.get("_m"));
 
     // For leaf refinements, we do full faceting for each leaf bucket.  Any sub-facets of these buckets will be fully evaluated.  Because of this, we should never
     // encounter leaf refinements that have sub-facets that return partial results.
 
     SimpleOrderedMap<Object> res = new SimpleOrderedMap<>();
-    List<SimpleOrderedMap> bucketList = new ArrayList<>(leaves.size());
+    List<SimpleOrderedMap> bucketList = new ArrayList<>( leaves.size() + skip.size() + missing.size() );
     res.add("buckets", bucketList);
 
     // TODO: an alternate implementations can fill all accs at once
     createAccs(-1, 1);
 
-    FieldType ft = sf.getType();
     for (Object bucketVal : leaves) {
-      SimpleOrderedMap<Object> bucket = new SimpleOrderedMap<>();
-      bucketList.add(bucket);
-      bucket.add("val", bucketVal);
-
-      // String internal = ft.toInternal( tobj.toString() );  // TODO - we need a better way to get from object to query...
-
-      Query domainQ = ft.getFieldQuery(null, sf, bucketVal.toString());
+      bucketList.add( refineBucket(bucketVal, false, null) );
+    }
+    for (List bucketAndFacetInfo : skip) {
+      assert bucketAndFacetInfo.size() == 2;
+      Object bucketVal = bucketAndFacetInfo.get(0);
+      Map<String,Object> facetInfo = (Map<String, Object>) bucketAndFacetInfo.get(1);
 
-      fillBucket(bucket, domainQ, null, false);
+      bucketList.add( refineBucket(bucketVal, true, facetInfo ) );
     }
 
     // If there are just a couple of leaves, and if the domain is large, then
@@ -548,4 +561,17 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
     return res;
   }
 
+  private SimpleOrderedMap<Object> refineBucket(Object bucketVal, boolean skip, Map<String,Object> facetInfo) throws IOException {
+    SimpleOrderedMap<Object> bucket = new SimpleOrderedMap<>();
+    FieldType ft = sf.getType();
+    bucket.add("val", bucketVal);
+    // String internal = ft.toInternal( tobj.toString() );  // TODO - we need a better way to get from object to query...
+
+    Query domainQ = ft.getFieldQuery(null, sf, bucketVal.toString());
+
+    fillBucket(bucket, domainQ, null, skip, facetInfo);
+
+    return bucket;
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6786089b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByEnumTermsStream.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByEnumTermsStream.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByEnumTermsStream.java
index 94f3b2d..d28e024 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByEnumTermsStream.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByEnumTermsStream.java
@@ -333,7 +333,7 @@ class FacetFieldProcessorByEnumTermsStream extends FacetFieldProcessor implement
         bucket.add("val", bucketVal);
         addStats(bucket, 0);
         if (hasSubFacets) {
-          processSubs(bucket, bucketQuery, termSet, false);
+          processSubs(bucket, bucketQuery, termSet, false, null);
         }
 
         // TODO... termSet needs to stick around for streaming sub-facets?

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6786089b/solr/core/src/java/org/apache/solr/search/facet/FacetModule.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetModule.java b/solr/core/src/java/org/apache/solr/search/facet/FacetModule.java
index 630e968..bf13791 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetModule.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetModule.java
@@ -235,6 +235,7 @@ public class FacetModule extends SearchComponent {
       Map<String,Object> finfo = new HashMap<>(1);
       finfo.put(FACET_REFINE, refinement);
       String finfoStr = JSONUtil.toJSON(finfo);
+      // System.err.println("##################### REFINE=" + finfoStr);
       shardsRefineRequest.params.add(FACET_INFO, finfoStr);
 
       if (newRequest) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6786089b/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java b/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
index de6dd72..cf4d0fe 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
@@ -367,7 +367,7 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
   }
 
   // TODO: rather than just have a raw "response", perhaps we should model as a bucket object that contains the response plus extra info?
-  void fillBucket(SimpleOrderedMap<Object> bucket, Query q, DocSet result, boolean skip) throws IOException {
+  void fillBucket(SimpleOrderedMap<Object> bucket, Query q, DocSet result, boolean skip, Map<String,Object> facetInfo) throws IOException {
 
     // TODO: we don't need the DocSet if we've already calculated everything during the first phase
     boolean needDocSet = freq.getFacetStats().size() > 0 || freq.getSubFacets().size() > 0;
@@ -398,7 +398,7 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
       if (!skip) {
         processStats(bucket, result, count);
       }
-      processSubs(bucket, q, result, skip);
+      processSubs(bucket, q, result, skip, facetInfo);
     } finally {
       if (result != null) {
         // result.decref(); // OFF-HEAP
@@ -407,7 +407,7 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
     }
   }
 
-  void processSubs(SimpleOrderedMap<Object> response, Query filter, DocSet domain, boolean skip) throws IOException {
+  void processSubs(SimpleOrderedMap<Object> response, Query filter, DocSet domain, boolean skip, Map<String,Object> facetInfo) throws IOException {
 
     boolean emptyDomain = domain == null || domain.size() == 0;
 
@@ -423,8 +423,8 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
       }
 
       Map<String,Object>facetInfoSub = null;
-      if (fcontext.facetInfo != null) {
-        facetInfoSub = (Map<String,Object>)fcontext.facetInfo.get(sub.getKey());
+      if (facetInfo != null) {
+        facetInfoSub = (Map<String,Object>)facetInfo.get(sub.getKey());
       }
 
       // If we're skipping this node, then we only need to process sub-facets that have facet info specified.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6786089b/solr/core/src/java/org/apache/solr/search/facet/FacetQuery.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetQuery.java b/solr/core/src/java/org/apache/solr/search/facet/FacetQuery.java
index 584bec3..a6782bf7 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetQuery.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetQuery.java
@@ -61,7 +61,7 @@ class FacetQueryProcessor extends FacetProcessor<FacetQuery> {
       // FIXME - what needs to be done here?
     }
     response = new SimpleOrderedMap<>();
-    fillBucket(response, freq.q, null, (fcontext.flags & FacetContext.SKIP_FACET)!=0);
+    fillBucket(response, freq.q, null, (fcontext.flags & FacetContext.SKIP_FACET)!=0, fcontext.facetInfo);
   }
 
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6786089b/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java
index 5d0989b..682dc19 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java
@@ -350,7 +350,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
     if (freq.getSubFacets().size() > 0) {
       DocSet subBase = intersections[slot];
       try {
-        processSubs(bucket, filters[slot], subBase, false);
+        processSubs(bucket, filters[slot], subBase, false, null);
       } finally {
         // subContext.base.decref();  // OFF-HEAP
         // subContext.base = null;  // do not modify context after creation... there may be deferred execution (i.e. streaming)
@@ -367,7 +367,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
     }
 
     Query rangeQ = sf.getType().getRangeQuery(null, sf, range.low == null ? null : calc.formatValue(range.low), range.high==null ? null : calc.formatValue(range.high), range.includeLower, range.includeUpper);
-    fillBucket(bucket, rangeQ, null, false);
+    fillBucket(bucket, rangeQ, null, false, null);
 
     return bucket;
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6786089b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java
index f23ae8c..869c90b 100644
--- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java
+++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java
@@ -227,16 +227,16 @@ public class TestJsonFacetRefinement extends SolrTestCaseHS {
     String cat_s = p.get("cat_s");
     String num_d = p.get("num_d");
 
-    clients.get(0).add( sdoc("id", "01", cat_s, "A", num_d, -1) ); // A wins count tie
-    clients.get(0).add( sdoc("id", "02", cat_s, "B", num_d, 3) );
+    clients.get(0).add( sdoc("id", "01", "all_s","all", cat_s, "A", num_d, -1) ); // A wins count tie
+    clients.get(0).add( sdoc("id", "02", "all_s","all", cat_s, "B", num_d, 3) );
 
-    clients.get(1).add( sdoc("id", "11", cat_s, "B", num_d, -5) ); // B highest count
-    clients.get(1).add( sdoc("id", "12", cat_s, "B", num_d, -11) );
-    clients.get(1).add( sdoc("id", "13", cat_s, "A", num_d, 7) );
+    clients.get(1).add( sdoc("id", "11", "all_s","all", cat_s, "B", num_d, -5) ); // B highest count
+    clients.get(1).add( sdoc("id", "12", "all_s","all", cat_s, "B", num_d, -11) );
+    clients.get(1).add( sdoc("id", "13", "all_s","all", cat_s, "A", num_d, 7) );
 
-    clients.get(2).add( sdoc("id", "21", cat_s, "A", num_d, 17) ); // A highest count
-    clients.get(2).add( sdoc("id", "22", cat_s, "A", num_d, -19) );
-    clients.get(2).add( sdoc("id", "23", cat_s, "B", num_d, 11) );
+    clients.get(2).add( sdoc("id", "21", "all_s","all", cat_s, "A", num_d, 17) ); // A highest count
+    clients.get(2).add( sdoc("id", "22", "all_s","all", cat_s, "A", num_d, -19) );
+    clients.get(2).add( sdoc("id", "23", "all_s","all", cat_s, "B", num_d, 11) );
 
     client.commit();
 
@@ -291,12 +291,16 @@ public class TestJsonFacetRefinement extends SolrTestCaseHS {
         "json.facet", "{" +
             " cat0:{type:terms, field:${cat_s}, sort:'min1 asc', limit:1, overrequest:0, refine:false, facet:{ min1:'min(${num_d})'}   }" +
             ",cat1:{type:terms, field:${cat_s}, sort:'min1 asc', limit:1, overrequest:0, refine:true,  facet:{ min1:'min(${num_d})'}   }" +
+            ",qfacet:{type:query, q:'*:*', facet:{  cat2:{type:terms, field:${cat_s}, sort:'min1 asc', limit:1, overrequest:0, refine:true,  facet:{ min1:'min(${num_d})'}   }  }}" +  // refinement needed through a query facet
+            ",allf:{type:terms, field:all_s,  facet:{  cat3:{type:terms, field:${cat_s}, sort:'min1 asc', limit:1, overrequest:0, refine:true,  facet:{ min1:'min(${num_d})'}   }  }}" +  // refinement needed through field facet
             ",sum1:'sum(num_d)'" +  // make sure that root bucket stats aren't affected by refinement
             "}"
         )
         , "facets=={ count:8" +
-            ", cat0:{ buckets:[ {val:A,count:3, min1:-19.0} ] }" +  // B wins in shard2, so we're missing the "A" count for that shar w/o refinement.
+            ", cat0:{ buckets:[ {val:A,count:3, min1:-19.0} ] }" +  // B wins in shard2, so we're missing the "A" count for that shard w/o refinement.
             ", cat1:{ buckets:[ {val:A,count:4, min1:-19.0} ] }" +  // with refinement, we get the right count
+            ", qfacet:{ count:8,  cat2:{ buckets:[ {val:A,count:4, min1:-19.0} ] }    }" +  // just like the previous response, just nested under a query facet
+            ", allf:{ buckets:[  {cat3:{ buckets:[ {val:A,count:4, min1:-19.0} ] }  ,count:8,val:all   }]  }" +  // just like the previous response, just nested under a field facet
             ", sum1:2.0" +
             "}"
     );


[12/46] lucene-solr:jira/solr-9959: SOLR-9990: Add PointFields in example/default schemas

Posted by ab...@apache.org.
SOLR-9990: Add PointFields in example/default schemas
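
In practice the change adds *PointField types (pint, plong, pfloat, pdouble, pdate and their multi-valued variants) plus matching *_p* dynamic fields to the shipped managed-schema files; the field types declare docValues="true" because Point fields don't support FieldCache, so doc values are needed for sorting, faceting and functions. A trimmed sketch of the kind of entries added (see the full diffs below):

    <fieldType name="pint" class="solr.IntPointField" docValues="true"/>
    <fieldType name="pdate" class="solr.DatePointField" docValues="true"/>

    <dynamicField name="*_pi"  type="pint"  indexed="true" stored="true"/>
    <dynamicField name="*_pdt" type="pdate" indexed="true" stored="true"/>

so, for example, an ad-hoc field named popularity_pi would be indexed as a KD-tree point integer.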


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/7a625bbb
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/7a625bbb
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/7a625bbb

Branch: refs/heads/jira/solr-9959
Commit: 7a625bbb25c119014e94a952c06d0f789bf04c3c
Parents: 3d81a9c
Author: Tomas Fernandez Lobbe <tf...@apache.org>
Authored: Thu Mar 16 11:08:50 2017 -0700
Committer: Tomas Fernandez Lobbe <tf...@apache.org>
Committed: Thu Mar 16 11:09:42 2017 -0700

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  2 +
 .../basic_configs/conf/managed-schema           | 69 +++++++++++-----
 .../conf/managed-schema                         | 66 ++++++++++-----
 .../conf/managed-schema                         | 85 +++++++++++++++-----
 4 files changed, 159 insertions(+), 63 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7a625bbb/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 6ffb93f..d49b439 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -365,6 +365,8 @@ Other Changes
 
 * SOLR-10247: Support non-numeric metrics and a "compact" format of /admin/metrics output. (ab)
 
+* SOLR-9990: Add PointFields in example/default schemas (Tomás Fernández Löbbe)
+
 ==================  6.4.2 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7a625bbb/solr/server/solr/configsets/basic_configs/conf/managed-schema
----------------------------------------------------------------------
diff --git a/solr/server/solr/configsets/basic_configs/conf/managed-schema b/solr/server/solr/configsets/basic_configs/conf/managed-schema
index 22553d8..d7aacc4 100644
--- a/solr/server/solr/configsets/basic_configs/conf/managed-schema
+++ b/solr/server/solr/configsets/basic_configs/conf/managed-schema
@@ -71,14 +71,14 @@
      indexed: true if this field should be indexed (searchable or sortable)
      stored: true if this field should be retrievable
      docValues: true if this field should have doc values. Doc values are
-       useful for faceting, grouping, sorting and function queries. Although not
-       required, doc values will make the index faster to load, more
-       NRT-friendly and more memory-efficient. They however come with some
-       limitations: they are currently only supported by StrField, UUIDField
-       and all Trie*Fields, and depending on the field type, they might
-       require the field to be single-valued, be required or have a default
-       value (check the documentation of the field type you're interested in
-       for more information)
+       useful (required, if you are using *Point fields) for faceting, 
+       grouping, sorting and function queries. Doc values will make the index 
+       faster to load, more NRT-friendly and more memory-efficient. 
+       They however come with some limitations: they are currently only 
+       supported by StrField, UUIDField, all Trie*Fields and *PointFields,
+       and depending on the field type, they might require the field to be
+       single-valued, be required or have a default value (check the
+       documentation of the field type you're interested in for more information)
      multiValued: true if this field may contain multiple values per document
      omitNorms: (expert) set to true to omit the norms associated with
        this field (this disables length normalization and index-time
@@ -152,8 +152,20 @@
     <dynamicField name="*_dts" type="date"    indexed="true"  stored="true" multiValued="true"/>
     <dynamicField name="*_p"  type="location" indexed="true" stored="true"/>
     <dynamicField name="*_srpt"  type="location_rpt" indexed="true" stored="true"/>
-
-    <!-- some trie-coded dynamic fields for faster range queries -->
+    
+    <!-- KD-tree (point) numerics -->
+    <dynamicField name="*_pi" type="pint"    indexed="true"  stored="true"/>
+    <dynamicField name="*_pis" type="pints"    indexed="true"  stored="true"/>
+    <dynamicField name="*_pl" type="plong"   indexed="true"  stored="true"/>
+    <dynamicField name="*_pls" type="plongs"   indexed="true"  stored="true"/>
+    <dynamicField name="*_pf" type="pfloat"  indexed="true"  stored="true"/>
+    <dynamicField name="*_pfs" type="pfloats"  indexed="true"  stored="true"/>
+    <dynamicField name="*_pd" type="pdouble" indexed="true"  stored="true"/>
+    <dynamicField name="*_pds" type="pdoubles" indexed="true"  stored="true"/>
+    <dynamicField name="*_pdt" type="pdate"  indexed="true"  stored="true"/>
+    <dynamicField name="*_pdts" type="pdates"  indexed="true"  stored="true"/>
+
+    <!-- some trie-coded dynamic fields -->
     <dynamicField name="*_ti" type="tint"    indexed="true"  stored="true"/>
     <dynamicField name="*_tis" type="tints"    indexed="true"  stored="true"/>
     <dynamicField name="*_tl" type="tlong"   indexed="true"  stored="true"/>
@@ -216,8 +228,8 @@
     <!-- sortMissingLast and sortMissingFirst attributes are optional attributes are
          currently supported on types that are sorted internally as strings
          and on numeric types.
-	     This includes "string","boolean", and, as of 3.5 (and 4.x),
-	     int, float, long, date, double, including the "Trie" variants.
+	     This includes "string","boolean", "int", "float", "long", "date", "double",
+	     including the "Trie" and "Point" variants.
        - If sortMissingLast="true", then a sort on this field will cause documents
          without the field to come after documents with the field,
          regardless of the requested sort order (asc or desc).
@@ -227,13 +239,26 @@
        - If sortMissingLast="false" and sortMissingFirst="false" (the default),
          then default lucene sorting will be used which places docs without the
          field first in an ascending sort and last in a descending sort.
-    -->    
-
+    -->
+    
     <!--
-      Default numeric field types. For faster range queries, consider the tint/tfloat/tlong/tdouble types.
+      Numeric field types that index values using KD-trees. *Point fields are faster and more efficient than Trie* fields both, at
+      search time and at index time, but some features are still not supported.
+      Point fields don't support FieldCache, so they must have docValues="true" if needed for sorting, faceting, functions, etc.
+    -->
+    <fieldType name="pint" class="solr.IntPointField" docValues="true"/>
+    <fieldType name="pfloat" class="solr.FloatPointField" docValues="true"/>
+    <fieldType name="plong" class="solr.LongPointField" docValues="true"/>
+    <fieldType name="pdouble" class="solr.DoublePointField" docValues="true"/>
+    
+    <fieldType name="pints" class="solr.IntPointField" docValues="true" multiValued="true"/>
+    <fieldType name="pfloats" class="solr.FloatPointField" docValues="true" multiValued="true"/>
+    <fieldType name="plongs" class="solr.LongPointField" docValues="true" multiValued="true"/>
+    <fieldType name="pdoubles" class="solr.DoublePointField" docValues="true" multiValued="true"/>
 
-      These fields support doc values, but they require the field to be
-      single-valued and either be required or have a default value.
+    <!--
+      Default numeric field types. For faster range queries, consider *PointFields (pint/pfloat/plong/pdouble), or the 
+      tint/tfloat/tlong/tdouble types.
     -->
     <fieldType name="int" class="solr.TrieIntField" docValues="true" precisionStep="0" positionIncrementGap="0"/>
     <fieldType name="float" class="solr.TrieFloatField" docValues="true" precisionStep="0" positionIncrementGap="0"/>
@@ -254,6 +279,8 @@
      Smaller precisionStep values (specified in bits) will lead to more tokens
      indexed per value, slightly larger index size, and faster range queries.
      A precisionStep of 0 disables indexing at different precision levels.
+     
+     Consider using pint/pfloat/plong/pdouble instead of Trie* fields if possible
     -->
     <fieldType name="tint" class="solr.TrieIntField" docValues="true" precisionStep="8" positionIncrementGap="0"/>
     <fieldType name="tfloat" class="solr.TrieFloatField" docValues="true" precisionStep="8" positionIncrementGap="0"/>
@@ -284,15 +311,15 @@
                       the current day
                       
          Consult the TrieDateField javadocs for more information.
-
-         Note: For faster range queries, consider the tdate type
       -->
+    <!-- KD-tree versions of date fields -->
+    <fieldType name="pdate" class="solr.DatePointField" docValues="true"/>
+    <fieldType name="pdates" class="solr.DatePointField" docValues="true" multiValued="true"/>
+    
     <fieldType name="date" class="solr.TrieDateField" docValues="true" precisionStep="0" positionIncrementGap="0"/>
     <fieldType name="dates" class="solr.TrieDateField" docValues="true" precisionStep="0" positionIncrementGap="0" multiValued="true"/>
 
-    <!-- A Trie based date field for faster date range queries and date faceting. -->
     <fieldType name="tdate" class="solr.TrieDateField" docValues="true" precisionStep="6" positionIncrementGap="0"/>
-
     <fieldType name="tdates" class="solr.TrieDateField" docValues="true" precisionStep="6" positionIncrementGap="0" multiValued="true"/>
 
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7a625bbb/solr/server/solr/configsets/data_driven_schema_configs/conf/managed-schema
----------------------------------------------------------------------
diff --git a/solr/server/solr/configsets/data_driven_schema_configs/conf/managed-schema b/solr/server/solr/configsets/data_driven_schema_configs/conf/managed-schema
index 558c05e..b716f9c 100644
--- a/solr/server/solr/configsets/data_driven_schema_configs/conf/managed-schema
+++ b/solr/server/solr/configsets/data_driven_schema_configs/conf/managed-schema
@@ -71,14 +71,14 @@
      indexed: true if this field should be indexed (searchable or sortable)
      stored: true if this field should be retrievable
      docValues: true if this field should have doc values. Doc values are
-       useful for faceting, grouping, sorting and function queries. Although not
-       required, doc values will make the index faster to load, more
-       NRT-friendly and more memory-efficient. They however come with some
-       limitations: they are currently only supported by StrField, UUIDField
-       and all Trie*Fields, and depending on the field type, they might
-       require the field to be single-valued, be required or have a default
-       value (check the documentation of the field type you're interested in
-       for more information)
+       useful (required, if you are using *Point fields) for faceting, 
+       grouping, sorting and function queries. Doc values will make the index 
+       faster to load, more NRT-friendly and more memory-efficient. 
+       They however come with some limitations: they are currently only 
+       supported by StrField, UUIDField, all Trie*Fields and *PointFields,
+       and depending on the field type, they might require the field to be
+       single-valued, be required or have a default value (check the
+       documentation of the field type you're interested in for more information)
      multiValued: true if this field may contain multiple values per document
      omitNorms: (expert) set to true to omit the norms associated with
        this field (this disables length normalization and index-time
@@ -152,8 +152,20 @@
     <dynamicField name="*_dts" type="date"    indexed="true"  stored="true" multiValued="true"/>
     <dynamicField name="*_p"  type="location" indexed="true" stored="true"/>
     <dynamicField name="*_srpt"  type="location_rpt" indexed="true" stored="true"/>
-
-    <!-- some trie-coded dynamic fields for faster range queries -->
+    
+    <!-- KD-tree (point) numerics -->
+    <dynamicField name="*_pi" type="pint"    indexed="true"  stored="true"/>
+    <dynamicField name="*_pis" type="pints"    indexed="true"  stored="true"/>
+    <dynamicField name="*_pl" type="plong"   indexed="true"  stored="true"/>
+    <dynamicField name="*_pls" type="plongs"   indexed="true"  stored="true"/>
+    <dynamicField name="*_pf" type="pfloat"  indexed="true"  stored="true"/>
+    <dynamicField name="*_pfs" type="pfloats"  indexed="true"  stored="true"/>
+    <dynamicField name="*_pd" type="pdouble" indexed="true"  stored="true"/>
+    <dynamicField name="*_pds" type="pdoubles" indexed="true"  stored="true"/>
+    <dynamicField name="*_pdt" type="pdate"  indexed="true"  stored="true"/>
+    <dynamicField name="*_pdts" type="pdates"  indexed="true"  stored="true"/>
+
+    <!-- some trie-coded dynamic fields -->
     <dynamicField name="*_ti" type="tint"    indexed="true"  stored="true"/>
     <dynamicField name="*_tis" type="tints"    indexed="true"  stored="true"/>
     <dynamicField name="*_tl" type="tlong"   indexed="true"  stored="true"/>
@@ -216,8 +228,8 @@
     <!-- sortMissingLast and sortMissingFirst attributes are optional attributes are
          currently supported on types that are sorted internally as strings
          and on numeric types.
-	     This includes "string","boolean", and, as of 3.5 (and 4.x),
-	     int, float, long, date, double, including the "Trie" variants.
+	     This includes "string","boolean", "int", "float", "long", "date", "double",
+	     including the "Trie" and "Point" variants.
        - If sortMissingLast="true", then a sort on this field will cause documents
          without the field to come after documents with the field,
          regardless of the requested sort order (asc or desc).
@@ -230,10 +242,23 @@
     -->    
 
     <!--
-      Default numeric field types. For faster range queries, consider the tint/tfloat/tlong/tdouble types.
+      Numeric field types that index values using KD-trees. *Point fields are faster and more efficient than Trie* fields both, at
+      search time and at index time, but some features are still not supported.
+      Point fields don't support FieldCache, so they must have docValues="true" if needed for sorting, faceting, functions, etc.
+    -->
+    <fieldType name="pint" class="solr.IntPointField" docValues="true"/>
+    <fieldType name="pfloat" class="solr.FloatPointField" docValues="true"/>
+    <fieldType name="plong" class="solr.LongPointField" docValues="true"/>
+    <fieldType name="pdouble" class="solr.DoublePointField" docValues="true"/>
+    
+    <fieldType name="pints" class="solr.IntPointField" docValues="true" multiValued="true"/>
+    <fieldType name="pfloats" class="solr.FloatPointField" docValues="true" multiValued="true"/>
+    <fieldType name="plongs" class="solr.LongPointField" docValues="true" multiValued="true"/>
+    <fieldType name="pdoubles" class="solr.DoublePointField" docValues="true" multiValued="true"/>
 
-      These fields support doc values, but they require the field to be
-      single-valued and either be required or have a default value.
+    <!--
+      Default numeric field types. For faster range queries, consider *PointFields (pint/pfloat/plong/pdouble), or the 
+      tint/tfloat/tlong/tdouble types.
     -->
     <fieldType name="int" class="solr.TrieIntField" docValues="true" precisionStep="0" positionIncrementGap="0"/>
     <fieldType name="float" class="solr.TrieFloatField" docValues="true" precisionStep="0" positionIncrementGap="0"/>
@@ -254,6 +279,8 @@
      Smaller precisionStep values (specified in bits) will lead to more tokens
      indexed per value, slightly larger index size, and faster range queries.
      A precisionStep of 0 disables indexing at different precision levels.
+     
+     Consider using pint/pfloat/plong/pdouble instead of Trie* fields if possible
     -->
     <fieldType name="tint" class="solr.TrieIntField" docValues="true" precisionStep="8" positionIncrementGap="0"/>
     <fieldType name="tfloat" class="solr.TrieFloatField" docValues="true" precisionStep="8" positionIncrementGap="0"/>
@@ -284,15 +311,14 @@
                       the current day
                       
          Consult the TrieDateField javadocs for more information.
-
-         Note: For faster range queries, consider the tdate type
       -->
+    <!-- KD-tree versions of date fields -->
+    <fieldType name="pdate" class="solr.DatePointField" docValues="true"/>
+    <fieldType name="pdates" class="solr.DatePointField" docValues="true" multiValued="true"/>
+    
     <fieldType name="date" class="solr.TrieDateField" docValues="true" precisionStep="0" positionIncrementGap="0"/>
     <fieldType name="dates" class="solr.TrieDateField" docValues="true" precisionStep="0" positionIncrementGap="0" multiValued="true"/>
-
-    <!-- A Trie based date field for faster date range queries and date faceting. -->
     <fieldType name="tdate" class="solr.TrieDateField" docValues="true" precisionStep="6" positionIncrementGap="0"/>
-
     <fieldType name="tdates" class="solr.TrieDateField" docValues="true" precisionStep="6" positionIncrementGap="0" multiValued="true"/>
 
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7a625bbb/solr/server/solr/configsets/sample_techproducts_configs/conf/managed-schema
----------------------------------------------------------------------
diff --git a/solr/server/solr/configsets/sample_techproducts_configs/conf/managed-schema b/solr/server/solr/configsets/sample_techproducts_configs/conf/managed-schema
index bd292a0..9caf3d6 100644
--- a/solr/server/solr/configsets/sample_techproducts_configs/conf/managed-schema
+++ b/solr/server/solr/configsets/sample_techproducts_configs/conf/managed-schema
@@ -72,14 +72,14 @@
      indexed: true if this field should be indexed (searchable or sortable)
      stored: true if this field should be retrievable
      docValues: true if this field should have doc values. Doc values are
-       useful for faceting, grouping, sorting and function queries. Although not
-       required, doc values will make the index faster to load, more
-       NRT-friendly and more memory-efficient. They however come with some
-       limitations: they are currently only supported by StrField, UUIDField
-       and all Trie*Fields, and depending on the field type, they might
-       require the field to be single-valued, be required or have a default
-       value (check the documentation of the field type you're interested in
-       for more information)
+       useful (required, if you are using *Point fields) for faceting, 
+       grouping, sorting and function queries. Doc values will make the index 
+       faster to load, more NRT-friendly and more memory-efficient. 
+       They however come with some limitations: they are currently only 
+       supported by StrField, UUIDField, all Trie*Fields and *PointFields,
+       and depending on the field type, they might require the field to be
+       single-valued, be required or have a default value (check the
+       documentation of the field type you're interested in for more information)
      multiValued: true if this field may contain multiple values per document
      omitNorms: (expert) set to true to omit the norms associated with
        this field (this disables length normalization and index-time
@@ -139,9 +139,9 @@
    <field name="features" type="text_general" indexed="true" stored="true" multiValued="true"/>
    <field name="includes" type="text_general" indexed="true" stored="true" termVectors="true" termPositions="true" termOffsets="true" />
 
-   <field name="weight" type="float" indexed="true" stored="true"/>
-   <field name="price"  type="float" indexed="true" stored="true"/>
-   <field name="popularity" type="int" indexed="true" stored="true" />
+   <field name="weight" type="pfloat" indexed="true" stored="true"/>
+   <field name="price"  type="pfloat" indexed="true" stored="true"/>
+   <field name="popularity" type="pint" indexed="true" stored="true" />
    <field name="inStock" type="boolean" indexed="true" stored="true" />
 
    <field name="store" type="location" indexed="true" stored="true"/>
@@ -164,7 +164,7 @@
    <field name="resourcename" type="text_general" indexed="true" stored="true"/>
    <field name="url" type="text_general" indexed="true" stored="true"/>
    <field name="content_type" type="string" indexed="true" stored="true" multiValued="true"/>
-   <field name="last_modified" type="date" indexed="true" stored="true"/>
+   <field name="last_modified" type="pdate" indexed="true" stored="true"/>
    <field name="links" type="string" indexed="true" stored="true" multiValued="true"/>
    <field name="_src_" type="string" indexed="false" stored="true"/>
 
@@ -216,7 +216,19 @@
    <dynamicField name="*_dts" type="date"    indexed="true"  stored="true" multiValued="true"/>
    <dynamicField name="*_p"  type="location" indexed="true" stored="true"/>
 
-   <!-- some trie-coded dynamic fields for faster range queries -->
+   <!-- KD-tree (point) numerics -->
+   <dynamicField name="*_pi" type="pint"    indexed="true"  stored="true"/>
+   <dynamicField name="*_pis" type="pints"    indexed="true"  stored="true"/>
+   <dynamicField name="*_pl" type="plong"   indexed="true"  stored="true"/>
+   <dynamicField name="*_pls" type="plongs"   indexed="true"  stored="true"/>
+   <dynamicField name="*_pf" type="pfloat"  indexed="true"  stored="true"/>
+   <dynamicField name="*_pfs" type="pfloats"  indexed="true"  stored="true"/>
+   <dynamicField name="*_pd" type="pdouble" indexed="true"  stored="true"/>
+   <dynamicField name="*_pds" type="pdoubles" indexed="true"  stored="true"/>
+   <dynamicField name="*_pdt" type="pdate"  indexed="true"  stored="true"/>
+   <dynamicField name="*_pdts" type="pdates"  indexed="true"  stored="true"/>
+
+   <!-- some trie-coded dynamic fields -->
    <dynamicField name="*_ti" type="tint"    indexed="true"  stored="true"/>
    <dynamicField name="*_tl" type="tlong"   indexed="true"  stored="true"/>
    <dynamicField name="*_tf" type="tfloat"  indexed="true"  stored="true"/>
@@ -282,6 +294,18 @@
    <copyField source="content_type" dest="text"/>
    <copyField source="resourcename" dest="text"/>
    <copyField source="url" dest="text"/>
+   
+   <!-- Copy numeric fields to *PointFields implementation -->
+   <copyField source="*_i" dest="*_pi"/>
+   <copyField source="*_l" dest="*_pl"/>
+   <copyField source="*_f" dest="*_pf"/>
+   <copyField source="*_d" dest="*_pd"/>
+   <copyField source="*_dt" dest="*_pdt"/>
+   <copyField source="*_is" dest="*_pis"/>
+   <copyField source="*_ls" dest="*_pds"/>
+   <copyField source="*_fs" dest="*_pfs"/>
+   <copyField source="*_ds" dest="*_pds"/>
+   <copyField source="*_dts" dest="*_pdts"/>
 
    <!-- Create a string version of author for faceting -->
    <copyField source="author" dest="author_s"/>
@@ -317,8 +341,8 @@
     <!-- sortMissingLast and sortMissingFirst attributes are optional attributes are
          currently supported on types that are sorted internally as strings
          and on numeric types.
-       This includes "string","boolean", and, as of 3.5 (and 4.x),
-       int, float, long, date, double, including the "Trie" variants.
+         This includes "string","boolean", "int", "float", "long", "date", "double",
+	     including the "Trie" and "Point" variants.
        - If sortMissingLast="true", then a sort on this field will cause documents
          without the field to come after documents with the field,
          regardless of the requested sort order (asc or desc).
@@ -331,10 +355,23 @@
     -->    
 
     <!--
-      Default numeric field types. For faster range queries, consider the tint/tfloat/tlong/tdouble types.
+      Numeric field types that index values using KD-trees. *Point fields are faster and more efficient than Trie* fields both, at
+      search time and at index time, but some features are still not supported.
+      Point fields don't support FieldCache, so they must have docValues="true" if needed for sorting, faceting, functions, etc.
+    -->
+    <fieldType name="pint" class="solr.IntPointField" docValues="true"/>
+    <fieldType name="pfloat" class="solr.FloatPointField" docValues="true"/>
+    <fieldType name="plong" class="solr.LongPointField" docValues="true"/>
+    <fieldType name="pdouble" class="solr.DoublePointField" docValues="true"/>
+    
+    <fieldType name="pints" class="solr.IntPointField" docValues="true" multiValued="true"/>
+    <fieldType name="pfloats" class="solr.FloatPointField" docValues="true" multiValued="true"/>
+    <fieldType name="plongs" class="solr.LongPointField" docValues="true" multiValued="true"/>
+    <fieldType name="pdoubles" class="solr.DoublePointField" docValues="true" multiValued="true"/>
 
-      These fields support doc values, but they require the field to be
-      single-valued and either be required or have a default value.
+    <!--
+      Default numeric field types. For faster range queries, consider *PointFields (pint/pfloat/plong/pdouble), or the 
+      tint/tfloat/tlong/tdouble types.
     -->
     <fieldType name="int" class="solr.TrieIntField" docValues="true" precisionStep="0" positionIncrementGap="0"/>
     <fieldType name="float" class="solr.TrieFloatField" docValues="true" precisionStep="0" positionIncrementGap="0"/>
@@ -350,6 +387,8 @@
      Smaller precisionStep values (specified in bits) will lead to more tokens
      indexed per value, slightly larger index size, and faster range queries.
      A precisionStep of 0 disables indexing at different precision levels.
+     
+     Consider using pint/pfloat/plong/pdouble instead of Trie* fields if possible
     -->
     <fieldType name="tint" class="solr.TrieIntField" docValues="true" precisionStep="8" positionIncrementGap="0"/>
     <fieldType name="tfloat" class="solr.TrieFloatField" docValues="true" precisionStep="8" positionIncrementGap="0"/>
@@ -375,12 +414,14 @@
                       the current day
                       
          Consult the TrieDateField javadocs for more information.
-
-         Note: For faster range queries, consider the tdate type
       -->
-    <fieldType name="date" class="solr.TrieDateField" docValues="true" precisionStep="0" positionIncrementGap="0"/>
+      
+    <!-- KD-tree versions of date fields -->
+    <fieldType name="pdate" class="solr.DatePointField" docValues="true"/>
+    <fieldType name="pdates" class="solr.DatePointField" docValues="true" multiValued="true"/>
 
-    <!-- A Trie based date field for faster date range queries and date faceting. -->
+
+    <fieldType name="date" class="solr.TrieDateField" docValues="true" precisionStep="0" positionIncrementGap="0"/>
     <fieldType name="tdate" class="solr.TrieDateField" docValues="true" precisionStep="6" positionIncrementGap="0"/>
 
 

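The dynamic *_p* fields and copyField rules added above make the KD-tree types usable alongside the existing Trie fields in the techproducts example. A minimal SolrJ sketch of that usage follows; it is not part of this configset change, it assumes a collection built from sample_techproducts_configs running at the default local URL, and the document id and values are purely illustrative. Because the *Point types do not support FieldCache, the sort below relies on the docValues="true" declared on the field types.

// Sketch only: index a document through the new point dynamic fields and sort on one of them.
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.common.SolrInputDocument;

public class PointFieldsSketch {
  public static void main(String[] args) throws Exception {
    try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/techproducts").build()) {
      SolrInputDocument doc = new SolrInputDocument();
      doc.addField("id", "point-demo-1");
      doc.addField("popularity_pi", 42);    // pint dynamic field, docValues enabled by the type
      doc.addField("price_pf", 19.99f);     // pfloat dynamic field
      client.add(doc);
      client.commit();

      SolrQuery query = new SolrQuery("*:*");
      query.setSort("popularity_pi", SolrQuery.ORDER.desc);  // sorting goes through docValues
      System.out.println(client.query(query).getResults());
    }
  }
}
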

[31/46] lucene-solr:jira/solr-9959: SOLR-9992: Add support for grouping with PointFields

Posted by ab...@apache.org.
SOLR-9992: Add support for grouping with PointFields


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/21adce4a
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/21adce4a
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/21adce4a

Branch: refs/heads/jira/solr-9959
Commit: 21adce4a2fa6d9a207cd8abed11ba050ff57c933
Parents: e092739
Author: Cao Manh Dat <da...@apache.org>
Authored: Mon Mar 20 15:21:36 2017 +0700
Committer: Cao Manh Dat <da...@apache.org>
Committed: Mon Mar 20 15:21:36 2017 +0700

----------------------------------------------------------------------
 .../solr/handler/component/QueryComponent.java  |  5 +++-
 .../TopGroupsShardRequestFactory.java           |  4 ++--
 .../SearchGroupsResultTransformer.java          | 24 ++++++++++++++++----
 .../TopGroupsResultTransformer.java             |  4 +++-
 .../apache/solr/TestDistributedGrouping.java    |  2 --
 .../org/apache/solr/TestGroupingSearch.java     |  8 -------
 .../cloud/SegmentTerminateEarlyTestState.java   |  4 +---
 .../apache/solr/cloud/TestSegmentSorting.java   |  2 --
 8 files changed, 29 insertions(+), 24 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/21adce4a/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java b/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
index deff25b..08a0e84 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
@@ -49,6 +49,7 @@ import org.apache.lucene.search.grouping.GroupDocs;
 import org.apache.lucene.search.grouping.SearchGroup;
 import org.apache.lucene.search.grouping.TopGroups;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.InPlaceMergeSorter;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.common.SolrDocument;
@@ -445,7 +446,9 @@ public class QueryComponent extends SearchComponent
             for (String topGroup : topGroupsParam) {
               SearchGroup<BytesRef> searchGroup = new SearchGroup<>();
               if (!topGroup.equals(TopGroupsShardRequestFactory.GROUP_NULL_VALUE)) {
-                searchGroup.groupValue = new BytesRef(schemaField.getType().readableToIndexed(topGroup));
+                BytesRefBuilder builder = new BytesRefBuilder();
+                schemaField.getType().readableToIndexed(topGroup, builder);
+                searchGroup.groupValue = builder.get();
               }
               topGroups.add(searchGroup);
             }
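
The hunk above is the readable-to-indexed direction of the conversion that makes grouping work for PointFields, whose indexed encoding is not simply the UTF-8 bytes of the readable value; the remaining hunks in this commit apply the inverse when serializing shard responses. A small sketch of both directions, using the same FieldType calls as the patch (the helper class and method names are illustrative, not part of the change):

// Sketch only: round-trip a group value between its readable and indexed forms.
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.CharsRefBuilder;
import org.apache.solr.schema.SchemaField;

class GroupValueCodecSketch {
  // Readable value (e.g. "42") -> indexed BytesRef for the given field.
  static BytesRef toIndexed(SchemaField field, String readable) {
    BytesRefBuilder builder = new BytesRefBuilder();
    field.getType().readableToIndexed(readable, builder);
    return builder.get();
  }

  // Indexed BytesRef -> readable string, used when group values are sent between shards.
  static String toReadable(SchemaField field, BytesRef indexed) {
    return field.getType().indexedToReadable(indexed, new CharsRefBuilder()).toString();
  }
}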

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/21adce4a/solr/core/src/java/org/apache/solr/search/grouping/distributed/requestfactory/TopGroupsShardRequestFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/requestfactory/TopGroupsShardRequestFactory.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/requestfactory/TopGroupsShardRequestFactory.java
index ca73a30..5067c51 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/requestfactory/TopGroupsShardRequestFactory.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/requestfactory/TopGroupsShardRequestFactory.java
@@ -19,6 +19,7 @@ package org.apache.solr.search.grouping.distributed.requestfactory;
 import org.apache.lucene.analysis.reverse.ReverseStringFilter;
 import org.apache.lucene.search.grouping.SearchGroup;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.CharsRefBuilder;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.GroupParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
@@ -117,9 +118,8 @@ public class TopGroupsShardRequestFactory implements ShardRequestFactory {
       for (SearchGroup<BytesRef> searchGroup : entry.getValue()) {
         String groupValue;
         if (searchGroup.groupValue != null) {
-          String rawGroupValue = searchGroup.groupValue.utf8ToString();
           FieldType fieldType = schema.getField(entry.getKey()).getType();
-          groupValue = fieldType.indexedToReadable(rawGroupValue);
+          groupValue = fieldType.indexedToReadable(searchGroup.groupValue, new CharsRefBuilder()).toString();
         } else {
           groupValue = GROUP_NULL_VALUE;
         }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/21adce4a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/SearchGroupsResultTransformer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/SearchGroupsResultTransformer.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/SearchGroupsResultTransformer.java
index c5c1e81..2602221 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/SearchGroupsResultTransformer.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/SearchGroupsResultTransformer.java
@@ -19,6 +19,8 @@ package org.apache.solr.search.grouping.distributed.shardresultserializer;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.grouping.SearchGroup;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.BytesRefBuilder;
+import org.apache.lucene.util.CharsRefBuilder;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.schema.SchemaField;
 import org.apache.solr.search.SolrIndexSearcher;
@@ -56,7 +58,7 @@ public class SearchGroupsResultTransformer implements ShardResultTransformer<Lis
         final SearchGroupsFieldCommandResult fieldCommandResult = fieldCommand.result();
         final Collection<SearchGroup<BytesRef>> searchGroups = fieldCommandResult.getSearchGroups();
         if (searchGroups != null) {
-          commandResult.add(TOP_GROUPS, serializeSearchGroup(searchGroups, fieldCommand.getGroupSort()));
+          commandResult.add(TOP_GROUPS, serializeSearchGroup(searchGroups, fieldCommand));
         }
         final Integer groupedCount = fieldCommandResult.getGroupCount();
         if (groupedCount != null) {
@@ -85,7 +87,17 @@ public class SearchGroupsResultTransformer implements ShardResultTransformer<Lis
       if (rawSearchGroups != null) {
         for (Map.Entry<String, List<Comparable>> rawSearchGroup : rawSearchGroups){
           SearchGroup<BytesRef> searchGroup = new SearchGroup<>();
-          searchGroup.groupValue = rawSearchGroup.getKey() != null ? new BytesRef(rawSearchGroup.getKey()) : null;
+          SchemaField groupField = rawSearchGroup.getKey() != null? searcher.getSchema().getFieldOrNull(command.getKey()) : null;
+          searchGroup.groupValue = null;
+          if (rawSearchGroup.getKey() != null) {
+            if (groupField != null) {
+              BytesRefBuilder builder = new BytesRefBuilder();
+              groupField.getType().readableToIndexed(rawSearchGroup.getKey(), builder);
+              searchGroup.groupValue = builder.get();
+            } else {
+              searchGroup.groupValue = new BytesRef(rawSearchGroup.getKey());
+            }
+          }
           searchGroup.sortValues = rawSearchGroup.getValue().toArray(new Comparable[rawSearchGroup.getValue().size()]);
           for (int i = 0; i < searchGroup.sortValues.length; i++) {
             SchemaField field = groupSort.getSort()[i].getField() != null ? searcher.getSchema().getFieldOrNull(groupSort.getSort()[i].getField()) : null;
@@ -101,17 +113,19 @@ public class SearchGroupsResultTransformer implements ShardResultTransformer<Lis
     return result;
   }
 
-  private NamedList serializeSearchGroup(Collection<SearchGroup<BytesRef>> data, Sort groupSort) {
+  private NamedList serializeSearchGroup(Collection<SearchGroup<BytesRef>> data, SearchGroupsFieldCommand command) {
     final NamedList<Object[]> result = new NamedList<>(data.size());
 
     for (SearchGroup<BytesRef> searchGroup : data) {
       Object[] convertedSortValues = new Object[searchGroup.sortValues.length];
       for (int i = 0; i < searchGroup.sortValues.length; i++) {
         Object sortValue = searchGroup.sortValues[i];
-        SchemaField field = groupSort.getSort()[i].getField() != null ? searcher.getSchema().getFieldOrNull(groupSort.getSort()[i].getField()) : null;
+        SchemaField field = command.getGroupSort().getSort()[i].getField() != null ?
+            searcher.getSchema().getFieldOrNull(command.getGroupSort().getSort()[i].getField()) : null;
         convertedSortValues[i] = ShardResultTransformerUtils.marshalSortValue(sortValue, field);
       }
-      String groupValue = searchGroup.groupValue != null ? searchGroup.groupValue.utf8ToString() : null;
+      SchemaField field = searcher.getSchema().getFieldOrNull(command.getKey());
+      String groupValue = searchGroup.groupValue != null ? field.getType().indexedToReadable(searchGroup.groupValue, new CharsRefBuilder()).toString() : null;
       result.add(groupValue, convertedSortValues);
     }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/21adce4a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java
index 3ff87bd..415e513 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java
@@ -33,6 +33,7 @@ import org.apache.lucene.search.TopFieldDocs;
 import org.apache.lucene.search.grouping.GroupDocs;
 import org.apache.lucene.search.grouping.TopGroups;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.CharsRefBuilder;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.handler.component.ResponseBuilder;
 import org.apache.solr.handler.component.ShardDoc;
@@ -232,7 +233,8 @@ public class TopGroupsResultTransformer implements ShardResultTransformer<List<C
         document.add("sortValues", convertedSortValues);
       }
       groupResult.add("documents", documents);
-      String groupValue = searchGroup.groupValue != null ? groupField.getType().indexedToReadable(searchGroup.groupValue.utf8ToString()): null;
+      String groupValue = searchGroup.groupValue != null ?
+          groupField.getType().indexedToReadable(searchGroup.groupValue, new CharsRefBuilder()).toString(): null;
       result.add(groupValue, groupResult);
     }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/21adce4a/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java b/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java
index d0c4f36..a9e6d57 100644
--- a/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java
+++ b/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java
@@ -20,7 +20,6 @@ import java.io.IOException;
 import java.util.List;
 
 import org.apache.lucene.util.LuceneTestCase.Slow;
-import org.apache.solr.SolrTestCaseJ4.SuppressPointFields;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.response.QueryResponse;
@@ -38,7 +37,6 @@ import org.junit.Test;
  * @since solr 4.0
  */
 @Slow
-@SuppressPointFields
 public class TestDistributedGrouping extends BaseDistributedSearchTestCase {
 
   String t1="a_t";

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/21adce4a/solr/core/src/test/org/apache/solr/TestGroupingSearch.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/TestGroupingSearch.java b/solr/core/src/test/org/apache/solr/TestGroupingSearch.java
index 2910e0c..e659727 100644
--- a/solr/core/src/test/org/apache/solr/TestGroupingSearch.java
+++ b/solr/core/src/test/org/apache/solr/TestGroupingSearch.java
@@ -50,7 +50,6 @@ import org.noggit.ObjectBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-@SolrTestCaseJ4.SuppressPointFields(bugUrl="https://issues.apache.org/jira/browse/SOLR-9992")
 public class TestGroupingSearch extends SolrTestCaseJ4 {
 
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@@ -536,13 +535,6 @@ public class TestGroupingSearch extends SolrTestCaseJ4 {
       ,"/facet_counts/facet_fields/"+f+"==['1',3, '2',3, '3',2, '4',1, '5',1]"
     );
 
-    // test that grouping works with highlighting
-    assertJQ(req("fq",filt,  "q","{!func}"+f2, "group","true", "group.field",f, "fl","id"
-                 ,"hl","true", "hl.fl",f)
-      ,"/grouped/"+f+"/matches==10"
-      ,"/highlighting=={'_ORDERED_':'', '8':{},'3':{},'4':{},'1':{},'2':{}}"
-    );
-
     // test that grouping works with debugging
     assertJQ(req("fq",filt,  "q","{!func}"+f2, "group","true", "group.field",f, "fl","id"
                  ,"debugQuery","true")

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/21adce4a/solr/core/src/test/org/apache/solr/cloud/SegmentTerminateEarlyTestState.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/SegmentTerminateEarlyTestState.java b/solr/core/src/test/org/apache/solr/cloud/SegmentTerminateEarlyTestState.java
index 2720350..3fe12ed 100644
--- a/solr/core/src/test/org/apache/solr/cloud/SegmentTerminateEarlyTestState.java
+++ b/solr/core/src/test/org/apache/solr/cloud/SegmentTerminateEarlyTestState.java
@@ -22,7 +22,6 @@ import java.util.Map;
 import java.util.Set;
 import java.util.Random;
 
-import org.apache.solr.SolrTestCaseJ4.SuppressPointFields;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.response.QueryResponse;
@@ -32,8 +31,7 @@ import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ShardParams;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.response.SolrQueryResponse;
-// This test uses grouping requests, which doesn't work yet with PointFields
-@SuppressPointFields(bugUrl="https://issues.apache.org/jira/browse/SOLR-9992")
+
 class SegmentTerminateEarlyTestState {
 
   final String keyField = "id";

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/21adce4a/solr/core/src/test/org/apache/solr/cloud/TestSegmentSorting.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestSegmentSorting.java b/solr/core/src/test/org/apache/solr/cloud/TestSegmentSorting.java
index de632c9..5e6283a 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestSegmentSorting.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestSegmentSorting.java
@@ -22,7 +22,6 @@ import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.lucene.util.TestUtil;
-import org.apache.solr.SolrTestCaseJ4.SuppressPointFields;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.client.solrj.request.schema.SchemaRequest.Field;
@@ -42,7 +41,6 @@ import org.junit.rules.TestName;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-@SuppressPointFields(bugUrl="https://issues.apache.org/jira/browse/SOLR-9992")
 public class TestSegmentSorting extends SolrCloudTestCase {
 
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());


[24/46] lucene-solr:jira/solr-9959: SOLR-10237: Poly-Fields should work with subfields that have docValues=true

Posted by ab...@apache.org.
SOLR-10237: Poly-Fields should work with subfields that have docValues=true


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/3b660018
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/3b660018
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/3b660018

Branch: refs/heads/jira/solr-9959
Commit: 3b660018457234387558ff626c8b95bb6f4ce853
Parents: 540ee1d
Author: Tomas Fernandez Lobbe <tf...@apache.org>
Authored: Fri Mar 17 11:55:15 2017 -0700
Committer: Tomas Fernandez Lobbe <tf...@apache.org>
Committed: Fri Mar 17 11:55:15 2017 -0700

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  2 +
 .../solr/schema/AbstractSubTypeFieldType.java   | 17 ++++++---
 .../java/org/apache/solr/schema/LatLonType.java | 12 +++++-
 .../java/org/apache/solr/schema/PointType.java  | 14 +++++--
 .../test-files/solr/collection1/conf/schema.xml | 15 ++++----
 .../org/apache/solr/schema/PolyFieldTest.java   | 40 ++++++++++++--------
 .../apache/solr/update/DocumentBuilderTest.java |  4 +-
 7 files changed, 68 insertions(+), 36 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3b660018/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 1548410..75ac5bb 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -303,6 +303,8 @@ Bug Fixes
 * SOLR-10283: Learning to Rank (LTR) SolrFeature to reject searches with missing efi (External Feature Information) used by fq.
   (Christine Poerschke)
 
+* SOLR-10237: Poly-fields should work with subfields that have docValues=true (Tomás Fernández Löbbe, David Smiley)
+
 Optimizations
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3b660018/solr/core/src/java/org/apache/solr/schema/AbstractSubTypeFieldType.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/AbstractSubTypeFieldType.java b/solr/core/src/java/org/apache/solr/schema/AbstractSubTypeFieldType.java
index 1184876..73a4f3c 100644
--- a/solr/core/src/java/org/apache/solr/schema/AbstractSubTypeFieldType.java
+++ b/solr/core/src/java/org/apache/solr/schema/AbstractSubTypeFieldType.java
@@ -76,20 +76,25 @@ public abstract class AbstractSubTypeFieldType extends FieldType implements Sche
    * and props of indexed=true, stored=false.
    *
    * @param schema the IndexSchema
-   * @param type   The {@link FieldType} of the prototype.
+   * @param subType   The {@link FieldType} of the prototype.
+   * @param polyField   The poly {@link FieldType}.
    * @return The {@link SchemaField}
    */
 
-  static SchemaField registerPolyFieldDynamicPrototype(IndexSchema schema, FieldType type) {
-    String name = "*" + FieldType.POLY_FIELD_SEPARATOR + type.typeName;
+  static SchemaField registerPolyFieldDynamicPrototype(IndexSchema schema, FieldType subType, FieldType polyField) {
+    String name = "*" + FieldType.POLY_FIELD_SEPARATOR + subType.typeName;
     Map<String, String> props = new HashMap<>();
     //Just set these, delegate everything else to the field type
     props.put("indexed", "true");
     props.put("stored", "false");
     props.put("multiValued", "false");
-    int p = SchemaField.calcProps(name, type, props);
+    // if polyField enables dv, add them to the subtypes
+    if (polyField.hasProperty(DOC_VALUES)) {
+      props.put("docValues", "true");
+    }
+    int p = SchemaField.calcProps(name, subType, props);
     SchemaField proto = SchemaField.create(name,
-            type, p, null);
+        subType, p, null);
     schema.registerDynamicFields(proto);
     return proto;
   }
@@ -107,7 +112,7 @@ public abstract class AbstractSubTypeFieldType extends FieldType implements Sche
     this.schema = schema;
     //Can't do this until here b/c the Dynamic Fields are not initialized until here.
     if (subType != null) {
-      SchemaField proto = registerPolyFieldDynamicPrototype(schema, subType);
+      SchemaField proto = registerPolyFieldDynamicPrototype(schema, subType, this);
       dynFieldProps = proto.getProperties();
     }
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3b660018/solr/core/src/java/org/apache/solr/schema/LatLonType.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/LatLonType.java b/solr/core/src/java/org/apache/solr/schema/LatLonType.java
index c484f3a..8c4e19a 100644
--- a/solr/core/src/java/org/apache/solr/schema/LatLonType.java
+++ b/solr/core/src/java/org/apache/solr/schema/LatLonType.java
@@ -75,10 +75,10 @@ public class LatLonType extends AbstractSubTypeFieldType implements SpatialQuery
       Point point = SpatialUtils.parsePointSolrException(externalVal, SpatialContext.GEO);
       //latitude
       SchemaField subLatSF = subField(field, LAT, schema);
-      f.add(subLatSF.createField(String.valueOf(point.getY())));
+      f.addAll(subLatSF.createFields(String.valueOf(point.getY())));
       //longitude
       SchemaField subLonSF = subField(field, LON, schema);
-      f.add(subLonSF.createField(String.valueOf(point.getX())));
+      f.addAll(subLonSF.createFields(String.valueOf(point.getX())));
     }
 
     if (field.stored()) {
@@ -86,6 +86,14 @@ public class LatLonType extends AbstractSubTypeFieldType implements SpatialQuery
     }
     return f;
   }
+  
+  @Override
+  protected void checkSupportsDocValues() {
+    // DocValues supported only when enabled at the fieldType 
+    if (!hasProperty(DOC_VALUES)) {
+      throw new UnsupportedOperationException("LatLonType can't have docValues=true in the field definition, use docValues=true in the fieldType definition, or in subFieldType/subFieldSuffix");
+    }
+  }
 
 
   @Override
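
The change from createField to createFields above accounts for subfields that now carry docValues: a single coordinate value can expand into more than one Lucene field. A rough Lucene-level sketch of that expansion (illustrative only; the field name follows the poly-field naming used in the tests below, and Solr's actual point implementation may encode the doc values differently):

// Sketch only: one docValues-enabled double subfield value -> an indexed point
// field plus a doc-values field under the same name.
import java.util.Arrays;
import java.util.List;

import org.apache.lucene.document.DoubleDocValuesField;
import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.index.IndexableField;

class LatSubfieldSketch {
  static List<IndexableField> latSubfield(double lat) {
    return Arrays.asList(
        new DoublePoint("home_0___pdouble", lat),            // for indexing / range queries
        new DoubleDocValuesField("home_0___pdouble", lat));  // for sorting, faceting, functions
  }
}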

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3b660018/solr/core/src/java/org/apache/solr/schema/PointType.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/PointType.java b/solr/core/src/java/org/apache/solr/schema/PointType.java
index 4c022b8..e088e7f 100644
--- a/solr/core/src/java/org/apache/solr/schema/PointType.java
+++ b/solr/core/src/java/org/apache/solr/schema/PointType.java
@@ -71,12 +71,12 @@ public class PointType extends CoordinateFieldType implements SpatialQueryable {
     String[] point = parseCommaSeparatedList(externalVal, dimension);
 
     // TODO: this doesn't currently support polyFields as sub-field types
-    List<IndexableField> f = new ArrayList<>(dimension+1);
+    List<IndexableField> f = new ArrayList<>((dimension*2)+1);
 
     if (field.indexed()) {
       for (int i=0; i<dimension; i++) {
         SchemaField sf = subField(field, i, schema);
-        f.add(sf.createField(point[i]));
+        f.addAll(sf.createFields(point[i]));
       }
     }
 
@@ -84,7 +84,7 @@ public class PointType extends CoordinateFieldType implements SpatialQueryable {
       String storedVal = externalVal;  // normalize or not?
       f.add(createField(field.getName(), storedVal, StoredField.TYPE));
     }
-    
+
     return f;
   }
 
@@ -155,6 +155,14 @@ public class PointType extends CoordinateFieldType implements SpatialQueryable {
     }
     return bq.build();
   }
+  
+  @Override
+  protected void checkSupportsDocValues() {
+    // DocValues supported only when enabled at the fieldType 
+    if (!hasProperty(DOC_VALUES)) {
+      throw new UnsupportedOperationException("PointType can't have docValues=true in the field definition, use docValues=true in the fieldType definition, or in subFieldType/subFieldSuffix");
+    }
+  }
 
   /**
    * Calculates the range and creates a RangeQuery (bounding box) wrapped in a BooleanQuery (unless the dimension is

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3b660018/solr/core/src/test-files/solr/collection1/conf/schema.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema.xml b/solr/core/src/test-files/solr/collection1/conf/schema.xml
index 8c549a3..6f5eddc 100644
--- a/solr/core/src/test-files/solr/collection1/conf/schema.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/schema.xml
@@ -392,15 +392,14 @@
   <fieldType name="uuid" class="solr.UUIDField"/>
 
   <!-- Try out some point types -->
-  <fieldType name="xy" class="solr.PointType" dimension="2" subFieldType="double"/>
-  <fieldType name="x" class="solr.PointType" dimension="1" subFieldType="double"/>
-  <fieldType name="tenD" class="solr.PointType" dimension="10" subFieldType="double"/>
-  <!-- Use the sub field suffix -->
-  <fieldType name="xyd" class="solr.PointType" dimension="2" subFieldSuffix="_d1_ndv"/>
+  <fieldType name="xy" class="solr.PointType" dimension="2" subFieldType="${solr.tests.doubleClass:pdouble}"  docValues="true"/>
+  <fieldType name="x" class="solr.PointType" dimension="1" subFieldType="${solr.tests.doubleClass:pdouble}"/>
+  <fieldType name="tenD" class="solr.PointType" dimension="10" subFieldType="${solr.tests.doubleClass:pdouble}"/>
+  <fieldType name="xyd" class="solr.PointType" dimension="2" subFieldSuffix="_d1"/>
   <fieldType name="geohash" class="solr.GeoHashField"/>
 
 
-  <fieldType name="latLon" class="solr.LatLonType" subFieldType="double"/>
+  <fieldType name="latLon" class="solr.LatLonType" subFieldType="${solr.tests.doubleClass:pdouble}"/>
 
   <!-- Currency type -->
   <fieldType name="currency" class="solr.CurrencyField" currencyConfig="currency.xml" multiValued="false"/>
@@ -621,7 +620,7 @@
   <dynamicField name="*_f1" type="${solr.tests.floatClass:pfloat}" indexed="true" stored="true" multiValued="false"/>
   <dynamicField name="*_d" type="${solr.tests.doubleClass:pdouble}" indexed="true" stored="true"/>
   <dynamicField name="*_d1" type="${solr.tests.doubleClass:pdouble}" indexed="true" stored="true" multiValued="false"/>
-  <dynamicField name="*_d1_ndv" type="${solr.tests.doubleClass:pdouble}" indexed="true" docValues="false" stored="true" multiValued="false"/>
+  <dynamicField name="*_d1_dv" type="${solr.tests.doubleClass:pdouble}" indexed="true" docValues="true" stored="true" multiValued="false"/>
   <dynamicField name="*_dt" type="${solr.tests.dateClass:pdate}" indexed="true" stored="true"/>
   <dynamicField name="*_dt1" type="${solr.tests.dateClass:pdate}" indexed="true" stored="true" multiValued="false"/>
 
@@ -671,7 +670,7 @@
   <dynamicField name="*_mfacet" type="string" indexed="true" stored="false" multiValued="true"/>
 
   <!-- Type used to index the lat and lon components for the "location" FieldType -->
-  <dynamicField name="*_coordinate" type="${solr.tests.doubleClass:pdouble}" indexed="true" stored="false" omitNorms="true"/>
+  <dynamicField name="*_coordinate" type="${solr.tests.doubleClass:pdouble}" indexed="true" stored="false" omitNorms="true" docValues="false"/>
 
   <dynamicField name="*_path" type="path" indexed="true" stored="true" omitNorms="true" multiValued="true"/>
   <dynamicField name="*_ancestor" type="ancestor_path" indexed="true" stored="true" omitNorms="true"

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3b660018/solr/core/src/test/org/apache/solr/schema/PolyFieldTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/PolyFieldTest.java b/solr/core/src/test/org/apache/solr/schema/PolyFieldTest.java
index 6839c70..900e439 100644
--- a/solr/core/src/test/org/apache/solr/schema/PolyFieldTest.java
+++ b/solr/core/src/test/org/apache/solr/schema/PolyFieldTest.java
@@ -15,6 +15,7 @@
  * limitations under the License.
  */
 package org.apache.solr.schema;
+import java.util.Arrays;
 import java.util.List;
 
 import org.apache.lucene.index.IndexableField;
@@ -45,11 +46,16 @@ public class PolyFieldTest extends SolrTestCaseJ4 {
     SchemaField home = schema.getField("home");
     assertNotNull(home);
     assertTrue(home.isPolyField());
+    
+    String subFieldType = "double";
+    if (usingPointFields()) {
+      subFieldType = "pdouble";
+    }
 
     SchemaField[] dynFields = schema.getDynamicFieldPrototypes();
     boolean seen = false;
     for (SchemaField dynField : dynFields) {
-      if (dynField.getName().equals("*" + FieldType.POLY_FIELD_SEPARATOR + "double")) {
+      if (dynField.getName().equals("*" + FieldType.POLY_FIELD_SEPARATOR + subFieldType)) {
         seen = true;
       }
     }
@@ -60,7 +66,7 @@ public class PolyFieldTest extends SolrTestCaseJ4 {
     assertNotNull(xy);
     assertTrue(xy instanceof PointType);
     assertTrue(xy.isPolyField());
-    home = schema.getFieldOrNull("home_0" + FieldType.POLY_FIELD_SEPARATOR + "double");
+    home = schema.getFieldOrNull("home_0" + FieldType.POLY_FIELD_SEPARATOR + subFieldType);
     assertNotNull(home);
     home = schema.getField("home");
     assertNotNull(home);
@@ -84,9 +90,14 @@ public class PolyFieldTest extends SolrTestCaseJ4 {
     double[] xy = new double[]{35.0, -79.34};
     String point = xy[0] + "," + xy[1];
     List<IndexableField> fields = home.createFields(point);
-    assertEquals(fields.size(), 3);//should be 3, we have a stored field
-    //first two fields contain the values, third is just stored and contains the original
-    for (int i = 0; i < 3; i++) {
+    assertNotNull(pt.getSubType());
+    int expectdNumFields = 3;//If DV=false, we expect one field per dimension plus a stored field
+    if (pt.subField(home, 0, schema).hasDocValues()) {
+      expectdNumFields+=2; // If docValues=true, then we expect two more fields
+    }
+    assertEquals("Unexpected fields created: " + Arrays.toString(fields.toArray()), expectdNumFields, fields.size());
+    //first two/four fields contain the values, last one is just stored and contains the original
+    for (int i = 0; i < expectdNumFields; i++) {
       boolean hasValue = fields.get(i).binaryValue() != null
           || fields.get(i).stringValue() != null
           || fields.get(i).numericValue() != null;
@@ -100,7 +111,7 @@ public class PolyFieldTest extends SolrTestCaseJ4 {
     home = schema.getField("home_ns");
     assertNotNull(home);
     fields = home.createFields(point);
-    assertEquals(fields.size(), 2);//should be 2, since we aren't storing
+    assertEquals(expectdNumFields - 1, fields.size(), 2);//one less field than with "home", since we aren't storing
 
     home = schema.getField("home_ns");
     assertNotNull(home);
@@ -111,17 +122,12 @@ public class PolyFieldTest extends SolrTestCaseJ4 {
       //
     }
 
-    
     SchemaField s1 = schema.getField("test_p");
     SchemaField s2 = schema.getField("test_p");
-    // If we use [Int/Double/Long/Float]PointField, we can't get the valueSource, since docValues is false
-    if (s1.createFields("1,2").get(0).fieldType().pointDimensionCount() == 0) {
-      assertFalse(s2.getType().isPointField());
-      ValueSource v1 = s1.getType().getValueSource(s1, null);
-      ValueSource v2 = s2.getType().getValueSource(s2, null);
-      assertEquals(v1, v2);
-      assertEquals(v1.hashCode(), v2.hashCode());
-    }
+    ValueSource v1 = s1.getType().getValueSource(s1, null);
+    ValueSource v2 = s2.getType().getValueSource(s2, null);
+    assertEquals(v1, v2);
+    assertEquals(v1.hashCode(), v2.hashCode());
   }
 
   @Test
@@ -181,5 +187,9 @@ public class PolyFieldTest extends SolrTestCaseJ4 {
     assertEquals(2, bq.clauses().size());
     clearIndex();
   }
+  
+  private boolean usingPointFields() {
+    return h.getCore().getLatestSchema().getField("foo_d1_dv").getType().isPointField();
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3b660018/solr/core/src/test/org/apache/solr/update/DocumentBuilderTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/DocumentBuilderTest.java b/solr/core/src/test/org/apache/solr/update/DocumentBuilderTest.java
index 03dd17c..5d98d8b 100644
--- a/solr/core/src/test/org/apache/solr/update/DocumentBuilderTest.java
+++ b/solr/core/src/test/org/apache/solr/update/DocumentBuilderTest.java
@@ -131,8 +131,8 @@ public class DocumentBuilderTest extends SolrTestCaseJ4 {
     doc.addField( "home", "2.2,3.3" );
     Document out = DocumentBuilder.toDocument( doc, core.getLatestSchema() );
     assertNotNull( out.get( "home" ) );//contains the stored value and term vector, if there is one
-    assertNotNull( out.getField( "home_0" + FieldType.POLY_FIELD_SEPARATOR + "double" ) );
-    assertNotNull( out.getField( "home_1" + FieldType.POLY_FIELD_SEPARATOR + "double" ) );
+    assertNotNull( out.getField( "home_0" + FieldType.POLY_FIELD_SEPARATOR + System.getProperty("solr.tests.doubleClass", "pdouble") ) );
+    assertNotNull( out.getField( "home_1" + FieldType.POLY_FIELD_SEPARATOR + System.getProperty("solr.tests.doubleClass", "pdouble") ) );
   }
   
   /**


[43/46] lucene-solr:jira/solr-9959: SOLR-6615: use constants for 'id', '_route_', '_version_'

Posted by ab...@apache.org.
SOLR-6615: use constants for 'id', '_route_', '_version_'


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/eb587772
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/eb587772
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/eb587772

Branch: refs/heads/jira/solr-9959
Commit: eb587772ddecaea371b20feb955a197e80699f22
Parents: 88c3c3c
Author: Noble Paul <no...@apache.org>
Authored: Thu Mar 23 11:45:50 2017 +1030
Committer: Noble Paul <no...@apache.org>
Committed: Thu Mar 23 11:45:50 2017 +1030

----------------------------------------------------------------------
 .../org/apache/solr/cloud/ElectionContext.java  |  4 ++-
 .../java/org/apache/solr/cloud/Overseer.java    |  8 +++--
 .../solr/cloud/OverseerNodePrioritizer.java     |  7 ++--
 .../solr/cloud/OverseerTaskProcessor.java       |  7 ++--
 .../org/apache/solr/core/JmxMonitoredMap.java   |  4 +--
 .../org/apache/solr/handler/BlobHandler.java    | 12 ++++---
 .../org/apache/solr/handler/CdcrReplicator.java | 15 +++++----
 .../org/apache/solr/handler/StreamHandler.java  | 34 +++++++++++---------
 .../solr/handler/admin/LukeRequestHandler.java  |  2 +-
 .../solr/handler/admin/ThreadDumpHandler.java   |  5 +--
 .../component/QueryElevationComponent.java      |  4 ++-
 .../handler/component/RealTimeGetComponent.java | 16 +++++----
 .../solr/handler/loader/JavabinLoader.java      |  3 +-
 .../apache/solr/handler/loader/JsonLoader.java  | 10 ++++--
 .../apache/solr/handler/loader/XMLLoader.java   | 14 ++++----
 .../org/apache/solr/handler/sql/SolrTable.java  |  1 -
 .../reporters/solr/SolrClusterReporter.java     |  4 ++-
 .../TopGroupsResultTransformer.java             |  8 +++--
 .../apache/solr/search/mlt/CloudMLTQParser.java |  5 ++-
 .../solr/spelling/SpellCheckCollator.java       |  4 ++-
 .../apache/solr/update/AddUpdateCommand.java    |  3 +-
 .../java/org/apache/solr/update/PeerSync.java   |  3 +-
 .../org/apache/solr/update/VersionInfo.java     |  6 ++--
 .../processor/AtomicUpdateDocumentMerger.java   | 15 +++++----
 .../update/processor/CdcrUpdateProcessor.java   |  3 +-
 .../processor/DistributedUpdateProcessor.java   | 25 +++++++-------
 ...BasedVersionConstraintsProcessorFactory.java |  4 +--
 .../solr/cloud/CdcrVersionReplicationTest.java  |  4 +--
 .../cloud/FullSolrCloudDistribCmdsTest.java     | 18 +++++------
 .../solr/update/DirectUpdateHandlerTest.java    |  6 ++--
 .../update/TestInPlaceUpdatesStandalone.java    |  6 ++--
 .../org/apache/solr/update/UpdateLogTest.java   |  6 ++--
 .../solr/client/solrj/impl/CloudSolrClient.java |  3 +-
 .../apache/solr/client/solrj/io/ModelCache.java |  7 ++--
 .../client/solrj/io/stream/DaemonStream.java    |  9 ++++--
 .../client/solrj/io/stream/ExecutorStream.java  |  4 ++-
 .../io/stream/FeaturesSelectionStream.java      |  4 ++-
 .../client/solrj/io/stream/FetchStream.java     |  6 ++--
 .../client/solrj/io/stream/ModelStream.java     |  6 ++--
 .../client/solrj/io/stream/TextLogitStream.java |  4 ++-
 .../client/solrj/io/stream/TopicStream.java     | 17 ++++++----
 .../client/solrj/io/stream/UpdateStream.java    |  4 ++-
 .../request/JavaBinUpdateRequestCodec.java      |  5 +--
 .../client/solrj/request/UpdateRequest.java     |  9 +++---
 .../solr/common/ToleratedUpdateError.java       | 10 +++---
 .../solr/common/cloud/HashBasedRouter.java      |  8 +++--
 .../apache/solr/common/params/CommonParams.java |  4 +++
 .../apache/solr/common/params/ShardParams.java  | 24 +++++++-------
 48 files changed, 231 insertions(+), 159 deletions(-)
----------------------------------------------------------------------
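
A small sketch (not taken from the patch) of the literal-to-constant pattern this commit applies across the files listed above; the helper class and field values are illustrative, while ID and VERSION_FIELD are the CommonParams constants the callers are switched to:

// Sketch only: reference the shared constants instead of repeating "id" / "_version_".
import static org.apache.solr.common.params.CommonParams.ID;
import static org.apache.solr.common.params.CommonParams.VERSION_FIELD;

import org.apache.solr.common.SolrInputDocument;

class CommonParamsSketch {
  static SolrInputDocument exampleDoc() {
    SolrInputDocument doc = new SolrInputDocument();
    doc.setField(ID, "example-1");      // previously doc.setField("id", ...)
    doc.setField(VERSION_FIELD, 0L);    // previously the raw "_version_" literal
    return doc;
  }
}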


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java b/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
index 223a539..6e8dbda 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
@@ -58,6 +58,8 @@ import org.apache.zookeeper.data.Stat;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.ID;
+
 public abstract class ElectionContext implements Closeable {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   final String electionPath;
@@ -753,7 +755,7 @@ final class OverseerElectionContext extends ElectionContext {
     log.info("I am going to be the leader {}", id);
     final String id = leaderSeqPath
         .substring(leaderSeqPath.lastIndexOf("/") + 1);
-    ZkNodeProps myProps = new ZkNodeProps("id", id);
+    ZkNodeProps myProps = new ZkNodeProps(ID, id);
 
     zkClient.makePath(leaderPath, Utils.toJSON(myProps),
         CreateMode.EPHEMERAL, true);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/cloud/Overseer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/Overseer.java b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
index 61f15fc..e0449b4 100644
--- a/solr/core/src/java/org/apache/solr/cloud/Overseer.java
+++ b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
@@ -55,6 +55,8 @@ import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.ID;
+
 /**
  * Cluster leader. Responsible for processing state updates, node assignments, creating/deleting
  * collections, shards, replicas and setting various properties.
@@ -292,7 +294,7 @@ public class Overseer implements Closeable {
       }
       try {
         Map m = (Map) Utils.fromJSON(data);
-        String id = (String) m.get("id");
+        String id = (String) m.get(ID);
         if(overseerCollectionConfigSetProcessor.getId().equals(id)){
           try {
             log.warn("I'm exiting, but I'm still the leader");
@@ -372,7 +374,7 @@ public class Overseer implements Closeable {
           case UPDATESHARDSTATE:
             return Collections.singletonList(new SliceMutator(getZkStateReader()).updateShardState(clusterState, message));
           case QUIT:
-            if (myId.equals(message.get("id"))) {
+            if (myId.equals(message.get(ID))) {
               log.info("Quit command received {} {}", message, LeaderElector.getNodeName(myId));
               overseerCollectionConfigSetProcessor.close();
               close();
@@ -396,7 +398,7 @@ public class Overseer implements Closeable {
       try {
         ZkNodeProps props = ZkNodeProps.load(zkClient.getData(
             OVERSEER_ELECT + "/leader", null, null, true));
-        if (myId.equals(props.getStr("id"))) {
+        if (myId.equals(props.getStr(ID))) {
           return LeaderStatus.YES;
         }
       } catch (KeeperException e) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/cloud/OverseerNodePrioritizer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerNodePrioritizer.java b/solr/core/src/java/org/apache/solr/cloud/OverseerNodePrioritizer.java
index 798eca3..7db1315 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerNodePrioritizer.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerNodePrioritizer.java
@@ -17,14 +17,13 @@
 package org.apache.solr.cloud;
 
 import java.lang.invoke.MethodHandles;
-
 import java.util.List;
 import java.util.Map;
 
+import org.apache.solr.cloud.overseer.OverseerAction;
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZkNodeProps;
 import org.apache.solr.common.cloud.ZkStateReader;
-import org.apache.solr.cloud.overseer.OverseerAction;
 import org.apache.solr.common.params.CoreAdminParams;
 import org.apache.solr.common.params.CoreAdminParams.CoreAdminAction;
 import org.apache.solr.common.params.ModifiableSolrParams;
@@ -37,6 +36,8 @@ import org.apache.zookeeper.data.Stat;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.ID;
+
 /**
  * Responsible for prioritization of Overseer nodes, for example with the
  * ADDROLE collection command.
@@ -90,7 +91,7 @@ public class OverseerNodePrioritizer {
     //now ask the current leader to QUIT , so that the designate can takeover
     Overseer.getStateUpdateQueue(zkStateReader.getZkClient()).offer(
         Utils.toJSON(new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.QUIT.toLower(),
-            "id", OverseerTaskProcessor.getLeaderId(zkStateReader.getZkClient()))));
+            ID, OverseerTaskProcessor.getLeaderId(zkStateReader.getZkClient()))));
 
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java
index bed71a6..4dffb21 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java
@@ -34,8 +34,8 @@ import com.codahale.metrics.Timer;
 import com.google.common.collect.ImmutableSet;
 import org.apache.commons.io.IOUtils;
 import org.apache.solr.client.solrj.SolrResponse;
-import org.apache.solr.cloud.OverseerTaskQueue.QueueEvent;
 import org.apache.solr.cloud.Overseer.LeaderStatus;
+import org.apache.solr.cloud.OverseerTaskQueue.QueueEvent;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZkNodeProps;
@@ -50,6 +50,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
+import static org.apache.solr.common.params.CommonParams.ID;
 
 /**
  * A generic processor run in the Overseer, used for handling items added
@@ -375,7 +376,7 @@ public class OverseerTaskProcessor implements Runnable, Closeable {
       return null;
     }
     Map m = (Map) Utils.fromJSON(data);
-    return  (String) m.get("id");
+    return  (String) m.get(ID);
   }
 
   protected LeaderStatus amILeader() {
@@ -385,7 +386,7 @@ public class OverseerTaskProcessor implements Runnable, Closeable {
     try {
       ZkNodeProps props = ZkNodeProps.load(zkStateReader.getZkClient().getData(
           Overseer.OVERSEER_ELECT + "/leader", null, null, true));
-      if (myId.equals(props.getStr("id"))) {
+      if (myId.equals(props.getStr(ID))) {
         return LeaderStatus.YES;
       }
     } catch (KeeperException e) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/core/JmxMonitoredMap.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/JmxMonitoredMap.java b/solr/core/src/java/org/apache/solr/core/JmxMonitoredMap.java
index 8bfa662..4fb0dcd 100644
--- a/solr/core/src/java/org/apache/solr/core/JmxMonitoredMap.java
+++ b/solr/core/src/java/org/apache/solr/core/JmxMonitoredMap.java
@@ -38,7 +38,6 @@ import javax.management.openmbean.SimpleType;
 import javax.management.remote.JMXConnectorServer;
 import javax.management.remote.JMXConnectorServerFactory;
 import javax.management.remote.JMXServiceURL;
-
 import java.lang.invoke.MethodHandles;
 import java.lang.reflect.Field;
 import java.lang.reflect.Method;
@@ -58,6 +57,7 @@ import org.apache.solr.metrics.reporters.JmxObjectNameFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.ID;
 import static org.apache.solr.common.params.CommonParams.NAME;
 
 /**
@@ -269,7 +269,7 @@ public class JmxMonitoredMap<K, V> extends
     Hashtable<String, String> map = new Hashtable<>();
     map.put("type", key);
     if (infoBean.getName() != null && !"".equals(infoBean.getName())) {
-      map.put("id", infoBean.getName());
+      map.put(ID, infoBean.getName());
     }
     return ObjectName.getInstance(jmxRootName, map);
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/handler/BlobHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/BlobHandler.java b/solr/core/src/java/org/apache/solr/handler/BlobHandler.java
index 177af9e..7e97f59 100644
--- a/solr/core/src/java/org/apache/solr/handler/BlobHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/BlobHandler.java
@@ -64,7 +64,9 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static java.util.Collections.singletonMap;
+import static org.apache.solr.common.params.CommonParams.ID;
 import static org.apache.solr.common.params.CommonParams.JSON;
+import static org.apache.solr.common.params.CommonParams.VERSION;
 import static org.apache.solr.common.util.Utils.makeMap;
 
 public class BlobHandler extends RequestHandlerBase implements PluginInfoInitialized {
@@ -131,15 +133,15 @@ public class BlobHandler extends RequestHandlerBase implements PluginInfoInitial
         version++;
         String id = blobName + "/" + version;
         Map<String, Object> doc = makeMap(
-            "id", id,
+            ID, id,
             "md5", md5,
             "blobName", blobName,
-            "version", version,
+            VERSION, version,
             "timestamp", new Date(),
             "size", payload.limit(),
             "blob", payload);
         verifyWithRealtimeGet(blobName, version, req, doc);
-        log.info(StrUtils.formatString("inserting new blob {0} ,size {1}, md5 {2}", doc.get("id"), String.valueOf(payload.limit()), md5));
+        log.info(StrUtils.formatString("inserting new blob {0} ,size {1}, md5 {2}", doc.get(ID), String.valueOf(payload.limit()), md5));
         indexMap(req, rsp, doc);
         log.info(" Successfully Added and committed a blob with id {} and size {} ", id, payload.limit());
 
@@ -212,7 +214,7 @@ public class BlobHandler extends RequestHandlerBase implements PluginInfoInitial
     for (; ; ) {
       SolrQueryResponse response = new SolrQueryResponse();
       String id = blobName + "/" + version;
-      forward(req, "/get", new MapSolrParams(singletonMap("id", id)), response);
+      forward(req, "/get", new MapSolrParams(singletonMap(ID, id)), response);
       if (response.getValues().get("doc") == null) {
         //ensure that the version does not exist
         return;
@@ -221,7 +223,7 @@ public class BlobHandler extends RequestHandlerBase implements PluginInfoInitial
         version++;
         doc.put("version", version);
         id = blobName + "/" + version;
-        doc.put("id", id);
+        doc.put(ID, id);
       }
     }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/handler/CdcrReplicator.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/CdcrReplicator.java b/solr/core/src/java/org/apache/solr/handler/CdcrReplicator.java
index 8519815..75a787b 100644
--- a/solr/core/src/java/org/apache/solr/handler/CdcrReplicator.java
+++ b/solr/core/src/java/org/apache/solr/handler/CdcrReplicator.java
@@ -16,6 +16,11 @@
  */
 package org.apache.solr.handler;
 
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.nio.charset.Charset;
+import java.util.List;
+
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.request.UpdateRequest;
@@ -25,14 +30,10 @@ import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.update.CdcrUpdateLog;
 import org.apache.solr.update.UpdateLog;
 import org.apache.solr.update.processor.CdcrUpdateProcessor;
-import org.apache.solr.update.processor.DistributedUpdateProcessor;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.nio.charset.Charset;
-import java.util.List;
+import static org.apache.solr.common.params.CommonParams.VERSION_FIELD;
 
 /**
  * The replication logic. Given a {@link org.apache.solr.handler.CdcrReplicatorState}, it reads all the new entries
@@ -183,14 +184,14 @@ public class CdcrReplicator implements Runnable {
       case UpdateLog.DELETE: {
         byte[] idBytes = (byte[]) entry.get(2);
         req.deleteById(new String(idBytes, Charset.forName("UTF-8")));
-        req.setParam(DistributedUpdateProcessor.VERSION_FIELD, Long.toString(version));
+        req.setParam(VERSION_FIELD, Long.toString(version));
         return req;
       }
 
       case UpdateLog.DELETE_BY_QUERY: {
         String query = (String) entry.get(2);
         req.deleteByQuery(query);
-        req.setParam(DistributedUpdateProcessor.VERSION_FIELD, Long.toString(version));
+        req.setParam(VERSION_FIELD, Long.toString(version));
         return req;
       }
 

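In the CdcrReplicator hunks above, replayed DELETE and DELETE_BY_QUERY entries carry the original version as a request parameter, now referenced through CommonParams.VERSION_FIELD instead of the constant formerly on DistributedUpdateProcessor. A hedged SolrJ sketch of building such a delete request; the document id and version value are made up, and the request is only constructed here, not sent:

    import org.apache.solr.client.solrj.request.UpdateRequest;

    public class CdcrDeleteSketch {
      public static void main(String[] args) {
        UpdateRequest req = new UpdateRequest();
        req.deleteById("doc-1");                                 // replay a DELETE entry
        req.setParam("_version_", Long.toString(1234567890L));   // the VERSION_FIELD param set above
        System.out.println(req.getParams());
      }
    }
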
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
index dfae5cd..a25ede4 100644
--- a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
@@ -80,6 +80,8 @@ import org.apache.solr.util.plugin.SolrCoreAware;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.ID;
+
 public class StreamHandler extends RequestHandlerBase implements SolrCoreAware, PermissionNameProvider {
 
   static SolrClientCache clientCache = new SolrClientCache();
@@ -284,7 +286,7 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
   private void handleAdmin(SolrQueryRequest req, SolrQueryResponse rsp, SolrParams params) {
     String action = params.get("action");
     if("stop".equalsIgnoreCase(action)) {
-      String id = params.get("id");
+      String id = params.get(ID);
       DaemonStream d = daemons.get(id);
       if(d != null) {
         d.close();
@@ -292,21 +294,23 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
       } else {
         rsp.add("result-set", new DaemonResponseStream("Deamon:" + id + " not found on " + coreName));
       }
-    } else if("start".equalsIgnoreCase(action)) {
-      String id = params.get("id");
-      DaemonStream d = daemons.get(id);
-      d.open();
-      rsp.add("result-set", new DaemonResponseStream("Deamon:" + id + " started on " + coreName));
-    } else if("list".equalsIgnoreCase(action)) {
-      Collection<DaemonStream> vals = daemons.values();
-      rsp.add("result-set", new DaemonCollectionStream(vals));
-    } else if("kill".equalsIgnoreCase(action)) {
-      String id = params.get("id");
-      DaemonStream d = daemons.remove(id);
-      if (d != null) {
-        d.close();
+    } else {
+      if ("start".equalsIgnoreCase(action)) {
+        String id = params.get(ID);
+        DaemonStream d = daemons.get(id);
+        d.open();
+        rsp.add("result-set", new DaemonResponseStream("Deamon:" + id + " started on " + coreName));
+      } else if ("list".equalsIgnoreCase(action)) {
+        Collection<DaemonStream> vals = daemons.values();
+        rsp.add("result-set", new DaemonCollectionStream(vals));
+      } else if ("kill".equalsIgnoreCase(action)) {
+        String id = params.get("id");
+        DaemonStream d = daemons.remove(id);
+        if (d != null) {
+          d.close();
+        }
+        rsp.add("result-set", new DaemonResponseStream("Deamon:" + id + " killed on " + coreName));
       }
-      rsp.add("result-set", new DaemonResponseStream("Deamon:" + id + " killed on " + coreName));
     }
   }
 

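The restructured handleAdmin dispatch above reads an "action" parameter plus a daemon id. A hedged sketch of how such a request's parameters might be assembled with SolrJ; only the parameter names come from the code above, while the daemon name "d1" is illustrative:

    import org.apache.solr.common.params.ModifiableSolrParams;

    public class DaemonAdminParamsSketch {
      public static void main(String[] args) {
        ModifiableSolrParams stop = new ModifiableSolrParams();
        stop.set("action", "stop");   // compared case-insensitively in handleAdmin
        stop.set("id", "d1");         // read back via params.get(CommonParams.ID)
        System.out.println(stop);     // action=stop&id=d1
      }
    }
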
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java
index 62a50e6..8e0b1fb 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java
@@ -102,7 +102,7 @@ public class LukeRequestHandler extends RequestHandlerBase
   public static final String NUMTERMS = "numTerms";
   public static final String INCLUDE_INDEX_FIELD_FLAGS = "includeIndexFieldFlags";
   public static final String DOC_ID = "docId";
-  public static final String ID = "id";
+  public static final String ID = CommonParams.ID;
   public static final int DEFAULT_COUNT = 10;
 
   static final int HIST_ARRAY_SIZE = 33;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/handler/admin/ThreadDumpHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/ThreadDumpHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/ThreadDumpHandler.java
index f0e3970..bb5b3ee 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/ThreadDumpHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/ThreadDumpHandler.java
@@ -28,6 +28,7 @@ import org.apache.solr.handler.RequestHandlerBase;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 
+import static org.apache.solr.common.params.CommonParams.ID;
 import static org.apache.solr.common.params.CommonParams.NAME;
 
 /**
@@ -85,7 +86,7 @@ public class ThreadDumpHandler extends RequestHandlerBase
     SimpleOrderedMap<Object> info = new SimpleOrderedMap<>();
     long tid = ti.getThreadId();
 
-    info.add( "id", tid );
+    info.add( ID, tid );
     info.add(NAME, ti.getThreadName());
     info.add( "state", ti.getThreadState().toString() );
     
@@ -107,7 +108,7 @@ public class ThreadDumpHandler extends RequestHandlerBase
     if (ti.getLockOwnerName() != null) {
       SimpleOrderedMap<Object> owner = new SimpleOrderedMap<>();
       owner.add(NAME, ti.getLockOwnerName());
-      owner.add( "id", ti.getLockOwnerId() );
+      owner.add( ID, ti.getLockOwnerId() );
     }
     
     // Add the stack trace

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java b/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java
index 8482d65..c12902e 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java
@@ -92,6 +92,8 @@ import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
 import org.xml.sax.InputSource;
 
+import static org.apache.solr.common.params.CommonParams.ID;
+
 /**
  * A component to elevate some documents to the top of the result set.
  *
@@ -308,7 +310,7 @@ public class QueryElevationComponent extends SearchComponent implements SolrCore
       ArrayList<String> exclude = new ArrayList<>();
       for (int j = 0; j < children.getLength(); j++) {
         Node child = children.item(j);
-        String id = DOMUtil.getAttr(child, "id", "missing 'id'");
+        String id = DOMUtil.getAttr(child, ID, "missing 'id'");
         String e = DOMUtil.getAttr(child, EXCLUDE, null);
         if (e != null) {
           if (Boolean.valueOf(e)) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
index 900c787..12aa403 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
@@ -78,11 +78,13 @@ import org.apache.solr.update.DocumentBuilder;
 import org.apache.solr.update.IndexFingerprint;
 import org.apache.solr.update.PeerSync;
 import org.apache.solr.update.UpdateLog;
-import org.apache.solr.update.processor.DistributedUpdateProcessor;
 import org.apache.solr.util.RefCounted;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.ID;
+import static org.apache.solr.common.params.CommonParams.VERSION_FIELD;
+
 public class RealTimeGetComponent extends SearchComponent
 {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@@ -473,8 +475,8 @@ public class RealTimeGetComponent extends SearchComponent
       doc = toSolrDoc(luceneDocument, core.getLatestSchema());
       searcher.decorateDocValueFields(doc, docid, decorateFields);
 
-      long docVersion = (long) doc.getFirstValue(DistributedUpdateProcessor.VERSION_FIELD);
-      Object partialVersionObj = partialDoc.getFieldValue(DistributedUpdateProcessor.VERSION_FIELD);
+      long docVersion = (long) doc.getFirstValue(VERSION_FIELD);
+      Object partialVersionObj = partialDoc.getFieldValue(VERSION_FIELD);
       long partialDocVersion = partialVersionObj instanceof Field? ((Field) partialVersionObj).numericValue().longValue():
         partialVersionObj instanceof Number? ((Number) partialVersionObj).longValue(): Long.parseLong(partialVersionObj.toString());
       if (docVersion > partialDocVersion) {
@@ -621,8 +623,8 @@ public class RealTimeGetComponent extends SearchComponent
     }
 
     if (versionReturned != null) {
-      if (sid.containsKey(DistributedUpdateProcessor.VERSION_FIELD)) {
-        versionReturned.set((long)sid.getFieldValue(DistributedUpdateProcessor.VERSION_FIELD));
+      if (sid.containsKey(VERSION_FIELD)) {
+        versionReturned.set((long)sid.getFieldValue(VERSION_FIELD));
       }
     }
     return sid;
@@ -841,7 +843,7 @@ public class RealTimeGetComponent extends SearchComponent
     sreq.params.set("distrib",false);
 
     sreq.params.remove(ShardParams.SHARDS);
-    sreq.params.remove("id");
+    sreq.params.remove(ID);
     sreq.params.remove("ids");
     sreq.params.set("ids", StrUtils.join(ids, ','));
     
@@ -1146,7 +1148,7 @@ public class RealTimeGetComponent extends SearchComponent
         return (IdsRequsted)req.getContext().get(contextKey);
       }
       final SolrParams params = req.getParams();
-      final String id[] = params.getParams("id");
+      final String id[] = params.getParams(ID);
       final String ids[] = params.getParams("ids");
       
       if (id == null && ids == null) {

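The RealTimeGetComponent hunk above normalizes the partial document's _version_, which may arrive as a Lucene Field, a boxed Number, or a String, to a long before comparing it with the indexed version. A small sketch of that normalization in plain Java, with the Lucene Field branch omitted and the method name chosen only for illustration:

    public class PartialVersionSketch {
      // Only the Number and String cases from the code above are shown here.
      static long asLong(Object partialVersionObj) {
        return partialVersionObj instanceof Number
            ? ((Number) partialVersionObj).longValue()
            : Long.parseLong(partialVersionObj.toString());
      }

      public static void main(String[] args) {
        System.out.println(asLong(42L));    // 42
        System.out.println(asLong("43"));   // 43
      }
    }
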
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/handler/loader/JavabinLoader.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/loader/JavabinLoader.java b/solr/core/src/java/org/apache/solr/handler/loader/JavabinLoader.java
index 873bcd1..870737a 100644
--- a/solr/core/src/java/org/apache/solr/handler/loader/JavabinLoader.java
+++ b/solr/core/src/java/org/apache/solr/handler/loader/JavabinLoader.java
@@ -20,6 +20,7 @@ import org.apache.solr.client.solrj.request.JavaBinUpdateRequestCodec;
 import org.apache.solr.client.solrj.request.UpdateRequest;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.params.ShardParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.params.UpdateParams;
 import org.apache.solr.common.util.ContentStream;
@@ -140,7 +141,7 @@ public class JavabinLoader extends ContentStreamLoader {
           }
         }
         if (map != null) {
-          String route = (String) map.get(UpdateRequest.ROUTE);
+          String route = (String) map.get(ShardParams._ROUTE_);
           if (route != null) {
             delcmd.setRoute(route);
           }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/handler/loader/JsonLoader.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/loader/JsonLoader.java b/solr/core/src/java/org/apache/solr/handler/loader/JsonLoader.java
index 8650ab8..b93d5ef 100644
--- a/solr/core/src/java/org/apache/solr/handler/loader/JsonLoader.java
+++ b/solr/core/src/java/org/apache/solr/handler/loader/JsonLoader.java
@@ -15,6 +15,7 @@
  * limitations under the License.
  */
 package org.apache.solr.handler.loader;
+
 import java.io.IOException;
 import java.io.Reader;
 import java.io.StringReader;
@@ -57,8 +58,11 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static java.util.stream.Collectors.toList;
+import static org.apache.solr.common.params.CommonParams.ID;
 import static org.apache.solr.common.params.CommonParams.JSON;
 import static org.apache.solr.common.params.CommonParams.PATH;
+import static org.apache.solr.common.params.CommonParams.VERSION_FIELD;
+import static org.apache.solr.common.params.ShardParams._ROUTE_;
 
 
 /**
@@ -364,15 +368,15 @@ public class JsonLoader extends ContentStreamLoader {
         if (ev == JSONParser.STRING) {
           String key = parser.getString();
           if (parser.wasKey()) {
-            if ("id".equals(key)) {
+            if (ID.equals(key)) {
               cmd.setId(getString(parser.nextEvent()));
             } else if ("query".equals(key)) {
               cmd.setQuery(parser.getString());
             } else if ("commitWithin".equals(key)) {
               cmd.commitWithin = (int) parser.getLong();
-            } else if ("_version_".equals(key)) {
+            } else if (VERSION_FIELD.equals(key)) {
               cmd.setVersion(parser.getLong());
-            } else if ("_route_".equals(key)) {
+            } else if (_ROUTE_.equals(key)) {
               cmd.setRoute(parser.getString());
             } else {
               throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown key '" + key + "' at [" + parser.getPosition() + "]");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java b/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java
index 038ed9f..000edee 100644
--- a/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java
+++ b/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java
@@ -15,6 +15,7 @@
  * limitations under the License.
  */
 package org.apache.solr.handler.loader;
+
 import javax.xml.parsers.SAXParserFactory;
 import javax.xml.stream.FactoryConfigurationError;
 import javax.xml.stream.XMLInputFactory;
@@ -39,11 +40,12 @@ import java.util.concurrent.atomic.AtomicBoolean;
 
 import com.google.common.collect.Lists;
 import org.apache.commons.io.IOUtils;
-import org.apache.solr.client.solrj.request.UpdateRequest;
+import org.apache.solr.common.EmptyEntityResolver;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.params.ShardParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.params.UpdateParams;
 import org.apache.solr.common.util.ContentStream;
@@ -60,13 +62,13 @@ import org.apache.solr.update.CommitUpdateCommand;
 import org.apache.solr.update.DeleteUpdateCommand;
 import org.apache.solr.update.RollbackUpdateCommand;
 import org.apache.solr.update.processor.UpdateRequestProcessor;
-import org.apache.solr.common.EmptyEntityResolver;
 import org.apache.solr.util.xslt.TransformerProvider;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.xml.sax.InputSource;
 import org.xml.sax.XMLReader;
 
+import static org.apache.solr.common.params.CommonParams.ID;
 import static org.apache.solr.common.params.CommonParams.NAME;
 
 
@@ -318,7 +320,7 @@ public class XMLLoader extends ContentStreamLoader {
       switch (event) {
         case XMLStreamConstants.START_ELEMENT:
           String mode = parser.getLocalName();
-          if (!("id".equals(mode) || "query".equals(mode))) {
+          if (!(ID.equals(mode) || "query".equals(mode))) {
             String msg = "XML element <delete> has invalid XML child element: " + mode;
             log.warn(msg);
             throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
@@ -326,14 +328,14 @@ public class XMLLoader extends ContentStreamLoader {
           }
           text.setLength(0);
           
-          if ("id".equals(mode)) {
+          if (ID.equals(mode)) {
             for (int i = 0; i < parser.getAttributeCount(); i++) {
               String attrName = parser.getAttributeLocalName(i);
               String attrVal = parser.getAttributeValue(i);
               if (UpdateRequestHandler.VERSION.equals(attrName)) {
                 deleteCmd.setVersion(Long.parseLong(attrVal));
               }
-              if (UpdateRequest.ROUTE.equals(attrName)) {
+              if (ShardParams._ROUTE_.equals(attrName)) {
                 deleteCmd.setRoute(attrVal);
               }
             }
@@ -342,7 +344,7 @@ public class XMLLoader extends ContentStreamLoader {
 
         case XMLStreamConstants.END_ELEMENT:
           String currTag = parser.getLocalName();
-          if ("id".equals(currTag)) {
+          if (ID.equals(currTag)) {
             deleteCmd.setId(text.toString());         
           } else if ("query".equals(currTag)) {
             deleteCmd.setQuery(text.toString());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java b/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
index 644ed97..c3b83db 100644
--- a/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
+++ b/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
@@ -61,7 +61,6 @@ import java.util.stream.Collectors;
  */
 class SolrTable extends AbstractQueryableTable implements TranslatableTable {
   private static final String DEFAULT_QUERY = "*:*";
-  private static final String DEFAULT_VERSION_FIELD = "_version_";
 
   private final String collection;
   private final SolrSchema schema;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
index 0c3b651..a34accd 100644
--- a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
@@ -41,6 +41,8 @@ import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.ID;
+
 /**
  * This reporter sends selected metrics from local registries to {@link Overseer}.
  * <p>The following configuration properties are supported:</p>
@@ -253,7 +255,7 @@ public class SolrClusterReporter extends SolrMetricReporter {
       if (props == null) {
         return lastKnownUrl;
       }
-      String oid = props.getStr("id");
+      String oid = props.getStr(ID);
       if (oid == null) {
         return lastKnownUrl;
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java
index 415e513..adb81de 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java
@@ -47,6 +47,8 @@ import org.apache.solr.search.grouping.distributed.command.TopGroupsFieldCommand
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.ID;
+
 /**
  * Implementation for transforming {@link TopGroups} and {@link TopDocs} into a {@link NamedList} structure and
  * visa versa.
@@ -158,7 +160,7 @@ public class TopGroupsResultTransformer implements ShardResultTransformer<List<C
     ScoreDoc[] scoreDocs = new ScoreDoc[documents.size()];
     int j = 0;
     for (NamedList<Object> document : documents) {
-      Object docId = document.get("id");
+      Object docId = document.get(ID);
       if (docId != null) {
         docId = docId.toString();
       } else {
@@ -208,7 +210,7 @@ public class TopGroupsResultTransformer implements ShardResultTransformer<List<C
         documents.add(document);
 
         Document doc = retrieveDocument(uniqueField, searchGroup.scoreDocs[i].doc);
-        document.add("id", uniqueField.getType().toExternal(doc.getField(uniqueField.getName())));
+        document.add(ID, uniqueField.getType().toExternal(doc.getField(uniqueField.getName())));
         if (!Float.isNaN(searchGroup.scoreDocs[i].score))  {
           document.add("score", searchGroup.scoreDocs[i].score);
         }
@@ -259,7 +261,7 @@ public class TopGroupsResultTransformer implements ShardResultTransformer<List<C
       documents.add(document);
 
       Document doc = retrieveDocument(uniqueField, scoreDoc.doc);
-      document.add("id", uniqueField.getType().toExternal(doc.getField(uniqueField.getName())));
+      document.add(ID, uniqueField.getType().toExternal(doc.getField(uniqueField.getName())));
       if (!Float.isNaN(scoreDoc.score))  {
         document.add("score", scoreDoc.score);
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/search/mlt/CloudMLTQParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/mlt/CloudMLTQParser.java b/solr/core/src/java/org/apache/solr/search/mlt/CloudMLTQParser.java
index 5975f8f..3ff432d 100644
--- a/solr/core/src/java/org/apache/solr/search/mlt/CloudMLTQParser.java
+++ b/solr/core/src/java/org/apache/solr/search/mlt/CloudMLTQParser.java
@@ -15,6 +15,7 @@
  * limitations under the License.
  */
 package org.apache.solr.search.mlt;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -47,6 +48,8 @@ import org.apache.solr.search.QParser;
 import org.apache.solr.search.QueryParsing;
 import org.apache.solr.util.SolrPluginUtils;
 
+import static org.apache.solr.common.params.CommonParams.ID;
+
 public class CloudMLTQParser extends QParser {
   // Pattern is thread safe -- TODO? share this with general 'fl' param
   private static final Pattern splitList = Pattern.compile(",| ");
@@ -178,7 +181,7 @@ public class CloudMLTQParser extends QParser {
     SolrCore core = req.getCore();
     SolrQueryResponse rsp = new SolrQueryResponse();
     ModifiableSolrParams params = new ModifiableSolrParams();
-    params.add("id", id);
+    params.add(ID, id);
 
     SolrQueryRequestBase request = new SolrQueryRequestBase(core, params) {
     };

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java b/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java
index 0738081..3394de1 100644
--- a/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java
+++ b/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java
@@ -41,6 +41,8 @@ import org.apache.solr.search.SolrIndexSearcher;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.ID;
+
 public class SpellCheckCollator {
   private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private int maxCollations = 1;
@@ -116,7 +118,7 @@ public class SpellCheckCollator {
         params.remove(CommonParams.START);
         params.set(CommonParams.ROWS, "" + docCollectionLimit);
         // we don't want any stored fields
-        params.set(CommonParams.FL, "id");
+        params.set(CommonParams.FL, ID);
         // we'll sort by doc id to ensure no scoring is done.
         params.set(CommonParams.SORT, "_docid_ asc");
         // CursorMark does not like _docid_ sorting, and we don't need it.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/update/AddUpdateCommand.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/AddUpdateCommand.java b/solr/core/src/java/org/apache/solr/update/AddUpdateCommand.java
index f526397..596ddd3 100644
--- a/solr/core/src/java/org/apache/solr/update/AddUpdateCommand.java
+++ b/solr/core/src/java/org/apache/solr/update/AddUpdateCommand.java
@@ -27,6 +27,7 @@ import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.SolrInputField;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.schema.SchemaField;
@@ -191,7 +192,7 @@ public class AddUpdateCommand extends UpdateCommand implements Iterable<Document
 
         for (SolrInputDocument sdoc : all) {
           sdoc.setField("_root_", idField);      // should this be a string or the same type as the ID?
-          if(isVersion) sdoc.setField(VersionInfo.VERSION_FIELD, version);
+          if(isVersion) sdoc.setField(CommonParams.VERSION_FIELD, version);
           // TODO: if possible concurrent modification exception (if SolrInputDocument not cloned and is being forwarded to replicas)
           // then we could add this field to the generated lucene document instead.
         }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/update/PeerSync.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/PeerSync.java b/solr/core/src/java/org/apache/solr/update/PeerSync.java
index 874e39c..425d1db 100644
--- a/solr/core/src/java/org/apache/solr/update/PeerSync.java
+++ b/solr/core/src/java/org/apache/solr/update/PeerSync.java
@@ -60,6 +60,7 @@ import org.apache.solr.update.processor.UpdateRequestProcessorChain;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.ID;
 import static org.apache.solr.update.processor.DistributedUpdateProcessor.DistribPhase.FROMLEADER;
 import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM;
 
@@ -799,7 +800,7 @@ public class PeerSync implements SolrMetricProducer {
             cmd.setVersion(version);
             cmd.setFlags(UpdateCommand.PEER_SYNC | UpdateCommand.IGNORE_AUTOCOMMIT);
             if (debug) {
-              log.debug(msg() + "add " + cmd + " id " + sdoc.getField("id"));
+              log.debug(msg() + "add " + cmd + " id " + sdoc.getField(ID));
             }
             proc.processAdd(cmd);
             break;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/update/VersionInfo.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/VersionInfo.java b/solr/core/src/java/org/apache/solr/update/VersionInfo.java
index 07172eb..061e7f6 100644
--- a/solr/core/src/java/org/apache/solr/update/VersionInfo.java
+++ b/solr/core/src/java/org/apache/solr/update/VersionInfo.java
@@ -41,12 +41,12 @@ import org.apache.solr.util.RefCounted;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.VERSION_FIELD;
+
 public class VersionInfo {
 
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
-  public static final String VERSION_FIELD="_version_";
-
   private final UpdateLog ulog;
   private final VersionBucket[] buckets;
   private SchemaField versionField;
@@ -54,7 +54,7 @@ public class VersionInfo {
   final ReadWriteLock lock = new ReentrantReadWriteLock(true);
 
   /**
-   * Gets and returns the {@link #VERSION_FIELD} from the specified 
+   * Gets and returns the {@link org.apache.solr.common.params.CommonParams#VERSION_FIELD} from the specified
    * schema, after verifying that it is indexed, stored, and single-valued.  
    * If any of these pre-conditions are not met, it throws a SolrException 
    * with a user suitable message indicating the problem.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java b/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java
index 9061235..2689f0e 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java
@@ -35,6 +35,7 @@ import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.SolrInputField;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.handler.component.RealTimeGetComponent;
 import org.apache.solr.request.SolrQueryRequest;
@@ -47,6 +48,8 @@ import org.apache.solr.util.RefCounted;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.ID;
+
 /**
  * @lucene.experimental
  */
@@ -160,7 +163,7 @@ public class AtomicUpdateDocumentMerger {
     final Set<String> candidateFields = new HashSet<>();
 
     // if _version_ field is not supported for in-place update, bail out early
-    SchemaField versionField = schema.getFieldOrNull(DistributedUpdateProcessor.VERSION_FIELD);
+    SchemaField versionField = schema.getFieldOrNull(CommonParams.VERSION_FIELD);
     if (versionField == null || !isSupportedFieldForInPlaceUpdate(versionField)) {
       return Collections.emptySet();
     }
@@ -169,7 +172,7 @@ public class AtomicUpdateDocumentMerger {
     // and bail out early if anything is obviously not a valid in-place update
     for (String fieldName : sdoc.getFieldNames()) {
       if (fieldName.equals(uniqueKeyFieldName)
-          || fieldName.equals(DistributedUpdateProcessor.VERSION_FIELD)) {
+          || fieldName.equals(CommonParams.VERSION_FIELD)) {
         continue;
       }
       Object fieldValue = sdoc.getField(fieldName).getValue();
@@ -245,7 +248,7 @@ public class AtomicUpdateDocumentMerger {
     SolrInputDocument inputDoc = cmd.getSolrInputDocument();
     BytesRef idBytes = cmd.getIndexedId();
 
-    updatedFields.add(DistributedUpdateProcessor.VERSION_FIELD); // add the version field so that it is fetched too
+    updatedFields.add(CommonParams.VERSION_FIELD); // add the version field so that it is fetched too
     SolrInputDocument oldDocument = RealTimeGetComponent.getInputDocument
       (cmd.getReq().getCore(), idBytes,
        null, // don't want the version to be returned
@@ -258,11 +261,11 @@ public class AtomicUpdateDocumentMerger {
       return false;
     }
 
-    if (oldDocument.containsKey(DistributedUpdateProcessor.VERSION_FIELD) == false) {
+    if (oldDocument.containsKey(CommonParams.VERSION_FIELD) == false) {
       throw new SolrException (ErrorCode.INVALID_STATE, "There is no _version_ in previous document. id=" + 
           cmd.getPrintableId());
     }
-    Long oldVersion = (Long) oldDocument.remove(DistributedUpdateProcessor.VERSION_FIELD).getValue();
+    Long oldVersion = (Long) oldDocument.remove(CommonParams.VERSION_FIELD).getValue();
 
     // If the oldDocument contains any other field apart from updatedFields (or id/version field), then remove them.
     // This can happen, despite requesting for these fields in the call to RTGC.getInputDocument, if the document was
@@ -270,7 +273,7 @@ public class AtomicUpdateDocumentMerger {
     if (updatedFields != null) {
       Collection<String> names = new HashSet<String>(oldDocument.getFieldNames());
       for (String fieldName: names) {
-        if (fieldName.equals(DistributedUpdateProcessor.VERSION_FIELD)==false && fieldName.equals("id")==false && updatedFields.contains(fieldName)==false) {
+        if (fieldName.equals(CommonParams.VERSION_FIELD)==false && fieldName.equals(ID)==false && updatedFields.contains(fieldName)==false) {
           oldDocument.remove(fieldName);
         }
       }

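The last AtomicUpdateDocumentMerger hunk above prunes the fetched old document down to the id, the version, and the fields being updated before applying an in-place update. A hedged sketch of that rule with plain collections standing in for SolrInputDocument; the field names in main are invented for the example:

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    public class InPlacePruneSketch {
      static void prune(Map<String, Object> oldDoc, Set<String> updatedFields) {
        for (String f : new HashSet<>(oldDoc.keySet())) {
          if (!f.equals("_version_") && !f.equals("id") && !updatedFields.contains(f)) {
            oldDoc.remove(f);   // drop stale stored fields not part of this update
          }
        }
      }

      public static void main(String[] args) {
        Map<String, Object> oldDoc = new HashMap<>();
        oldDoc.put("id", "doc-1");
        oldDoc.put("_version_", 100L);
        oldDoc.put("price", 9.99);
        oldDoc.put("title", "unrelated stored field");
        prune(oldDoc, new HashSet<>(Arrays.asList("price")));
        System.out.println(oldDoc);   // keeps id, _version_, price only
      }
    }
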
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/update/processor/CdcrUpdateProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/CdcrUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/CdcrUpdateProcessor.java
index 5bbc4a2..ee45467 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/CdcrUpdateProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/CdcrUpdateProcessor.java
@@ -19,6 +19,7 @@ package org.apache.solr.update.processor;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.request.SolrQueryRequest;
@@ -101,7 +102,7 @@ public class CdcrUpdateProcessor extends DistributedUpdateProcessor {
 //      } else {
 //        log.info("+++ cdcr.update version present, params are: " + params);
 //      }
-      result.set(DistributedUpdateProcessor.VERSION_FIELD, params.get(DistributedUpdateProcessor.VERSION_FIELD));
+      result.set(CommonParams.VERSION_FIELD, params.get(CommonParams.VERSION_FIELD));
     }
 
     return result;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
index 08ede72..fe71b0b 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
@@ -64,6 +64,7 @@ import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZkCoreNodeProps;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.cloud.ZooKeeperException;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.ShardParams;
 import org.apache.solr.common.params.SolrParams;
@@ -240,8 +241,6 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
   private final UpdateRequestProcessor next;
   private final AtomicUpdateDocumentMerger docMerger;
 
-  public static final String VERSION_FIELD = "_version_";
-
   private final UpdateHandler updateHandler;
   private final UpdateLog ulog;
   private final VersionInfo vinfo;
@@ -310,7 +309,7 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
     
     // this should always be used - see filterParams
     DistributedUpdateProcessorFactory.addParamToDistributedRequestWhitelist
-      (this.req, UpdateParams.UPDATE_CHAIN, TEST_DISTRIB_SKIP_SERVERS, VERSION_FIELD);
+      (this.req, UpdateParams.UPDATE_CHAIN, TEST_DISTRIB_SKIP_SERVERS, CommonParams.VERSION_FIELD);
     
     CoreDescriptor coreDesc = req.getCore().getCoreDescriptor();
     
@@ -1031,13 +1030,13 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
     long versionOnUpdate = cmd.getVersion();
 
     if (versionOnUpdate == 0) {
-      SolrInputField versionField = cmd.getSolrInputDocument().getField(VersionInfo.VERSION_FIELD);
+      SolrInputField versionField = cmd.getSolrInputDocument().getField(CommonParams.VERSION_FIELD);
       if (versionField != null) {
         Object o = versionField.getValue();
         versionOnUpdate = o instanceof Number ? ((Number) o).longValue() : Long.parseLong(o.toString());
       } else {
         // Find the version
-        String versionOnUpdateS = req.getParams().get(VERSION_FIELD);
+        String versionOnUpdateS = req.getParams().get(CommonParams.VERSION_FIELD);
         versionOnUpdate = versionOnUpdateS == null ? 0 : Long.parseLong(versionOnUpdateS);
       }
     }
@@ -1084,7 +1083,7 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
               // forwarded from a collection but we are not buffering so strip original version and apply our own
               // see SOLR-5308
               log.info("Removing version field from doc: " + cmd.getPrintableId());
-              cmd.solrDoc.remove(VERSION_FIELD);
+              cmd.solrDoc.remove(CommonParams.VERSION_FIELD);
               versionOnUpdate = 0;
             }
 
@@ -1114,7 +1113,7 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
 
             long version = vinfo.getNewClock();
             cmd.setVersion(version);
-            cmd.getSolrInputDocument().setField(VersionInfo.VERSION_FIELD, version);
+            cmd.getSolrInputDocument().setField(CommonParams.VERSION_FIELD, version);
             bucket.updateHighest(version);
           } else {
             // The leader forwarded us this update.
@@ -1152,7 +1151,7 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
                   // Make this update to become a non-inplace update containing the full document obtained from the leader
                   cmd.solrDoc = ((AddUpdateCommand)fetchedFromLeader).solrDoc;
                   cmd.prevVersion = -1;
-                  cmd.setVersion((long)cmd.solrDoc.getFieldValue(VERSION_FIELD));
+                  cmd.setVersion((long)cmd.solrDoc.getFieldValue(CommonParams.VERSION_FIELD));
                   assert cmd.isInPlaceUpdate() == false;
                 }
               } else {
@@ -1354,7 +1353,7 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
 
     AddUpdateCommand cmd = new AddUpdateCommand(req);
     cmd.solrDoc = leaderDoc;
-    cmd.setVersion((long)leaderDoc.getFieldValue(VERSION_FIELD));
+    cmd.setVersion((long)leaderDoc.getFieldValue(CommonParams.VERSION_FIELD));
     return cmd;
   }
   
@@ -1386,7 +1385,7 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
         throw new SolrException(ErrorCode.CONFLICT, "Document not found for update.  id=" + cmd.getPrintableId());
       }
     } else {
-      oldDoc.remove(VERSION_FIELD);
+      oldDoc.remove(CommonParams.VERSION_FIELD);
     }
 
 
@@ -1598,7 +1597,7 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
     if (zkEnabled)  {
       // forward to all replicas
       ModifiableSolrParams params = new ModifiableSolrParams(filterParams(req.getParams()));
-      params.set(VERSION_FIELD, Long.toString(cmd.getVersion()));
+      params.set(CommonParams.VERSION_FIELD, Long.toString(cmd.getVersion()));
       params.set(DISTRIB_UPDATE_PARAM, DistribPhase.FROMLEADER.toString());
       params.set(DISTRIB_FROM, ZkCoreNodeProps.getCoreUrl(
           zkController.getBaseUrl(), req.getCore().getName()));
@@ -1667,7 +1666,7 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
     // Find the version
     long versionOnUpdate = cmd.getVersion();
     if (versionOnUpdate == 0) {
-      String versionOnUpdateS = req.getParams().get(VERSION_FIELD);
+      String versionOnUpdateS = req.getParams().get(CommonParams.VERSION_FIELD);
       versionOnUpdate = versionOnUpdateS == null ? 0 : Long.parseLong(versionOnUpdateS);
     }
     versionOnUpdate = Math.abs(versionOnUpdate);  // normalize to positive version
@@ -1776,7 +1775,7 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
     // Find the version
     long versionOnUpdate = cmd.getVersion();
     if (versionOnUpdate == 0) {
-      String versionOnUpdateS = req.getParams().get(VERSION_FIELD);
+      String versionOnUpdateS = req.getParams().get(CommonParams.VERSION_FIELD);
       versionOnUpdate = versionOnUpdateS == null ? 0 : Long.parseLong(versionOnUpdateS);
     }
     long signedVersionOnUpdate = versionOnUpdate;

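Several DistributedUpdateProcessor hunks above resolve the version of an incoming update the same way: prefer the _version_ field on the document, fall back to the request parameter, and treat a missing value as 0. A minimal sketch of that resolution order in plain Java; the method and class names are illustrative, not taken from the source:

    public class VersionOnUpdateSketch {
      static long resolveVersion(Object versionFieldValue, String versionParam) {
        if (versionFieldValue != null) {
          return versionFieldValue instanceof Number
              ? ((Number) versionFieldValue).longValue()
              : Long.parseLong(versionFieldValue.toString());
        }
        return versionParam == null ? 0L : Long.parseLong(versionParam);
      }

      public static void main(String[] args) {
        System.out.println(resolveVersion(1234L, null));   // document field wins
        System.out.println(resolveVersion(null, "99"));    // falls back to the request param
        System.out.println(resolveVersion(null, null));    // 0 = no version constraint
      }
    }
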
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/java/org/apache/solr/update/processor/DocBasedVersionConstraintsProcessorFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/DocBasedVersionConstraintsProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/DocBasedVersionConstraintsProcessorFactory.java
index b089c94..a8b331c 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/DocBasedVersionConstraintsProcessorFactory.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/DocBasedVersionConstraintsProcessorFactory.java
@@ -22,6 +22,7 @@ import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.handler.component.RealTimeGetComponent;
@@ -33,7 +34,6 @@ import org.apache.solr.search.SolrIndexSearcher;
 import org.apache.solr.update.AddUpdateCommand;
 import org.apache.solr.update.DeleteUpdateCommand;
 import org.apache.solr.update.UpdateCommand;
-import org.apache.solr.update.VersionInfo;
 import org.apache.solr.util.RefCounted;
 import org.apache.solr.util.plugin.SolrCoreAware;
 import org.slf4j.Logger;
@@ -202,7 +202,7 @@ public class DocBasedVersionConstraintsProcessorFactory extends UpdateRequestPro
       this.core = req.getCore();
       this.versionFieldName = versionField;
       this.userVersionField = core.getLatestSchema().getField(versionField);
-      this.solrVersionField = core.getLatestSchema().getField(VersionInfo.VERSION_FIELD);
+      this.solrVersionField = core.getLatestSchema().getField(CommonParams.VERSION_FIELD);
       this.useFieldCache = useFieldCache;
 
       for (UpdateRequestProcessor proc = next ;proc != null; proc = proc.next) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/test/org/apache/solr/cloud/CdcrVersionReplicationTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/CdcrVersionReplicationTest.java b/solr/core/src/test/org/apache/solr/cloud/CdcrVersionReplicationTest.java
index 367bbaf..59d3818 100644
--- a/solr/core/src/test/org/apache/solr/cloud/CdcrVersionReplicationTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/CdcrVersionReplicationTest.java
@@ -23,9 +23,9 @@ import org.apache.solr.client.solrj.request.UpdateRequest;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrException;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.update.processor.CdcrUpdateProcessor;
-import org.apache.solr.update.processor.DistributedUpdateProcessor;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -40,7 +40,7 @@ public class CdcrVersionReplicationTest extends BaseCdcrDistributedZkTest {
 
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
-  private static final String vfield = DistributedUpdateProcessor.VERSION_FIELD;
+  private static final String vfield = CommonParams.VERSION_FIELD;
   SolrClient solrServer;
 
   public CdcrVersionReplicationTest() {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java b/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java
index e15ab0d..7f3ab96 100644
--- a/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java
@@ -16,6 +16,11 @@
  */
 package org.apache.solr.cloud;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
 import org.apache.solr.client.solrj.SolrClient;
@@ -38,16 +43,11 @@ import org.apache.solr.common.params.CollectionParams.CollectionAction;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.Utils;
-import org.apache.solr.update.VersionInfo;
-import org.apache.solr.update.processor.DistributedUpdateProcessor;
 import org.apache.zookeeper.CreateMode;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
+import static org.apache.solr.common.params.CommonParams.VERSION_FIELD;
 
 /**
  * Super basic testing, no shard restarting or anything.
@@ -718,12 +718,12 @@ public class FullSolrCloudDistribCmdsTest extends AbstractFullDistribZkTestBase
   
   private void testOptimisticUpdate(QueryResponse results) throws Exception {
     SolrDocument doc = results.getResults().get(0);
-    Long version = (Long) doc.getFieldValue(VersionInfo.VERSION_FIELD);
+    Long version = (Long) doc.getFieldValue(VERSION_FIELD);
     Integer theDoc = (Integer) doc.getFieldValue("id");
     UpdateRequest uReq = new UpdateRequest();
     SolrInputDocument doc1 = new SolrInputDocument();
     uReq.setParams(new ModifiableSolrParams());
-    uReq.getParams().set(DistributedUpdateProcessor.VERSION_FIELD, Long.toString(version));
+    uReq.getParams().set(VERSION_FIELD, Long.toString(version));
     addFields(doc1, "id", theDoc, t1, "theupdatestuff");
     uReq.add(doc1);
     
@@ -736,7 +736,7 @@ public class FullSolrCloudDistribCmdsTest extends AbstractFullDistribZkTestBase
     SolrInputDocument doc2 = new SolrInputDocument();
     uReq = new UpdateRequest();
     uReq.setParams(new ModifiableSolrParams());
-    uReq.getParams().set(DistributedUpdateProcessor.VERSION_FIELD, Long.toString(version));
+    uReq.getParams().set(VERSION_FIELD, Long.toString(version));
     addFields(doc2, "id", theDoc, t1, "thenewupdatestuff");
     uReq.add(doc2);
     

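The test hunks above exercise optimistic concurrency: the client sends the _version_ it last saw as a request parameter, and the update is rejected if the document has since changed. A hedged SolrJ sketch of building such a request; the id, field name, and version value are invented, and the request is only assembled here, not sent to a cluster:

    import org.apache.solr.client.solrj.request.UpdateRequest;
    import org.apache.solr.common.SolrInputDocument;
    import org.apache.solr.common.params.ModifiableSolrParams;

    public class OptimisticUpdateSketch {
      public static void main(String[] args) {
        SolrInputDocument doc = new SolrInputDocument();
        doc.addField("id", "doc-1");
        doc.addField("t1", "theupdatestuff");

        UpdateRequest uReq = new UpdateRequest();
        uReq.setParams(new ModifiableSolrParams());
        uReq.getParams().set("_version_", Long.toString(1565432109876L)); // version the client expects
        uReq.add(doc);
        System.out.println(uReq.getParams());
      }
    }
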
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java b/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java
index 462241a..7d97ee4 100644
--- a/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java
@@ -26,8 +26,8 @@ import java.util.concurrent.atomic.AtomicLong;
 import com.codahale.metrics.Gauge;
 import com.codahale.metrics.Meter;
 import com.codahale.metrics.Metric;
-import org.apache.lucene.index.TieredMergePolicy;
 import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.TieredMergePolicy;
 import org.apache.lucene.store.Directory;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.params.CommonParams;
@@ -46,6 +46,8 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.VERSION_FIELD;
+
 /**
  * 
  *
@@ -128,7 +130,7 @@ public class DirectUpdateHandlerTest extends SolrTestCaseJ4 {
     assertNull("This test requires a schema that has no version field, " +
                "it appears the schema file in use has been edited to violate " +
                "this requirement",
-               h.getCore().getLatestSchema().getFieldOrNull(VersionInfo.VERSION_FIELD));
+               h.getCore().getLatestSchema().getFieldOrNull(VERSION_FIELD));
 
     assertU(adoc("id","5"));
     assertU(adoc("id","6"));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesStandalone.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesStandalone.java b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesStandalone.java
index 877467e..aa075a4 100644
--- a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesStandalone.java
+++ b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesStandalone.java
@@ -43,9 +43,9 @@ import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.SolrInputField;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.index.NoMergePolicyFactory;
 import org.apache.solr.request.SolrQueryRequest;
-import org.apache.solr.update.processor.DistributedUpdateProcessor;
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.schema.SchemaField;
 import org.apache.solr.search.SolrIndexSearcher;
@@ -1119,8 +1119,8 @@ public class TestInPlaceUpdatesStandalone extends SolrTestCaseJ4 {
     try (SolrQueryRequest req = req()) {
       AddUpdateCommand cmd = new AddUpdateCommand(req);
       cmd.solrDoc = sdoc;
-      assertTrue(cmd.solrDoc.containsKey(DistributedUpdateProcessor.VERSION_FIELD));
-      cmd.setVersion(Long.parseLong(cmd.solrDoc.getFieldValue(DistributedUpdateProcessor.VERSION_FIELD).toString()));
+      assertTrue(cmd.solrDoc.containsKey(CommonParams.VERSION_FIELD));
+      cmd.setVersion(Long.parseLong(cmd.solrDoc.getFieldValue(CommonParams.VERSION_FIELD).toString()));
       return AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd);
     }
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/core/src/test/org/apache/solr/update/UpdateLogTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/UpdateLogTest.java b/solr/core/src/test/org/apache/solr/update/UpdateLogTest.java
index 8abfe2a..9e7d977 100644
--- a/solr/core/src/test/org/apache/solr/update/UpdateLogTest.java
+++ b/solr/core/src/test/org/apache/solr/update/UpdateLogTest.java
@@ -26,11 +26,11 @@ import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.handler.component.RealTimeGetComponent;
 import org.apache.solr.request.SolrQueryRequest;
-import org.apache.solr.update.processor.DistributedUpdateProcessor;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+import static org.apache.solr.common.params.CommonParams.VERSION_FIELD;
 import static org.junit.internal.matchers.StringContains.containsString;
 
 public class UpdateLogTest extends SolrTestCaseJ4 {
@@ -265,8 +265,8 @@ public class UpdateLogTest extends SolrTestCaseJ4 {
   public static AddUpdateCommand buildAddUpdateCommand(final SolrQueryRequest req, final SolrInputDocument sdoc) {
     AddUpdateCommand cmd = new AddUpdateCommand(req);
     cmd.solrDoc = sdoc;
-    assertTrue("", cmd.solrDoc.containsKey(DistributedUpdateProcessor.VERSION_FIELD));
-    cmd.setVersion(Long.parseLong(cmd.solrDoc.getFieldValue(DistributedUpdateProcessor.VERSION_FIELD).toString()));
+    assertTrue("", cmd.solrDoc.containsKey(VERSION_FIELD));
+    cmd.setVersion(Long.parseLong(cmd.solrDoc.getFieldValue(VERSION_FIELD).toString()));
     return cmd;
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
index 6941a77..83c6326 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
@@ -89,6 +89,7 @@ import static org.apache.solr.common.params.CommonParams.AUTHZ_PATH;
 import static org.apache.solr.common.params.CommonParams.COLLECTIONS_HANDLER_PATH;
 import static org.apache.solr.common.params.CommonParams.CONFIGSETS_HANDLER_PATH;
 import static org.apache.solr.common.params.CommonParams.CORES_HANDLER_PATH;
+import static org.apache.solr.common.params.CommonParams.ID;
 
 /**
  * SolrJ client class to communicate with SolrCloud.
@@ -121,7 +122,7 @@ public class CloudSolrClient extends SolrClient {
   private ExecutorService threadPool = ExecutorUtil
       .newMDCAwareCachedThreadPool(new SolrjNamedThreadFactory(
           "CloudSolrClient ThreadPool"));
-  private String idField = "id";
+  private String idField = ID;
   public static final String STATE_VERSION = "_stateVer_";
   private long retryExpiryTime = TimeUnit.NANOSECONDS.convert(3, TimeUnit.SECONDS);//3 seconds or 3 million nanos
   private final Set<String> NON_ROUTABLE_PARAMS;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/solrj/src/java/org/apache/solr/client/solrj/io/ModelCache.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/ModelCache.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/ModelCache.java
index 521ffec..9a204e2 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/ModelCache.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/ModelCache.java
@@ -20,10 +20,13 @@ import java.io.IOException;
 import java.util.Date;
 import java.util.LinkedHashMap;
 import java.util.Map;
+
 import org.apache.solr.client.solrj.io.stream.CloudSolrStream;
 import org.apache.solr.client.solrj.io.stream.StreamContext;
 import org.apache.solr.common.params.ModifiableSolrParams;
 
+import static org.apache.solr.common.params.CommonParams.VERSION_FIELD;
+
 
 /**
  *  The Model cache keeps a local in-memory copy of models
@@ -92,8 +95,8 @@ public class ModelCache {
       Model m = models.get(modelID);
       if (m != null) {
         Tuple t = m.getTuple();
-        long v = t.getLong("_version_");
-        if (v >= tuple.getLong("_version_")) {
+        long v = t.getLong(VERSION_FIELD);
+        if (v >= tuple.getLong(VERSION_FIELD)) {
           return t;
         } else {
           models.put(modelID, new Model(tuple, currentTime));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java
index 8214f9a..2d8aa34 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java
@@ -15,6 +15,7 @@
  * limitations under the License.
  */
 package org.apache.solr.client.solrj.io.stream;
+
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
@@ -38,6 +39,8 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.ID;
+
 
 public class DaemonStream extends TupleStream implements Expressible {
 
@@ -63,7 +66,7 @@ public class DaemonStream extends TupleStream implements Expressible {
 
     TupleStream tupleStream = factory.constructStream(streamExpressions.get(0));
 
-    StreamExpressionNamedParameter idExpression = factory.getNamedOperand(expression, "id");
+    StreamExpressionNamedParameter idExpression = factory.getNamedOperand(expression, ID);
     StreamExpressionNamedParameter runExpression = factory.getNamedOperand(expression, "runInterval");
     StreamExpressionNamedParameter queueExpression = factory.getNamedOperand(expression, "queueSize");
     StreamExpressionNamedParameter terminateExpression = factory.getNamedOperand(expression, "terminate");
@@ -130,7 +133,7 @@ public class DaemonStream extends TupleStream implements Expressible {
       expression.addParameter("<stream>");
     }
 
-    expression.addParameter(new StreamExpressionNamedParameter("id", id));
+    expression.addParameter(new StreamExpressionNamedParameter(ID, id));
     expression.addParameter(new StreamExpressionNamedParameter("runInterval", Long.toString(runInterval)));
     expression.addParameter(new StreamExpressionNamedParameter("queueSize", Integer.toString(queueSize)));
     expression.addParameter(new StreamExpressionNamedParameter("terminate", Boolean.toString(terminate)));
@@ -230,7 +233,7 @@ public class DaemonStream extends TupleStream implements Expressible {
 
   public synchronized Tuple getInfo() {
     Tuple tuple = new Tuple(new HashMap());
-    tuple.put("id", id);
+    tuple.put(ID, id);
     tuple.put("startTime", startTime);
     tuple.put("stopTime", stopTime);
     tuple.put("iterations", iterations);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ExecutorStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ExecutorStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ExecutorStream.java
index 6765f72..e2f5b82 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ExecutorStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ExecutorStream.java
@@ -41,6 +41,8 @@ import org.apache.solr.common.util.SolrjNamedThreadFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.ID;
+
 /**
  *  The executor function wraps a stream with Tuples containing Streaming Expressions
  *  and executes them in parallel. Sample syntax:
@@ -197,7 +199,7 @@ public class ExecutorStream extends TupleStream implements Expressible {
       }
 
       String expr = tuple.getString("expr_s");
-      Object id = tuple.get("id");
+      Object id = tuple.get(ID);
       TupleStream stream = null;
 
       try {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FeaturesSelectionStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FeaturesSelectionStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FeaturesSelectionStream.java
index cfb3941..f15e2a7 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FeaturesSelectionStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FeaturesSelectionStream.java
@@ -59,6 +59,8 @@ import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SolrjNamedThreadFactory;
 
+import static org.apache.solr.common.params.CommonParams.ID;
+
 public class FeaturesSelectionStream extends TupleStream implements Expressible{
 
   private static final long serialVersionUID = 1;
@@ -355,7 +357,7 @@ public class FeaturesSelectionStream extends TupleStream implements Expressible{
           if (tuples.size() == numTerms) break;
           index++;
           Map map = new HashMap();
-          map.put("id", featureSet + "_" + index);
+          map.put(ID, featureSet + "_" + index);
           map.put("index_i", index);
           map.put("term_s", termScore.getKey());
           map.put("score_f", termScore.getValue());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FetchStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FetchStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FetchStream.java
index 463ab4a..55ca51a 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FetchStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FetchStream.java
@@ -19,11 +19,11 @@ package org.apache.solr.client.solrj.io.stream;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
-import java.util.HashMap;
 
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.comp.StreamComparator;
@@ -37,6 +37,8 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 import org.apache.solr.common.params.ModifiableSolrParams;
 
+import static org.apache.solr.common.params.CommonParams.VERSION_FIELD;
+
 /**
  *  Iterates over a stream and fetches additional fields from a specified collection.
  *  Fetches are done in batches.
@@ -139,7 +141,7 @@ public class FetchStream extends TupleStream implements Expressible {
 
     for(int i=0; i<fields.length; i++) {
       fields[i] = fields[i].trim();
-      if(fields[i].equals("_version_")) {
+      if(fields[i].equals(VERSION_FIELD)) {
         appendVersion = false;
       }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ModelStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ModelStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ModelStream.java
index 70b740d..4be05a9 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ModelStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ModelStream.java
@@ -37,6 +37,8 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter;
 import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 
+import static org.apache.solr.common.params.CommonParams.ID;
+
 /**
 *  The ModelStream retrieves a stored model from a Solr Cloud collection.
 *
@@ -87,7 +89,7 @@ public class ModelStream extends TupleStream implements Expressible {
       }
     }
 
-    String modelID = params.get("id");
+    String modelID = params.get(ID);
     if (modelID == null) {
       throw new IOException("id param cannot be null for ModelStream");
     }
@@ -133,7 +135,7 @@ public class ModelStream extends TupleStream implements Expressible {
 
     // zkHost
     expression.addParameter(new StreamExpressionNamedParameter("zkHost", zkHost));
-    expression.addParameter(new StreamExpressionNamedParameter("id", modelID));
+    expression.addParameter(new StreamExpressionNamedParameter(ID, modelID));
     expression.addParameter(new StreamExpressionNamedParameter("cacheMillis", Long.toString(cacheMillis)));
 
     return expression;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java
index c40f785..7f194f4 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java
@@ -60,6 +60,8 @@ import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SolrjNamedThreadFactory;
 
+import static org.apache.solr.common.params.CommonParams.ID;
+
 public class TextLogitStream extends TupleStream implements Expressible {
 
   private static final long serialVersionUID = 1;
@@ -463,7 +465,7 @@ public class TextLogitStream extends TupleStream implements Expressible {
 
         this.weights = averageWeights(allWeights);
         Map map = new HashMap();
-        map.put("id", name+"_"+iteration);
+        map.put(ID, name+"_"+iteration);
         map.put("name_s", name);
         map.put("field_s", field);
         map.put("terms_ss", terms);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java
index 5ecee65..ccbe8c1 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java
@@ -63,6 +63,9 @@ import org.apache.solr.common.util.SolrjNamedThreadFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.ID;
+import static org.apache.solr.common.params.CommonParams.VERSION_FIELD;
+
 public class TopicStream extends CloudSolrStream implements Expressible  {
 
   private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@@ -74,7 +77,7 @@ public class TopicStream extends CloudSolrStream implements Expressible  {
   private boolean initialRun = true;
   private String id;
   protected long checkpointEvery;
-  private Map<String, Long> checkpoints = new HashMap<String, Long>();
+  private Map<String, Long> checkpoints = new HashMap<>();
   private String checkpointCollection;
   private long initialCheckpoint = -1;
 
@@ -143,7 +146,7 @@ public class TopicStream extends CloudSolrStream implements Expressible  {
     List<StreamExpressionNamedParameter> namedParams = factory.getNamedOperands(expression);
     StreamExpressionNamedParameter zkHostExpression = factory.getNamedOperand(expression, "zkHost");
 
-    StreamExpressionNamedParameter idParam = factory.getNamedOperand(expression, "id");
+    StreamExpressionNamedParameter idParam = factory.getNamedOperand(expression, ID);
     if(null == idParam) {
       throw new IOException("invalid TopicStream id cannot be null");
     }
@@ -186,7 +189,7 @@ public class TopicStream extends CloudSolrStream implements Expressible  {
     ModifiableSolrParams params = new ModifiableSolrParams();
     for(StreamExpressionNamedParameter namedParam : namedParams){
       if(!namedParam.getName().equals("zkHost") &&
-          !namedParam.getName().equals("id") &&
+          !namedParam.getName().equals(ID) &&
           !namedParam.getName().equals("checkpointEvery")) {
         params.set(namedParam.getName(), namedParam.getParameter().toString().trim());
       }
@@ -240,7 +243,7 @@ public class TopicStream extends CloudSolrStream implements Expressible  {
 
     // zkHost
     expression.addParameter(new StreamExpressionNamedParameter("zkHost", zkHost));
-    expression.addParameter(new StreamExpressionNamedParameter("id", id));
+    expression.addParameter(new StreamExpressionNamedParameter(ID, id));
     if(initialCheckpoint > -1) {
       expression.addParameter(new StreamExpressionNamedParameter("initialCheckpoint", Long.toString(initialCheckpoint)));
     }
@@ -391,7 +394,7 @@ public class TopicStream extends CloudSolrStream implements Expressible  {
       persistCheckpoints();
     }
 
-    long version = tuple.getLong("_version_");
+    long version = tuple.getLong(VERSION_FIELD);
     String slice = tuple.getString("_SLICE_");
     checkpoints.put(slice, version);
 
@@ -469,7 +472,7 @@ public class TopicStream extends CloudSolrStream implements Expressible  {
     UpdateRequest request = new UpdateRequest();
     request.setParam("collection", checkpointCollection);
     SolrInputDocument doc = new SolrInputDocument();
-    doc.addField("id", id);
+    doc.addField(ID, id);
 
     for(Map.Entry<String, Long> entry : checkpoints.entrySet()) {
       doc.addField("checkpoint_ss", entry.getKey()+"~"+entry.getValue());
@@ -523,7 +526,7 @@ public class TopicStream extends CloudSolrStream implements Expressible  {
       mParams.set("distrib", "false"); // We are the aggregator.
       String fl = mParams.get("fl");
       mParams.set("sort", "_version_ asc");
-      if(!fl.contains("_version_")) {
+      if(!fl.contains(VERSION_FIELD)) {
         fl += ",_version_";
       }
       mParams.set("fl", fl);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/UpdateStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/UpdateStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/UpdateStream.java
index 55291bf..0b29e4f 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/UpdateStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/UpdateStream.java
@@ -42,6 +42,8 @@ import org.apache.solr.common.SolrInputDocument;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.VERSION_FIELD;
+
 /**
  * Sends tuples emitted by a wrapped {@link TupleStream} as updates to a SolrCloud collection.
  */
@@ -268,7 +270,7 @@ public class UpdateStream extends TupleStream implements Expressible {
   private SolrInputDocument convertTupleToSolrDocument(Tuple tuple) {
     SolrInputDocument doc = new SolrInputDocument();
     for (Object field : tuple.fields.keySet()) {
-      if (! ((String)field).equals("_version_")) {
+      if (! field.equals(VERSION_FIELD)) {
         Object value = tuple.get(field);
         if (value instanceof List) {
           addMultivaluedField(doc, (String)field, (List<Object>)value);


[16/46] lucene-solr:jira/solr-9959: SOLR-10286: fix test; we were writing to read-only dir. Expand solrconfig-managed-schema.xml to have toggle-able elements via system property flags

Posted by ab...@apache.org.
SOLR-10286: fix test; we were writing to read-only dir.
Expand solrconfig-managed-schema.xml to have toggle-able elements via system property flags
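
For context, a minimal sketch (not part of this commit) of how the new toggle-able elements are meant to be driven: a test sets plain system properties before core init, and the ${...:false} defaults in solrconfig-managed-schema.xml leave each element disabled for tests that do not opt in.

    // Sketch only; the property names mirror the ones added in the config below.
    System.setProperty("documentCache.enabled", "true");             // substituted into ${documentCache.enabled:false}
    System.setProperty("enableLazyFieldLoading", "true");            // substituted into ${enableLazyFieldLoading:false}
    initCore("solrconfig-managed-schema.xml", "ignoredSchemaName?"); // config is parsed with the flags applied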


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/4ee7fc38
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/4ee7fc38
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/4ee7fc38

Branch: refs/heads/jira/solr-9959
Commit: 4ee7fc38907a94f025785ebd388dd372b260913d
Parents: 1439794
Author: David Smiley <ds...@apache.org>
Authored: Thu Mar 16 18:30:57 2017 -0400
Committer: David Smiley <ds...@apache.org>
Committed: Thu Mar 16 18:30:57 2017 -0400

----------------------------------------------------------------------
 .../conf/solrconfig-managed-schema.xml          | 25 +++++++++++++++++
 .../org/apache/solr/search/LargeFieldTest.java  | 29 ++++++++------------
 2 files changed, 37 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4ee7fc38/solr/core/src/test-files/solr/collection1/conf/solrconfig-managed-schema.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig-managed-schema.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig-managed-schema.xml
index 26224ad..c1390c8 100644
--- a/solr/core/src/test-files/solr/collection1/conf/solrconfig-managed-schema.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/solrconfig-managed-schema.xml
@@ -29,6 +29,31 @@
 
   <codecFactory class="solr.SchemaCodecFactory"/>
 
+  <query>
+    <filterCache
+        enabled="${filterCache.enabled:false}"
+        class="solr.search.FastLRUCache"
+        size="512"
+        initialSize="512"
+        autowarmCount="2"/>
+
+    <queryResultCache
+        enabled="${queryResultCache.enabled:false}"
+        class="solr.search.LRUCache"
+        size="512"
+        initialSize="512"
+        autowarmCount="2"/>
+
+    <documentCache
+        enabled="${documentCache.enabled:false}"
+        class="solr.search.LRUCache"
+        size="512"
+        initialSize="512"
+        autowarmCount="0"/>
+
+    <enableLazyFieldLoading>${enableLazyFieldLoading:false}</enableLazyFieldLoading>
+  </query>
+
   <updateHandler>
     <updateLog enable="${enable.update.log}">
       <str name="dir">${solr.ulog.dir:}</str>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4ee7fc38/solr/core/src/test/org/apache/solr/search/LargeFieldTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/LargeFieldTest.java b/solr/core/src/test/org/apache/solr/search/LargeFieldTest.java
index d05c69c..09e7e90 100644
--- a/solr/core/src/test/org/apache/solr/search/LargeFieldTest.java
+++ b/solr/core/src/test/org/apache/solr/search/LargeFieldTest.java
@@ -29,6 +29,7 @@ import org.apache.solr.request.SolrQueryRequestBase;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.util.RefCounted;
+import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
@@ -40,29 +41,17 @@ public class LargeFieldTest extends SolrTestCaseJ4 {
 
   @BeforeClass
   public static void initManagedSchemaCore() throws Exception {
-    // TODO propose convenience API for this?
-    // This testing approach means no new solrconfig or schema file or per-test temp solr-home!
+    // This testing approach means no schema file or per-test temp solr-home!
     System.setProperty("managed.schema.mutable", "true");
     System.setProperty("managed.schema.resourceName", "schema-one-field-no-dynamic-field-unique-key.xml");
     System.setProperty("enable.update.log", "false");
-    initCore("solrconfig-managed-schema.xml", "ignoredSchemaName?");
+    System.setProperty("documentCache.enabled", "true");
+    System.setProperty("enableLazyFieldLoading", "true");
 
-    // modify solr config  TODO propose more convenient API for this; maybe with JSON-ification of a map
-    try (SolrQueryRequestBase req = (SolrQueryRequestBase) req()) {
-      req.getContext().put("httpMethod", "POST");
-      req.setContentStreams(Collections.singleton(new ContentStreamBase.StringStream(
-          "{ 'set-property':{" +
-              "'query.enableLazyFieldLoading':true, " +
-              "'query.documentCache.class':'solr.LRUCache'" +
-              "}}"
-      )));
-      SolrQueryResponse rsp = new SolrQueryResponse();
-      h.getCore().execute(h.getCore().getRequestHandler("/config"), req, rsp);
-      assertNull(rsp.getException());
-    }
+    initCore("solrconfig-managed-schema.xml", "ignoredSchemaName?");
 
+    // TODO SOLR-10229 will make this easier
     boolean PERSIST_FALSE = false; // don't write to test resource dir
-
     IndexSchema schema = h.getCore().getLatestSchema();
     schema = schema.addFieldTypes(Collections.singletonList(
         schema.newFieldType("textType", "solr.TextField", // redundant; TODO improve api
@@ -78,6 +67,12 @@ public class LargeFieldTest extends SolrTestCaseJ4 {
     h.getCore().setLatestSchema(schema);
   }
 
+  @AfterClass
+  public static void afterClass() {
+    System.clearProperty("documentCache.enabled");
+    System.clearProperty("enableLazyFieldLoading");
+  }
+
   @Test
   public void test() throws Exception {
     // add just one document (docid 0)


[44/46] lucene-solr:jira/solr-9959: SOLR-9184: Add a static convenience method ModifiableSolrParams#of(SolrParams) which returns the same instance if it already is modifiable, otherwise creates a new ModifiableSolrParams instance.

Posted by ab...@apache.org.
SOLR-9184: Add a static convenience method ModifiableSolrParams#of(SolrParams) which returns the same instance if it already is modifiable, otherwise creates a new ModifiableSolrParams instance.
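
A minimal usage sketch of the contract described above (not from the patch; the surrounding class and variable names are illustrative):

    import java.util.Collections;
    import org.apache.solr.common.params.MapSolrParams;
    import org.apache.solr.common.params.ModifiableSolrParams;
    import org.apache.solr.common.params.SolrParams;

    class ModifiableSolrParamsOfSketch {
      static void demo() {
        SolrParams readOnly = new MapSolrParams(Collections.singletonMap("q", "*:*"));
        ModifiableSolrParams params = ModifiableSolrParams.of(readOnly); // not modifiable -> values are copied
        params.set("rows", 10);                                          // mutate the copy freely

        ModifiableSolrParams same = ModifiableSolrParams.of(params);     // already modifiable -> same instance
        assert same == params;

        ModifiableSolrParams none = ModifiableSolrParams.of(null);       // null -> empty instance
        assert none.size() == 0;
      }
    }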


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/583fec1a
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/583fec1a
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/583fec1a

Branch: refs/heads/jira/solr-9959
Commit: 583fec1a58b41a0562529e6228a29728a790d87c
Parents: eb58777
Author: koji <ko...@apache.org>
Authored: Thu Mar 23 14:57:45 2017 +0900
Committer: koji <ko...@apache.org>
Committed: Thu Mar 23 14:57:45 2017 +0900

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  7 ++++++
 .../common/params/ModifiableSolrParams.java     | 13 ++++++++++
 .../common/params/ModifiableSolrParamsTest.java | 26 ++++++++++++++++++++
 3 files changed, 46 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/583fec1a/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 2bfc981..d6aba02 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -116,6 +116,13 @@ New Features
 
 * SOLR-9994: Add support for CollapseQParser with PointFields. (Varun Thacker, Cao Manh Dat) 
 
+Optimizations
+----------------------
+
+* SOLR-9184: Add a static convenience method ModifiableSolrParams#of(SolrParams) which returns the same
+  instance if it already is modifiable, otherwise creates a new ModifiableSolrParams instance.
+  (Jörg Rathlev via Koji)
+
 ==================  6.5.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/583fec1a/solr/solrj/src/java/org/apache/solr/common/params/ModifiableSolrParams.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/ModifiableSolrParams.java b/solr/solrj/src/java/org/apache/solr/common/params/ModifiableSolrParams.java
index e3cb499..da6bcf1 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/ModifiableSolrParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/ModifiableSolrParams.java
@@ -54,6 +54,19 @@ public class ModifiableSolrParams extends SolrParams
     }
   }
 
+  /**
+   * If the input params are of type ModifiableSolrParams, returns the input; otherwise, constructs a new
+   * ModifiableSolrParams, copying values from the given params. If params is null, returns an empty
+   * ModifiableSolrParams instance.
+   */
+  public static ModifiableSolrParams of(SolrParams params)
+  {
+    if (params instanceof ModifiableSolrParams) {
+      return (ModifiableSolrParams) params;
+    }
+    return new ModifiableSolrParams(params);
+  }
+
   public int size() {
     return vals == null ? 0 : vals.size();
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/583fec1a/solr/solrj/src/test/org/apache/solr/common/params/ModifiableSolrParamsTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/common/params/ModifiableSolrParamsTest.java b/solr/solrj/src/test/org/apache/solr/common/params/ModifiableSolrParamsTest.java
index b65b607..c315b08 100644
--- a/solr/solrj/src/test/org/apache/solr/common/params/ModifiableSolrParamsTest.java
+++ b/solr/solrj/src/test/org/apache/solr/common/params/ModifiableSolrParamsTest.java
@@ -18,6 +18,9 @@ package org.apache.solr.common.params;
 
 import org.apache.lucene.util.LuceneTestCase;
 
+import java.util.HashMap;
+import java.util.Map;
+
 /**
  * Unit Test Case for {@link org.apache.solr.common.params.ModifiableSolrParams
  * ModifiableSolrParams}
@@ -39,6 +42,29 @@ public class ModifiableSolrParamsTest extends LuceneTestCase {
     super.tearDown();
   }
 
+  public void testOf() throws Exception
+  {
+    String key = "key";
+    String value = "value";
+
+    // input is not of type ModifiableSolrParams
+    Map<String, String> values = new HashMap<>();
+    values.put(key, value);
+    SolrParams mapParams = new MapSolrParams(values);
+    ModifiableSolrParams result = ModifiableSolrParams.of(mapParams);
+    assertNotSame(mapParams, result);
+    assertEquals(value, result.get(key));
+
+    // input is of type ModifiableSolrParams
+    modifiable.add(key, value);
+    result = ModifiableSolrParams.of(modifiable);
+    assertSame(result, modifiable);
+
+    // input is null
+    result = ModifiableSolrParams.of(null);
+    assertNotNull(result);
+    assertEquals(0, result.size());
+  }
 
   public void testAdd()
   {


[02/46] lucene-solr:jira/solr-9959: LUCENE-7739: Fix places where we unnecessarily boxed while parsing a numeric value according to FindBugs

Posted by ab...@apache.org.
LUCENE-7739: Fix places where we unnecessarily boxed while parsing a numeric value according to FindBugs
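
The pattern being fixed, in isolation (a sketch, not a line from the diff): the valueOf variants allocate a wrapper object only to unbox it immediately, while the parseXxx methods return the primitive directly.

    int slower = Integer.valueOf("42").intValue();  // boxes to an Integer, then unboxes
    int faster = Integer.parseInt("42");            // parses straight to the primitive

    float f1 = Float.valueOf("1.5").floatValue();   // same pattern for the other wrapper types
    float f2 = Float.parseFloat("1.5");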


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/e7b87f5b
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/e7b87f5b
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/e7b87f5b

Branch: refs/heads/jira/solr-9959
Commit: e7b87f5b79f744252bca79356c2bcdeaed503e74
Parents: 716d43e
Author: Mike McCandless <mi...@apache.org>
Authored: Wed Mar 15 06:08:10 2017 -0400
Committer: Mike McCandless <mi...@apache.org>
Committed: Wed Mar 15 06:08:10 2017 -0400

----------------------------------------------------------------------
 lucene/CHANGES.txt                                  |  4 ++++
 .../benchmark/byTask/feeds/EnwikiContentSource.java |  2 +-
 .../benchmark/byTask/tasks/ForceMergeTask.java      |  2 +-
 .../test/org/apache/lucene/util/fst/TestFSTs.java   |  4 ++--
 .../apache/lucene/search/join/TestBlockJoin.java    |  2 +-
 .../search/TestDiversifiedTopDocsCollector.java     |  2 +-
 .../lucene/queries/function/TestValueSources.java   |  6 +++---
 .../lucene/queryparser/classic/QueryParserBase.java |  6 +++---
 .../standard/parser/StandardSyntaxParser.java       |  8 ++++----
 .../standard/parser/StandardSyntaxParser.jj         |  8 ++++----
 .../queryparser/surround/parser/QueryParser.java    |  2 +-
 .../queryparser/surround/parser/QueryParser.jj      |  2 +-
 .../xml/builders/PointRangeQueryBuilder.java        | 16 ++++++++--------
 .../lucene/queryparser/classic/TestQueryParser.java |  2 +-
 .../queryparser/xml/CoreParserTestIndexData.java    |  2 +-
 .../analytics/util/RangeEndpointCalculator.java     |  8 ++++----
 .../handler/dataimport/MailEntityProcessor.java     |  2 +-
 .../src/java/org/apache/solr/response/PageTool.java |  2 +-
 .../java/org/apache/solr/handler/IndexFetcher.java  |  6 +++---
 .../org/apache/solr/handler/ReplicationHandler.java |  2 +-
 .../solr/handler/component/RangeFacetRequest.java   |  8 ++++----
 .../org/apache/solr/parser/SolrQueryParserBase.java |  4 ++--
 .../org/apache/solr/search/facet/FacetRange.java    |  8 ++++----
 .../processor/TolerantUpdateProcessorFactory.java   |  2 +-
 .../java/org/apache/solr/util/DateMathParser.java   |  2 +-
 .../java/org/apache/solr/util/SolrPluginUtils.java  |  4 ++--
 .../solr/core/snapshots/TestSolrCloudSnapshots.java |  2 +-
 .../solr/core/snapshots/TestSolrCoreSnapshots.java  |  2 +-
 .../apache/solr/search/TestSolrFieldCacheMBean.java |  4 ++--
 .../apache/solr/search/mlt/CloudMLTQParserTest.java | 14 +++++++-------
 30 files changed, 71 insertions(+), 67 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 62f4763..bd38f3f 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -259,6 +259,10 @@ Optimizations
 * LUCENE-7742: Fix places where we were unboxing and then re-boxing
   according to FindBugs (Daniel Jelinski via Mike McCandless)
 
+* LUCENE-7739: Fix places where we unnecessarily boxed while parsing
+  a numeric value according to FindBugs (Daniel Jelinski via Mike
+  McCandless)
+
 Build
 
 * LUCENE-7653: Update randomizedtesting to version 2.5.0. (Dawid Weiss)

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/EnwikiContentSource.java
----------------------------------------------------------------------
diff --git a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/EnwikiContentSource.java b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/EnwikiContentSource.java
index a933e56..7258476 100644
--- a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/EnwikiContentSource.java
+++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/EnwikiContentSource.java
@@ -101,7 +101,7 @@ public class EnwikiContentSource extends ContentSource {
 
       buffer.append(original.substring(8, 10));
       buffer.append('-');
-      buffer.append(months[Integer.valueOf(original.substring(5, 7)).intValue() - 1]);
+      buffer.append(months[Integer.parseInt(original.substring(5, 7)) - 1]);
       buffer.append('-');
       buffer.append(original.substring(0, 4));
       buffer.append(' ');

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/ForceMergeTask.java
----------------------------------------------------------------------
diff --git a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/ForceMergeTask.java b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/ForceMergeTask.java
index 40a3637..292642f 100644
--- a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/ForceMergeTask.java
+++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/ForceMergeTask.java
@@ -46,7 +46,7 @@ public class ForceMergeTask extends PerfTask {
   @Override
   public void setParams(String params) {
     super.setParams(params);
-    maxNumSegments = Double.valueOf(params).intValue();
+    maxNumSegments = (int)Double.parseDouble(params);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/lucene/core/src/test/org/apache/lucene/util/fst/TestFSTs.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/util/fst/TestFSTs.java b/lucene/core/src/test/org/apache/lucene/util/fst/TestFSTs.java
index dcce285..7a8e5f5 100644
--- a/lucene/core/src/test/org/apache/lucene/util/fst/TestFSTs.java
+++ b/lucene/core/src/test/org/apache/lucene/util/fst/TestFSTs.java
@@ -624,10 +624,10 @@ public class TestFSTs extends LuceneTestCase {
     int idx = 0;
     while (idx < args.length) {
       if (args[idx].equals("-prune")) {
-        prune = Integer.valueOf(args[1 + idx]);
+        prune = Integer.parseInt(args[1 + idx]);
         idx++;
       } else if (args[idx].equals("-limit")) {
-        limit = Integer.valueOf(args[1 + idx]);
+        limit = Integer.parseInt(args[1 + idx]);
         idx++;
       } else if (args[idx].equals("-utf8")) {
         inputMode = 0;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java
index a13e66f..da3c20e 100644
--- a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java
+++ b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java
@@ -1011,7 +1011,7 @@ public class TestBlockJoin extends LuceneTestCase {
     TopDocs childHits = new TopDocs(0, new ScoreDoc[0], 0f);
     for (ScoreDoc controlHit : controlHits.scoreDocs) {
       Document controlDoc = r.document(controlHit.doc);
-      int parentID = Integer.valueOf(controlDoc.get("parentID"));
+      int parentID = Integer.parseInt(controlDoc.get("parentID"));
       if (parentID != currentParentID) {
         assertEquals(childHitSlot, childHits.scoreDocs.length);
         currentParentID = parentID;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java b/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java
index 043141a..f07793a 100644
--- a/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java
+++ b/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java
@@ -392,7 +392,7 @@ public class TestDiversifiedTopDocsCollector extends LuceneTestCase {
     for (int i = 0; i < hitsOfThe60s.length; i++) {
       String cols[] = hitsOfThe60s[i].split("\t");
       Record record = new Record(String.valueOf(i), cols[0], cols[1], cols[2],
-          Float.valueOf(cols[3]));
+          Float.parseFloat(cols[3]));
       parsedRecords.put(record.id, record);
       idField.setStringValue(record.id);
       yearField.setStringValue(record.year);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/lucene/queries/src/test/org/apache/lucene/queries/function/TestValueSources.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/function/TestValueSources.java b/lucene/queries/src/test/org/apache/lucene/queries/function/TestValueSources.java
index e008293..8008590 100644
--- a/lucene/queries/src/test/org/apache/lucene/queries/function/TestValueSources.java
+++ b/lucene/queries/src/test/org/apache/lucene/queries/function/TestValueSources.java
@@ -123,9 +123,9 @@ public class TestValueSources extends LuceneTestCase {
       document.add(new StringField("id", doc[0], Field.Store.NO));
       document.add(new SortedDocValuesField("id", new BytesRef(doc[0])));
       document.add(new NumericDocValuesField("double", Double.doubleToRawLongBits(Double.parseDouble(doc[1]))));
-      document.add(new NumericDocValuesField("float", Float.floatToRawIntBits(Float.valueOf(doc[2]))));
-      document.add(new NumericDocValuesField("int", Integer.valueOf(doc[3])));
-      document.add(new NumericDocValuesField("long", Long.valueOf(doc[4])));
+      document.add(new NumericDocValuesField("float", Float.floatToRawIntBits(Float.parseFloat(doc[2]))));
+      document.add(new NumericDocValuesField("int", Integer.parseInt(doc[3])));
+      document.add(new NumericDocValuesField("long", Long.parseLong(doc[4])));
       document.add(new StringField("string", doc[5], Field.Store.NO));
       document.add(new SortedDocValuesField("string", new BytesRef(doc[5])));
       document.add(new TextField("text", doc[6], Field.Store.NO));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParserBase.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParserBase.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParserBase.java
index 3cfa7d0..cff9efa 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParserBase.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParserBase.java
@@ -837,7 +837,7 @@ public abstract class QueryParserBase extends QueryBuilder implements CommonQuer
     Query q;
     float fms = fuzzyMinSim;
     try {
-      fms = Float.valueOf(fuzzySlop.image.substring(1)).floatValue();
+      fms = Float.parseFloat(fuzzySlop.image.substring(1));
     } catch (Exception ignored) { }
     if(fms < 0.0f){
       throw new ParseException("Minimum similarity for a FuzzyQuery has to be between 0.0f and 1.0f !");
@@ -853,7 +853,7 @@ public abstract class QueryParserBase extends QueryBuilder implements CommonQuer
     int s = phraseSlop;  // default
     if (fuzzySlop != null) {
       try {
-        s = Float.valueOf(fuzzySlop.image.substring(1)).intValue();
+        s = (int)Float.parseFloat(fuzzySlop.image.substring(1));
       }
       catch (Exception ignored) { }
     }
@@ -865,7 +865,7 @@ public abstract class QueryParserBase extends QueryBuilder implements CommonQuer
     if (boost != null) {
       float f = (float) 1.0;
       try {
-        f = Float.valueOf(boost.image).floatValue();
+        f = Float.parseFloat(boost.image);
       }
       catch (Exception ignored) {
     /* Should this be handled somehow? (defaults to "no boost", if

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/parser/StandardSyntaxParser.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/parser/StandardSyntaxParser.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/parser/StandardSyntaxParser.java
index ed0d67c..8ba34a6 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/parser/StandardSyntaxParser.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/parser/StandardSyntaxParser.java
@@ -466,7 +466,7 @@ public class StandardSyntaxParser implements SyntaxParser, StandardSyntaxParserC
       if (boost != null) {
       float f = (float)1.0;
       try {
-        f = Float.valueOf(boost.image).floatValue();
+        f = Float.parseFloat(boost.image);
         // avoid boosting null queries, such as those caused by stop words
           if (q != null) {
             q = new BoostQueryNode(q, f);
@@ -542,7 +542,7 @@ public class StandardSyntaxParser implements SyntaxParser, StandardSyntaxParserC
        if (fuzzy) {
            float fms = defaultMinSimilarity;
            try {
-            fms = Float.valueOf(fuzzySlop.image.substring(1)).floatValue();
+            fms = Float.parseFloat(fuzzySlop.image.substring(1));
            } catch (Exception ignored) { }
            if(fms < 0.0f){
                 {if (true) throw new ParseException(new MessageImpl(QueryParserMessages.INVALID_SYNTAX_FUZZY_LIMITS));}
@@ -661,7 +661,7 @@ public class StandardSyntaxParser implements SyntaxParser, StandardSyntaxParserC
 
          if (fuzzySlop != null) {
            try {
-             phraseSlop = Float.valueOf(fuzzySlop.image.substring(1)).intValue();
+             phraseSlop = (int)Float.parseFloat(fuzzySlop.image.substring(1));
              q = new SlopQueryNode(q, phraseSlop);
            }
            catch (Exception ignored) {
@@ -679,7 +679,7 @@ public class StandardSyntaxParser implements SyntaxParser, StandardSyntaxParserC
     if (boost != null) {
       float f = (float)1.0;
       try {
-        f = Float.valueOf(boost.image).floatValue();
+        f = Float.parseFloat(boost.image);
         // avoid boosting null queries, such as those caused by stop words
           if (q != null) {
             q = new BoostQueryNode(q, f);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/parser/StandardSyntaxParser.jj
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/parser/StandardSyntaxParser.jj b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/parser/StandardSyntaxParser.jj
index 868b257..b53bab3 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/parser/StandardSyntaxParser.jj
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/parser/StandardSyntaxParser.jj
@@ -391,7 +391,7 @@ QueryNode Clause(CharSequence field) : {
       if (boost != null) {
       float f = (float)1.0;
       try {
-        f = Float.valueOf(boost.image).floatValue();
+        f = Float.parseFloat(boost.image);
         // avoid boosting null queries, such as those caused by stop words
           if (q != null) {
             q = new BoostQueryNode(q, f);
@@ -431,7 +431,7 @@ QueryNode Term(CharSequence field) : {
        if (fuzzy) {
            float fms = defaultMinSimilarity;
            try {
-            fms = Float.valueOf(fuzzySlop.image.substring(1)).floatValue();
+            fms = Float.parseFloat(fuzzySlop.image.substring(1));
            } catch (Exception ignored) { }
            if(fms < 0.0f){
                 throw new ParseException(new MessageImpl(QueryParserMessages.INVALID_SYNTAX_FUZZY_LIMITS));
@@ -472,7 +472,7 @@ QueryNode Term(CharSequence field) : {
 
          if (fuzzySlop != null) {
            try {
-             phraseSlop = Float.valueOf(fuzzySlop.image.substring(1)).intValue();
+             phraseSlop = (int)Float.parseFloat(fuzzySlop.image.substring(1));
              q = new SlopQueryNode(q, phraseSlop);    
            }
            catch (Exception ignored) {
@@ -488,7 +488,7 @@ QueryNode Term(CharSequence field) : {
     if (boost != null) {
       float f = (float)1.0;
       try {
-        f = Float.valueOf(boost.image).floatValue();
+        f = Float.parseFloat(boost.image);
         // avoid boosting null queries, such as those caused by stop words
           if (q != null) {
             q = new BoostQueryNode(q, f);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/QueryParser.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/QueryParser.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/QueryParser.java
index bd91f04..f0f4b34 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/QueryParser.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/QueryParser.java
@@ -481,7 +481,7 @@ public class QueryParser implements QueryParserConstants {
       weight = jj_consume_token(NUMBER);
       float f;
       try {
-        f = Float.valueOf(weight.image).floatValue();
+        f = Float.parseFloat(weight.image);
       } catch (Exception floatExc) {
         {if (true) throw new ParseException(boostErrorMessage + weight.image + " (" + floatExc + ")");}
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/QueryParser.jj
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/QueryParser.jj b/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/QueryParser.jj
index d63189d..857cca1 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/QueryParser.jj
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/QueryParser.jj
@@ -460,7 +460,7 @@ void OptionalWeights(SrndQuery q) : {
   ( <CARAT> weight=<NUMBER> {
       float f;
       try {
-        f = Float.valueOf(weight.image).floatValue();
+        f = Float.parseFloat(weight.image);
       } catch (Exception floatExc) {
         throw new ParseException(boostErrorMessage + weight.image + " (" + floatExc + ")");
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/PointRangeQueryBuilder.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/PointRangeQueryBuilder.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/PointRangeQueryBuilder.java
index 82f7039..1297ad4 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/PointRangeQueryBuilder.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/PointRangeQueryBuilder.java
@@ -79,20 +79,20 @@ public class PointRangeQueryBuilder implements QueryBuilder {
     try {
       if (type.equalsIgnoreCase("int")) {
         return IntPoint.newRangeQuery(field,
-            (lowerTerm == null ? Integer.MIN_VALUE : Integer.valueOf(lowerTerm)),
-            (upperTerm == null ? Integer.MAX_VALUE : Integer.valueOf(upperTerm)));
+            (lowerTerm == null ? Integer.MIN_VALUE : Integer.parseInt(lowerTerm)),
+            (upperTerm == null ? Integer.MAX_VALUE : Integer.parseInt(upperTerm)));
       } else if (type.equalsIgnoreCase("long")) {
         return LongPoint.newRangeQuery(field,
-            (lowerTerm == null ? Long.MIN_VALUE : Long.valueOf(lowerTerm)),
-            (upperTerm == null ? Long.MAX_VALUE : Long.valueOf(upperTerm)));
+            (lowerTerm == null ? Long.MIN_VALUE : Long.parseLong(lowerTerm)),
+            (upperTerm == null ? Long.MAX_VALUE : Long.parseLong(upperTerm)));
       } else if (type.equalsIgnoreCase("double")) {
         return DoublePoint.newRangeQuery(field,
-            (lowerTerm == null ? Double.NEGATIVE_INFINITY : Double.valueOf(lowerTerm)),
-            (upperTerm == null ? Double.POSITIVE_INFINITY : Double.valueOf(upperTerm)));
+            (lowerTerm == null ? Double.NEGATIVE_INFINITY : Double.parseDouble(lowerTerm)),
+            (upperTerm == null ? Double.POSITIVE_INFINITY : Double.parseDouble(upperTerm)));
       } else if (type.equalsIgnoreCase("float")) {
         return FloatPoint.newRangeQuery(field,
-            (lowerTerm == null ? Float.NEGATIVE_INFINITY : Float.valueOf(lowerTerm)),
-            (upperTerm == null ? Float.POSITIVE_INFINITY : Float.valueOf(upperTerm)));
+            (lowerTerm == null ? Float.NEGATIVE_INFINITY : Float.parseFloat(lowerTerm)),
+            (upperTerm == null ? Float.POSITIVE_INFINITY : Float.parseFloat(upperTerm)));
       } else {
         throw new ParserException("type attribute must be one of: [long, int, double, float]");
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestQueryParser.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestQueryParser.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestQueryParser.java
index e8533e0..3450794 100644
--- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestQueryParser.java
+++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestQueryParser.java
@@ -193,7 +193,7 @@ public class TestQueryParser extends QueryParserTestBase {
         if(fuzzySlop.image.endsWith("\u20ac")) {
           float fms = fuzzyMinSim;
           try {
-            fms = Float.valueOf(fuzzySlop.image.substring(1, fuzzySlop.image.length()-1)).floatValue();
+            fms = Float.parseFloat(fuzzySlop.image.substring(1, fuzzySlop.image.length()-1));
           } catch (Exception ignored) { }
           float value = Float.parseFloat(termImage);
           return getRangeQuery(qfield, Float.toString(value-fms/2.f), Float.toString(value+fms/2.f), true, true);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/CoreParserTestIndexData.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/CoreParserTestIndexData.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/CoreParserTestIndexData.java
index 5fa1523..4756888 100644
--- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/CoreParserTestIndexData.java
+++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/CoreParserTestIndexData.java
@@ -52,7 +52,7 @@ class CoreParserTestIndexData implements Closeable {
       Document doc = new Document();
       doc.add(LuceneTestCase.newTextField("date", date, Field.Store.YES));
       doc.add(LuceneTestCase.newTextField("contents", content, Field.Store.YES));
-      doc.add(new IntPoint("date3", Integer.valueOf(date)));
+      doc.add(new IntPoint("date3", Integer.parseInt(date)));
       writer.addDocument(doc);
       line = d.readLine();
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/RangeEndpointCalculator.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/RangeEndpointCalculator.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/RangeEndpointCalculator.java
index c3c2088..fa29022 100644
--- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/RangeEndpointCalculator.java
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/RangeEndpointCalculator.java
@@ -265,7 +265,7 @@ public abstract class RangeEndpointCalculator<T extends Comparable<T>> {
     
     @Override
     public Float parseAndAddGap(Float value, String gap) {
-      return new Float(value.floatValue() + Float.valueOf(gap).floatValue());
+      return new Float(value.floatValue() + Float.parseFloat(gap));
     }
     
   }
@@ -281,7 +281,7 @@ public abstract class RangeEndpointCalculator<T extends Comparable<T>> {
     
     @Override
     public Double parseAndAddGap(Double value, String gap) {
-      return new Double(value.doubleValue() + Double.valueOf(gap).doubleValue());
+      return new Double(value.doubleValue() + Double.parseDouble(gap));
     }
     
   }
@@ -297,7 +297,7 @@ public abstract class RangeEndpointCalculator<T extends Comparable<T>> {
     
     @Override
     public Integer parseAndAddGap(Integer value, String gap) {
-      return new Integer(value.intValue() + Integer.valueOf(gap).intValue());
+      return new Integer(value.intValue() + Integer.parseInt(gap));
     }
     
   }
@@ -313,7 +313,7 @@ public abstract class RangeEndpointCalculator<T extends Comparable<T>> {
     
     @Override
     public Long parseAndAddGap(Long value, String gap) {
-      return new Long(value.longValue() + Long.valueOf(gap).longValue());
+      return new Long(value.longValue() + Long.parseLong(gap));
     }
     
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java b/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java
index 0258c33..7545eac 100644
--- a/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java
+++ b/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java
@@ -843,7 +843,7 @@ public class MailEntityProcessor extends EntityProcessorBase {
       String val = context.getEntityAttribute(prop);
       if (val != null) {
         val = context.replaceTokens(val);
-        v = Integer.valueOf(val);
+        v = Integer.parseInt(val);
       }
     } catch (NumberFormatException e) {
       // do nothing

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/solr/contrib/velocity/src/java/org/apache/solr/response/PageTool.java
----------------------------------------------------------------------
diff --git a/solr/contrib/velocity/src/java/org/apache/solr/response/PageTool.java b/solr/contrib/velocity/src/java/org/apache/solr/response/PageTool.java
index 1947f36..48dc826 100644
--- a/solr/contrib/velocity/src/java/org/apache/solr/response/PageTool.java
+++ b/solr/contrib/velocity/src/java/org/apache/solr/response/PageTool.java
@@ -38,7 +38,7 @@ public class PageTool {
     String rows = request.getParams().get("rows");
 
     if (rows != null) {
-      results_per_page = new Integer(rows);
+      results_per_page = Integer.parseInt(rows);
     }
     //TODO: Handle group by results
     Object docs = response.getResponse();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java b/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
index a07496f..33e8091 100644
--- a/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
+++ b/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
@@ -697,7 +697,7 @@ public class IndexFetcher {
 
       int indexCount = 1, confFilesCount = 1;
       if (props.containsKey(TIMES_INDEX_REPLICATED)) {
-        indexCount = Integer.valueOf(props.getProperty(TIMES_INDEX_REPLICATED)) + 1;
+        indexCount = Integer.parseInt(props.getProperty(TIMES_INDEX_REPLICATED)) + 1;
       }
       StringBuilder sb = readToStringBuilder(replicationTime, props.getProperty(INDEX_REPLICATED_AT_LIST));
       props.setProperty(INDEX_REPLICATED_AT_LIST, sb.toString());
@@ -708,7 +708,7 @@ public class IndexFetcher {
         props.setProperty(CONF_FILES_REPLICATED, confFiles.toString());
         props.setProperty(CONF_FILES_REPLICATED_AT, String.valueOf(replicationTime));
         if (props.containsKey(TIMES_CONFIG_REPLICATED)) {
-          confFilesCount = Integer.valueOf(props.getProperty(TIMES_CONFIG_REPLICATED)) + 1;
+          confFilesCount = Integer.parseInt(props.getProperty(TIMES_CONFIG_REPLICATED)) + 1;
         }
         props.setProperty(TIMES_CONFIG_REPLICATED, String.valueOf(confFilesCount));
       }
@@ -717,7 +717,7 @@ public class IndexFetcher {
       if (!successfulInstall) {
         int numFailures = 1;
         if (props.containsKey(TIMES_FAILED)) {
-          numFailures = Integer.valueOf(props.getProperty(TIMES_FAILED)) + 1;
+          numFailures = Integer.parseInt(props.getProperty(TIMES_FAILED)) + 1;
         }
         props.setProperty(TIMES_FAILED, String.valueOf(numFailures));
         props.setProperty(REPLICATION_FAILED_AT, String.valueOf(replicationTime));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
index e40b2c3..4f6a408 100644
--- a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
@@ -1075,7 +1075,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
       String ss[] = s.split(",");
       List<String> l = new ArrayList<>();
       for (String s1 : ss) {
-        l.add(new Date(Long.valueOf(s1)).toString());
+        l.add(new Date(Long.parseLong(s1)).toString());
       }
       nl.add(key, l);
     } else {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/solr/core/src/java/org/apache/solr/handler/component/RangeFacetRequest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/RangeFacetRequest.java b/solr/core/src/java/org/apache/solr/handler/component/RangeFacetRequest.java
index 3ac7300..c234866 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/RangeFacetRequest.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/RangeFacetRequest.java
@@ -659,7 +659,7 @@ public class RangeFacetRequest extends FacetComponent.FacetBase {
 
     @Override
     public Float parseAndAddGap(Float value, String gap) {
-      return new Float(value.floatValue() + Float.valueOf(gap).floatValue());
+      return new Float(value.floatValue() + Float.parseFloat(gap));
     }
   }
 
@@ -677,7 +677,7 @@ public class RangeFacetRequest extends FacetComponent.FacetBase {
 
     @Override
     public Double parseAndAddGap(Double value, String gap) {
-      return new Double(value.doubleValue() + Double.valueOf(gap).doubleValue());
+      return new Double(value.doubleValue() + Double.parseDouble(gap));
     }
   }
 
@@ -695,7 +695,7 @@ public class RangeFacetRequest extends FacetComponent.FacetBase {
 
     @Override
     public Integer parseAndAddGap(Integer value, String gap) {
-      return new Integer(value.intValue() + Integer.valueOf(gap).intValue());
+      return new Integer(value.intValue() + Integer.parseInt(gap));
     }
   }
 
@@ -713,7 +713,7 @@ public class RangeFacetRequest extends FacetComponent.FacetBase {
 
     @Override
     public Long parseAndAddGap(Long value, String gap) {
-      return new Long(value.longValue() + Long.valueOf(gap).longValue());
+      return new Long(value.longValue() + Long.parseLong(gap));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/solr/core/src/java/org/apache/solr/parser/SolrQueryParserBase.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/parser/SolrQueryParserBase.java b/solr/core/src/java/org/apache/solr/parser/SolrQueryParserBase.java
index 84ffcb9..cb3b1ee 100644
--- a/solr/core/src/java/org/apache/solr/parser/SolrQueryParserBase.java
+++ b/solr/core/src/java/org/apache/solr/parser/SolrQueryParserBase.java
@@ -623,7 +623,7 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
     } else if (fuzzy) {
       float fms = fuzzyMinSim;
       try {
-        fms = Float.valueOf(fuzzySlop.image.substring(1)).floatValue();
+        fms = Float.parseFloat(fuzzySlop.image.substring(1));
       } catch (Exception ignored) { }
       if(fms < 0.0f){
         throw new SyntaxError("Minimum similarity for a FuzzyQuery has to be between 0.0f and 1.0f !");
@@ -644,7 +644,7 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
     int s = phraseSlop;  // default
     if (fuzzySlop != null) {
       try {
-        s = Float.valueOf(fuzzySlop.image.substring(1)).intValue();
+        s = (int)Float.parseFloat(fuzzySlop.image.substring(1));
       }
       catch (Exception ignored) { }
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java
index 276af5f..a50fa2c 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java
@@ -499,7 +499,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
     }
     @Override
     public Float parseAndAddGap(Comparable value, String gap) {
-      return new Float(((Number)value).floatValue() + Float.valueOf(gap).floatValue());
+      return new Float(((Number)value).floatValue() + Float.parseFloat(gap));
     }
   }
   private static class DoubleCalc extends Calc {
@@ -520,7 +520,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
     }
     @Override
     public Double parseAndAddGap(Comparable value, String gap) {
-      return new Double(((Number)value).doubleValue() + Double.valueOf(gap).doubleValue());
+      return new Double(((Number)value).doubleValue() + Double.parseDouble(gap));
     }
   }
   private static class IntCalc extends Calc {
@@ -532,7 +532,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
     }
     @Override
     public Integer parseAndAddGap(Comparable value, String gap) {
-      return new Integer(((Number)value).intValue() + Integer.valueOf(gap).intValue());
+      return new Integer(((Number)value).intValue() + Integer.parseInt(gap));
     }
   }
   private static class LongCalc extends Calc {
@@ -544,7 +544,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
     }
     @Override
     public Long parseAndAddGap(Comparable value, String gap) {
-      return new Long(((Number)value).longValue() + Long.valueOf(gap).longValue());
+      return new Long(((Number)value).longValue() + Long.parseLong(gap));
     }
   }
   private static class DateCalc extends Calc {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessorFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessorFactory.java
index b642d89..8ee5ff2 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessorFactory.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessorFactory.java
@@ -99,7 +99,7 @@ public class TolerantUpdateProcessorFactory extends UpdateRequestProcessorFactor
     Object maxErrorsObj = args.get(MAX_ERRORS_PARAM); 
     if (maxErrorsObj != null) {
       try {
-        defaultMaxErrors = Integer.valueOf(maxErrorsObj.toString());
+        defaultMaxErrors = Integer.parseInt(maxErrorsObj.toString());
       } catch (Exception e) {
         throw new SolrException(ErrorCode.SERVER_ERROR, "Unnable to parse maxErrors parameter: " + maxErrorsObj, e);
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/solr/core/src/java/org/apache/solr/util/DateMathParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/DateMathParser.java b/solr/core/src/java/org/apache/solr/util/DateMathParser.java
index 643fde8..2124d1d 100644
--- a/solr/core/src/java/org/apache/solr/util/DateMathParser.java
+++ b/solr/core/src/java/org/apache/solr/util/DateMathParser.java
@@ -381,7 +381,7 @@ public class DateMathParser  {
         }
         int val = 0;
         try {
-          val = Integer.valueOf(ops[pos++]);
+          val = Integer.parseInt(ops[pos++]);
         } catch (NumberFormatException e) {
           throw new ParseException
             ("Not a Number: \"" + ops[pos-1] + "\"", pos-1);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/solr/core/src/java/org/apache/solr/util/SolrPluginUtils.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/SolrPluginUtils.java b/solr/core/src/java/org/apache/solr/util/SolrPluginUtils.java
index 9386600..4445e07 100644
--- a/solr/core/src/java/org/apache/solr/util/SolrPluginUtils.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrPluginUtils.java
@@ -583,8 +583,8 @@ public class SolrPluginUtils {
         String[] fieldAndSlopVsBoost = caratPattern.split(s);
         String[] fieldVsSlop = tildePattern.split(fieldAndSlopVsBoost[0]);
         String field = fieldVsSlop[0];
-        int slop  = (2 == fieldVsSlop.length) ? Integer.valueOf(fieldVsSlop[1]) : defaultSlop;
-        Float boost = (1 == fieldAndSlopVsBoost.length) ? 1  : Float.valueOf(fieldAndSlopVsBoost[1]);
+        int slop  = (2 == fieldVsSlop.length) ? Integer.parseInt(fieldVsSlop[1]) : defaultSlop;
+        float boost = (1 == fieldAndSlopVsBoost.length) ? 1  : Float.parseFloat(fieldAndSlopVsBoost[1]);
         FieldParams fp = new FieldParams(field,wordGrams,slop,boost);
         out.add(fp);
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCloudSnapshots.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCloudSnapshots.java b/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCloudSnapshots.java
index bb56a94..9503ee4 100644
--- a/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCloudSnapshots.java
+++ b/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCloudSnapshots.java
@@ -295,7 +295,7 @@ public class TestSolrCloudSnapshots extends SolrCloudTestCase {
     for(int i = 0 ; i < apiResult.size(); i++) {
       String commitName = apiResult.getName(i);
       String indexDirPath = (String)((NamedList)apiResult.get(commitName)).get(SolrSnapshotManager.INDEX_DIR_PATH);
-      long genNumber = Long.valueOf((String)((NamedList)apiResult.get(commitName)).get(SolrSnapshotManager.GENERATION_NUM));
+      long genNumber = Long.parseLong((String)((NamedList)apiResult.get(commitName)).get(SolrSnapshotManager.GENERATION_NUM));
       result.add(new SnapshotMetaData(commitName, indexDirPath, genNumber));
     }
     return result;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCoreSnapshots.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCoreSnapshots.java b/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCoreSnapshots.java
index da6dbac..7a9b0bb 100644
--- a/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCoreSnapshots.java
+++ b/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCoreSnapshots.java
@@ -293,7 +293,7 @@ public class TestSolrCoreSnapshots extends SolrCloudTestCase {
     for(int i = 0 ; i < apiResult.size(); i++) {
       String commitName = apiResult.getName(i);
       String indexDirPath = (String)((NamedList)apiResult.get(commitName)).get("indexDirPath");
-      long genNumber = Long.valueOf((String)((NamedList)apiResult.get(commitName)).get("generation"));
+      long genNumber = Long.parseLong((String)((NamedList)apiResult.get(commitName)).get("generation"));
       result.add(new SnapshotMetaData(commitName, indexDirPath, genNumber));
     }
     return result;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/solr/core/src/test/org/apache/solr/search/TestSolrFieldCacheMBean.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestSolrFieldCacheMBean.java b/solr/core/src/test/org/apache/solr/search/TestSolrFieldCacheMBean.java
index 35bdec6..d11c919 100644
--- a/solr/core/src/test/org/apache/solr/search/TestSolrFieldCacheMBean.java
+++ b/solr/core/src/test/org/apache/solr/search/TestSolrFieldCacheMBean.java
@@ -68,7 +68,7 @@ public class TestSolrFieldCacheMBean extends SolrTestCaseJ4 {
   private void assertEntryListIncluded(boolean checkJmx) {
     SolrFieldCacheMBean mbean = new SolrFieldCacheMBean();
     NamedList stats = checkJmx ? mbean.getStatisticsForJmx() : mbean.getStatistics();
-    assert(new Integer(stats.get("entries_count").toString()) > 0);
+    assert(Integer.parseInt(stats.get("entries_count").toString()) > 0);
     assertNotNull(stats.get("total_size"));
     assertNotNull(stats.get("entry#0"));
   }
@@ -76,7 +76,7 @@ public class TestSolrFieldCacheMBean extends SolrTestCaseJ4 {
   private void assertEntryListNotIncluded(boolean checkJmx) {
     SolrFieldCacheMBean mbean = new SolrFieldCacheMBean();
     NamedList stats = checkJmx ? mbean.getStatisticsForJmx() : mbean.getStatistics();
-    assert(new Integer(stats.get("entries_count").toString()) > 0);
+    assert(Integer.parseInt(stats.get("entries_count").toString()) > 0);
     assertNull(stats.get("total_size"));
     assertNull(stats.get("entry#0"));
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7b87f5b/solr/core/src/test/org/apache/solr/search/mlt/CloudMLTQParserTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/mlt/CloudMLTQParserTest.java b/solr/core/src/test/org/apache/solr/search/mlt/CloudMLTQParserTest.java
index e3a8d7b..f502f24 100644
--- a/solr/core/src/test/org/apache/solr/search/mlt/CloudMLTQParserTest.java
+++ b/solr/core/src/test/org/apache/solr/search/mlt/CloudMLTQParserTest.java
@@ -102,7 +102,7 @@ public class CloudMLTQParserTest extends SolrCloudTestCase {
     int[] actualIds = new int[10];
     int i = 0;
     for (SolrDocument solrDocument : solrDocuments) {
-      actualIds[i++] = Integer.valueOf(String.valueOf(solrDocument.getFieldValue("id")));
+      actualIds[i++] = Integer.parseInt(String.valueOf(solrDocument.getFieldValue("id")));
     }
     assertArrayEquals(expectedIds, actualIds);
 
@@ -117,7 +117,7 @@ public class CloudMLTQParserTest extends SolrCloudTestCase {
     int[] actualIds = new int[solrDocuments.size()];
     int i = 0;
     for (SolrDocument solrDocument : solrDocuments) {
-      actualIds[i++] = Integer.valueOf(String.valueOf(solrDocument.getFieldValue("id")));
+      actualIds[i++] = Integer.parseInt(String.valueOf(solrDocument.getFieldValue("id")));
     }
     assertArrayEquals(expectedIds, actualIds);
 
@@ -127,7 +127,7 @@ public class CloudMLTQParserTest extends SolrCloudTestCase {
     actualIds = new int[solrDocuments.size()];
     i = 0;
     for (SolrDocument solrDocument : solrDocuments) {
-      actualIds[i++] = Integer.valueOf(String.valueOf(solrDocument.getFieldValue("id")));
+      actualIds[i++] = Integer.parseInt(String.valueOf(solrDocument.getFieldValue("id")));
     }
     System.out.println("DEBUG ACTUAL IDS 1: " + Arrays.toString(actualIds));
     assertArrayEquals(expectedIds, actualIds);
@@ -138,7 +138,7 @@ public class CloudMLTQParserTest extends SolrCloudTestCase {
     actualIds = new int[solrDocuments.size()];
     i = 0;
     for (SolrDocument solrDocument : solrDocuments) {
-      actualIds[i++] = Integer.valueOf(String.valueOf(solrDocument.getFieldValue("id")));
+      actualIds[i++] = Integer.parseInt(String.valueOf(solrDocument.getFieldValue("id")));
     }
     System.out.println("DEBUG ACTUAL IDS 2: " + Arrays.toString(actualIds));
     assertArrayEquals(expectedIds, actualIds);
@@ -154,7 +154,7 @@ public class CloudMLTQParserTest extends SolrCloudTestCase {
     int[] actualIds = new int[solrDocuments.size()];
     int i = 0;
     for (SolrDocument solrDocument : solrDocuments) {
-      actualIds[i++] = Integer.valueOf(String.valueOf(solrDocument.getFieldValue("id")));
+      actualIds[i++] = Integer.parseInt(String.valueOf(solrDocument.getFieldValue("id")));
     }
     assertArrayEquals(expectedIds, actualIds);
 
@@ -184,7 +184,7 @@ public class CloudMLTQParserTest extends SolrCloudTestCase {
     int[] actualIds = new int[solrDocuments.size()];
     int i = 0;
     for (SolrDocument solrDocument : solrDocuments) {
-      actualIds[i++] = Integer.valueOf(String.valueOf(solrDocument.getFieldValue("id")));
+      actualIds[i++] = Integer.parseInt(String.valueOf(solrDocument.getFieldValue("id")));
     }
 
     assertArrayEquals(expectedIds, actualIds);
@@ -236,7 +236,7 @@ public class CloudMLTQParserTest extends SolrCloudTestCase {
     int i = 0;
     StringBuilder sb = new StringBuilder();
     for (SolrDocument solrDocument : solrDocuments) {
-      actualIds[i++] =  Integer.valueOf(String.valueOf(solrDocument.getFieldValue("id")));
+      actualIds[i++] =  Integer.parseInt(String.valueOf(solrDocument.getFieldValue("id")));
       sb.append(actualIds[i-1]).append(", ");
     }
     assertArrayEquals(expectedIds, actualIds);
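
A minimal stand-alone sketch of the unbox-then-rebox pattern this commit removes; the BoxingSketch class below is illustrative and not part of Lucene/Solr. Integer.valueOf(gap) produces a boxed Integer only for intValue() to unbox it again before the sum is boxed once more, while Integer.parseInt(gap) stays on primitives until the single, final boxing.

// BoxingSketch.java -- illustrative only; class and method names do not exist in Lucene/Solr.
public class BoxingSketch {

  // Before: Integer.valueOf(gap) allocates/looks up a boxed Integer just so
  // intValue() can unbox it again; the sum is then boxed once more.
  static Integer addGapBoxed(Integer value, String gap) {
    return new Integer(value.intValue() + Integer.valueOf(gap).intValue());
  }

  // After: parseInt(gap) returns a primitive int, so only the final result is boxed.
  // (On newer JDKs the Integer(int) constructor is deprecated in favor of valueOf;
  // the commit itself keeps the constructor and only changes the parsing call.)
  static Integer addGapPrimitive(Integer value, String gap) {
    return Integer.valueOf(value.intValue() + Integer.parseInt(gap));
  }

  public static void main(String[] args) {
    System.out.println(addGapBoxed(10, "5"));      // 15
    System.out.println(addGapPrimitive(10, "5"));  // 15
  }
}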


[33/46] lucene-solr:jira/solr-9959: SOLR-10319 SolrCore "instanceDir" metric not visible in JMX.

Posted by ab...@apache.org.
SOLR-10319 SolrCore "instanceDir" metric not visible in JMX.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/a3e4f57e
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/a3e4f57e
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/a3e4f57e

Branch: refs/heads/jira/solr-9959
Commit: a3e4f57e1b520787fc332bf7471d68331af65e25
Parents: fb296fd
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Mon Mar 20 19:03:55 2017 +0100
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Mon Mar 20 19:03:55 2017 +0100

----------------------------------------------------------------------
 solr/CHANGES.txt                                      | 2 ++
 solr/core/src/java/org/apache/solr/core/SolrCore.java | 2 +-
 2 files changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a3e4f57e/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index e628694..b4196ed 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -313,6 +313,8 @@ Bug Fixes
 * SOLR-10218: The Schema API commands "add-field-type" and "replace-field-type" improperly specify SimilarityFactory params.
   (Benjamin Deininger, Troy Mohl, Steve Rowe)
 
+* SOLR-10319: SolrCore "instanceDir" metric not visible in JMX. (ab)
+
 Optimizations
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a3e4f57e/solr/core/src/java/org/apache/solr/core/SolrCore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index 6e25280..9d77c7e 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -1134,7 +1134,7 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
     manager.registerGauge(registry, () -> name == null ? "(null)" : name, true, "coreName", Category.CORE.toString());
     manager.registerGauge(registry, () -> startTime, true, "startTime", Category.CORE.toString());
     manager.registerGauge(registry, () -> getOpenCount(), true, "refCount", Category.CORE.toString());
-    manager.registerGauge(registry, () -> resourceLoader.getInstancePath(), true, "instanceDir", Category.CORE.toString());
+    manager.registerGauge(registry, () -> resourceLoader.getInstancePath().toString(), true, "instanceDir", Category.CORE.toString());
     manager.registerGauge(registry, () -> getIndexDir(), true, "indexDir", Category.CORE.toString());
     manager.registerGauge(registry, () -> getIndexSize(), true, "sizeInBytes", Category.INDEX.toString());
     manager.registerGauge(registry, () -> NumberUtils.readableSize(getIndexSize()), true, "size", Category.INDEX.toString());
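
A minimal sketch of why the one-line change matters, written against the plain Dropwizard Metrics 3.x API rather than Solr's SolrMetricManager; the class below, and the assumption that JMX clients render a String gauge but not a java.nio.file.Path gauge, are illustrative and inferred from the reported symptom and the fix.

// InstanceDirGaugeSketch.java -- illustrative only; assumes Dropwizard Metrics 3.x
// (com.codahale.metrics) on the classpath.
import com.codahale.metrics.Gauge;
import com.codahale.metrics.JmxReporter;
import com.codahale.metrics.MetricRegistry;

import java.nio.file.Path;
import java.nio.file.Paths;

public class InstanceDirGaugeSketch {
  public static void main(String[] args) throws Exception {
    MetricRegistry registry = new MetricRegistry();
    Path instancePath = Paths.get("/var/solr/data/collection1");   // illustrative path

    // Pre-fix shape: the gauge value is a Path object, which a JMX console may not
    // be able to display (assumption based on SOLR-10319's symptom).
    registry.register("CORE.instanceDirAsPath", (Gauge<Path>) () -> instancePath);

    // Post-fix shape: the same information exposed as a plain String.
    registry.register("CORE.instanceDir", (Gauge<String>) () -> instancePath.toString());

    JmxReporter reporter = JmxReporter.forRegistry(registry).build();
    reporter.start();

    Thread.sleep(1000);  // keep the JVM alive briefly so the MBeans can be inspected
    reporter.stop();
  }
}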


[13/46] lucene-solr:jira/solr-9959: Fixed typos in CHANGES.txt

Posted by ab...@apache.org.
Fixed typos in CHANGES.txt


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/f8831ce3
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/f8831ce3
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/f8831ce3

Branch: refs/heads/jira/solr-9959
Commit: f8831ce3c4c608d4662bd2179454f7992de4bea6
Parents: 7a625bb
Author: Joel Bernstein <jb...@apache.org>
Authored: Thu Mar 16 14:18:43 2017 -0400
Committer: Joel Bernstein <jb...@apache.org>
Committed: Thu Mar 16 14:18:43 2017 -0400

----------------------------------------------------------------------
 solr/CHANGES.txt | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f8831ce3/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index d49b439..7ec0c52 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -112,8 +112,8 @@ Upgrade Notes
   number of requests. New Codahale Metrics implementation applies exponential decay to this value,
   which heavily biases the average towards the last 5 minutes. (ab)
 
-* SOLR-8593: Parallel SQL now uses Apache Calcite as it's SQL framework. As part of this change
-  the default aggregation mode has been changed to facet rather map_reduce. There has also beeen changes
+* SOLR-8593: Parallel SQL now uses Apache Calcite as its SQL framework. As part of this change
+  the default aggregation mode has been changed to facet rather than map_reduce. There has also been changes
   to the SQL aggregate response and some SQL syntax changes. Consult the documentation for full details.
 
 


[08/46] lucene-solr:jira/solr-9959: SOLR-10270: Stop exporting _version_ during GROUP BY aggregations in map_reduce mode

Posted by ab...@apache.org.
SOLR-10270: Stop exporting _version_ during GROUP BY aggregations in map_reduce mode


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/22f91ba0
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/22f91ba0
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/22f91ba0

Branch: refs/heads/jira/solr-9959
Commit: 22f91ba0cafeb04e1568cb8c61b5356ef7e91ade
Parents: 2bce98b
Author: Joel Bernstein <jb...@apache.org>
Authored: Mon Mar 13 12:50:17 2017 -0400
Committer: Joel Bernstein <jb...@apache.org>
Committed: Wed Mar 15 18:31:45 2017 -0400

----------------------------------------------------------------------
 .../core/src/java/org/apache/solr/handler/sql/SolrTable.java | 8 --------
 1 file changed, 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/22f91ba0/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java b/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
index 9375bc0..37ed7d9 100644
--- a/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
+++ b/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
@@ -330,24 +330,16 @@ class SolrTable extends AbstractQueryableTable implements TranslatableTable {
 
   private String getFields(Set<String> fieldSet) {
     StringBuilder buf = new StringBuilder();
-    boolean appendVersion = true;
     for(String field : fieldSet) {
 
       if(buf.length() > 0) {
         buf.append(",");
       }
 
-      if(field.equals("_version_")) {
-        appendVersion = false;
-      }
 
       buf.append(field);
     }
 
-    if(appendVersion){
-      buf.append(",_version_");
-    }
-
     return buf.toString();
   }
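
A tiny sketch of what the deleted lines did to the comma-joined field list sent to the export handler; the class, method, and field names below are invented for illustration and are not Solr's.

// FieldListSketch.java -- illustrative only.
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.Set;

public class FieldListSketch {

  // Behaviour before SOLR-10270: "_version_" was appended unless already requested.
  static String fieldsBefore(Set<String> fieldSet) {
    String joined = String.join(",", fieldSet);
    return fieldSet.contains("_version_") ? joined : joined + ",_version_";
  }

  // Behaviour after SOLR-10270: only the requested fields are sent.
  static String fieldsAfter(Set<String> fieldSet) {
    return String.join(",", fieldSet);
  }

  public static void main(String[] args) {
    Set<String> groupByFields = new LinkedHashSet<>(Arrays.asList("year_i", "price_d"));
    System.out.println(fieldsBefore(groupByFields)); // year_i,price_d,_version_
    System.out.println(fieldsAfter(groupByFields));  // year_i,price_d
  }
}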
 


[23/46] lucene-solr:jira/solr-9959: SOLR-7452: add refine param to json facets, implement for array field faceting

Posted by ab...@apache.org.
SOLR-7452: add refine param to json facets, implement for array field faceting


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/540ee1db
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/540ee1db
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/540ee1db

Branch: refs/heads/jira/solr-9959
Commit: 540ee1db10b64aead7d7756b161c2c7348319d81
Parents: 3ca4d80
Author: yonik <yo...@apache.org>
Authored: Fri Mar 17 12:13:43 2017 -0400
Committer: yonik <yo...@apache.org>
Committed: Fri Mar 17 12:13:43 2017 -0400

----------------------------------------------------------------------
 .../solr/search/facet/FacetFieldProcessor.java  |  42 +++++-
 .../facet/FacetFieldProcessorByArray.java       |   4 +
 .../FacetFieldProcessorByEnumTermsStream.java   |   2 +-
 .../apache/solr/search/facet/FacetModule.java   | 127 +++++++++++--------
 .../solr/search/facet/FacetProcessor.java       |  27 +++-
 .../apache/solr/search/facet/FacetQuery.java    |   6 +-
 .../apache/solr/search/facet/FacetRange.java    |   4 +-
 .../apache/solr/search/facet/FacetRequest.java  |   3 +
 .../search/facet/TestJsonFacetRefinement.java   |  94 ++++++++++++++
 9 files changed, 247 insertions(+), 62 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/540ee1db/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
index bbc782c..fb44f62 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
@@ -31,6 +31,7 @@ import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.PriorityQueue;
 import org.apache.solr.common.util.SimpleOrderedMap;
+import org.apache.solr.schema.FieldType;
 import org.apache.solr.schema.SchemaField;
 import org.apache.solr.search.DocSet;
 
@@ -310,7 +311,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
     if (freq.missing) {
       // TODO: it would be more efficient to build up a missing DocSet if we need it here anyway.
       SimpleOrderedMap<Object> missingBucket = new SimpleOrderedMap<>();
-      fillBucket(missingBucket, getFieldMissingQuery(fcontext.searcher, freq.field), null);
+      fillBucket(missingBucket, getFieldMissingQuery(fcontext.searcher, freq.field), null, false);
       res.add("missing", missingBucket);
     }
 
@@ -378,7 +379,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
       }
     }
 
-    processSubs(target, filter, subDomain);
+    processSubs(target, filter, subDomain, false);
   }
 
   @Override
@@ -510,4 +511,41 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
       }
     }
   }
+
+
+
+  protected SimpleOrderedMap<Object> refineFacets() throws IOException {
+    List leaves = (List)fcontext.facetInfo.get("_l");
+
+    // For leaf refinements, we do full faceting for each leaf bucket.  Any sub-facets of these buckets will be fully evaluated.  Because of this, we should never
+    // encounter leaf refinements that have sub-facets that return partial results.
+
+    SimpleOrderedMap<Object> res = new SimpleOrderedMap<>();
+    List<SimpleOrderedMap> bucketList = new ArrayList<>(leaves.size());
+    res.add("buckets", bucketList);
+
+    // TODO: an alternate implementations can fill all accs at once
+    createAccs(-1, 1);
+
+    FieldType ft = sf.getType();
+    for (Object bucketVal : leaves) {
+      SimpleOrderedMap<Object> bucket = new SimpleOrderedMap<>();
+      bucketList.add(bucket);
+      bucket.add("val", bucketVal);
+
+      // String internal = ft.toInternal( tobj.toString() );  // TODO - we need a better way to get from object to query...
+
+      Query domainQ = ft.getFieldQuery(null, sf, bucketVal.toString());
+
+      fillBucket(bucket, domainQ, null, false);
+    }
+
+    // If there are just a couple of leaves, and if the domain is large, then
+    // going by term is likely the most efficient?
+    // If the domain is small, or if the number of leaves is large, then doing
+    // the normal collection method may be best.
+
+    return res;
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/540ee1db/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArray.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArray.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArray.java
index 767bb55..95b9f0b 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArray.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArray.java
@@ -57,6 +57,10 @@ abstract class FacetFieldProcessorByArray extends FacetFieldProcessor {
   }
 
   private SimpleOrderedMap<Object> calcFacets() throws IOException {
+    if (fcontext.facetInfo != null) {
+      return refineFacets();
+    }
+
     String prefix = freq.prefix;
     if (prefix == null || prefix.length() == 0) {
       prefixRef = null;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/540ee1db/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByEnumTermsStream.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByEnumTermsStream.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByEnumTermsStream.java
index 2feff15..94f3b2d 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByEnumTermsStream.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByEnumTermsStream.java
@@ -333,7 +333,7 @@ class FacetFieldProcessorByEnumTermsStream extends FacetFieldProcessor implement
         bucket.add("val", bucketVal);
         addStats(bucket, 0);
         if (hasSubFacets) {
-          processSubs(bucket, bucketQuery, termSet);
+          processSubs(bucket, bucketQuery, termSet, false);
         }
 
         // TODO... termSet needs to stick around for streaming sub-facets?

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/540ee1db/solr/core/src/java/org/apache/solr/search/facet/FacetModule.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetModule.java b/solr/core/src/java/org/apache/solr/search/facet/FacetModule.java
index 87aaa8f..630e968 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetModule.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetModule.java
@@ -39,6 +39,7 @@ import org.apache.solr.search.QueryContext;
 import org.apache.solr.search.SyntaxError;
 import org.apache.solr.util.RTimer;
 import org.noggit.JSONUtil;
+import org.noggit.ObjectBuilder;
 
 public class FacetModule extends SearchComponent {
 
@@ -52,7 +53,7 @@ public class FacetModule extends SearchComponent {
   public final static int PURPOSE_REFINE_JSON_FACETS   = 0x00200000;
 
   // Internal information passed down from the top level to shards for distributed faceting.
-  private final static String FACET_STATE = "_facet_";
+  private final static String FACET_INFO = "_facet_";
   private final static String FACET_REFINE = "refine";
 
 
@@ -62,43 +63,6 @@ public class FacetModule extends SearchComponent {
     return (FacetComponentState) rb.req.getContext().get(FacetComponentState.class);
   }
 
-  @Override
-  public void process(ResponseBuilder rb) throws IOException {
-    // if this is null, faceting is not enabled
-    FacetComponentState facetState = getFacetComponentState(rb);
-    if (facetState == null) return;
-
-    boolean isShard = rb.req.getParams().getBool(ShardParams.IS_SHARD, false);
-
-    FacetContext fcontext = new FacetContext();
-    fcontext.base = rb.getResults().docSet;
-    fcontext.req = rb.req;
-    fcontext.searcher = rb.req.getSearcher();
-    fcontext.qcontext = QueryContext.newContext(fcontext.searcher);
-    if (isShard) {
-      fcontext.flags |= FacetContext.IS_SHARD;
-    }
-
-    FacetProcessor fproc = facetState.facetRequest.createFacetProcessor(fcontext);
-    if (rb.isDebug()) {
-      FacetDebugInfo fdebug = new FacetDebugInfo();
-      fcontext.setDebugInfo(fdebug);
-      fdebug.setReqDescription(facetState.facetRequest.getFacetDescription());
-      fdebug.setProcessor(fproc.getClass().getSimpleName());
-     
-      final RTimer timer = new RTimer();
-      fproc.process();
-      long timeElapsed = (long) timer.getTime();
-      fdebug.setElapse(timeElapsed);
-      fdebug.putInfoItem("domainSize", (long)fcontext.base.size());
-      rb.req.getContext().put("FacetDebugInfo", fdebug);
-    } else {
-      fproc.process();
-    }
-    
-    rb.rsp.add("facets", fproc.getResponse());
-  }
-
 
   @Override
   public void prepare(ResponseBuilder rb) throws IOException {
@@ -118,12 +82,14 @@ public class FacetModule extends SearchComponent {
     SolrParams params = rb.req.getParams();
 
     boolean isShard = params.getBool(ShardParams.IS_SHARD, false);
+    Map<String,Object> facetInfo = null;
     if (isShard) {
-      String jfacet = params.get(FACET_STATE);
+      String jfacet = params.get(FACET_INFO);
       if (jfacet == null) {
-        // if this is a shard request, but there is no facet state, then don't do anything.
+        // if this is a shard request, but there is no _facet_ info, then don't do anything.
         return;
       }
+      facetInfo = (Map<String,Object>) ObjectBuilder.fromJSON(jfacet);
     }
 
     // At this point, we know we need to do something.  Create and save the state.
@@ -141,6 +107,7 @@ public class FacetModule extends SearchComponent {
     FacetComponentState fcState = new FacetComponentState();
     fcState.rb = rb;
     fcState.isShard = isShard;
+    fcState.facetInfo = facetInfo;
     fcState.facetCommands = jsonFacet;
     fcState.facetRequest = facetRequest;
 
@@ -148,12 +115,57 @@ public class FacetModule extends SearchComponent {
   }
 
 
+  @Override
+  public void process(ResponseBuilder rb) throws IOException {
+    // if this is null, faceting is not enabled
+    FacetComponentState facetState = getFacetComponentState(rb);
+    if (facetState == null) return;
+
+    boolean isShard = rb.req.getParams().getBool(ShardParams.IS_SHARD, false);
+
+    FacetContext fcontext = new FacetContext();
+    fcontext.base = rb.getResults().docSet;
+    fcontext.req = rb.req;
+    fcontext.searcher = rb.req.getSearcher();
+    fcontext.qcontext = QueryContext.newContext(fcontext.searcher);
+    if (isShard) {
+      fcontext.flags |= FacetContext.IS_SHARD;
+      fcontext.facetInfo = facetState.facetInfo.isEmpty() ? null : (Map<String,Object>)facetState.facetInfo.get(FACET_REFINE);
+      if (fcontext.facetInfo != null) {
+        fcontext.flags |= FacetContext.IS_REFINEMENT;
+        fcontext.flags |= FacetContext.SKIP_FACET; // the root bucket should have been received from all shards previously
+      }
+    }
+
+    FacetProcessor fproc = facetState.facetRequest.createFacetProcessor(fcontext);
+    if (rb.isDebug()) {
+      FacetDebugInfo fdebug = new FacetDebugInfo();
+      fcontext.setDebugInfo(fdebug);
+      fdebug.setReqDescription(facetState.facetRequest.getFacetDescription());
+      fdebug.setProcessor(fproc.getClass().getSimpleName());
+     
+      final RTimer timer = new RTimer();
+      fproc.process();
+      long timeElapsed = (long) timer.getTime();
+      fdebug.setElapse(timeElapsed);
+      fdebug.putInfoItem("domainSize", (long)fcontext.base.size());
+      rb.req.getContext().put("FacetDebugInfo", fdebug);
+    } else {
+      fproc.process();
+    }
+    
+    rb.rsp.add("facets", fproc.getResponse());
+  }
+
+
+
+
   private void clearFaceting(List<ShardRequest> outgoing) {
     // turn off faceting for requests not marked as being for faceting refinements
     for (ShardRequest sreq : outgoing) {
       if ((sreq.purpose & PURPOSE_REFINE_JSON_FACETS) != 0) continue;
-      sreq.params.remove("json.facet");  // this just saves space... the presence of FACET_STATE really control the faceting
-      sreq.params.remove(FACET_STATE);
+      sreq.params.remove("json.facet");  // this just saves space... the presence of FACET_INFO is enough to control the faceting
+      sreq.params.remove(FACET_INFO);
     }
   }
 
@@ -215,16 +227,15 @@ public class FacetModule extends SearchComponent {
         // don't request any documents
         shardsRefineRequest.params.remove(CommonParams.START);
         shardsRefineRequest.params.set(CommonParams.ROWS, "0");
-        shardsRefineRequest.params.set(CommonParams.ROWS, "0");
         shardsRefineRequest.params.set(FacetParams.FACET, false);
       }
 
       shardsRefineRequest.purpose |= PURPOSE_REFINE_JSON_FACETS;
 
-      Map<String,Object> fstate = new HashMap<>(1);
-      fstate.put(FACET_REFINE, refinement);
-      String fstateString = JSONUtil.toJSON(fstate);
-      shardsRefineRequest.params.add(FACET_STATE, fstateString);
+      Map<String,Object> finfo = new HashMap<>(1);
+      finfo.put(FACET_REFINE, refinement);
+      String finfoStr = JSONUtil.toJSON(finfo);
+      shardsRefineRequest.params.add(FACET_INFO, finfoStr);
 
       if (newRequest) {
         rb.addRequest(this, shardsRefineRequest);
@@ -242,12 +253,12 @@ public class FacetModule extends SearchComponent {
 
     if ((sreq.purpose & ShardRequest.PURPOSE_GET_TOP_IDS) != 0) {
       sreq.purpose |= FacetModule.PURPOSE_GET_JSON_FACETS;
-      sreq.params.set(FACET_STATE, "{}"); // The presence of FACET_STATE (_facet_) turns on json faceting
+      sreq.params.set(FACET_INFO, "{}"); // The presence of FACET_INFO (_facet_) turns on json faceting
     } else {
       // turn off faceting on other requests
       /*** distributedProcess will need to use other requests for refinement
-      sreq.params.remove("json.facet");  // this just saves space... the presence of FACET_STATE really control the faceting
-      sreq.params.remove(FACET_STATE);
+      sreq.params.remove("json.facet");  // this just saves space... the presence of FACET_INFO really control the faceting
+      sreq.params.remove(FACET_INFO);
        **/
     }
   }
@@ -267,6 +278,18 @@ public class FacetModule extends SearchComponent {
         facetState.merger = facetState.facetRequest.createFacetMerger(facet);
         facetState.mcontext = new FacetMerger.Context( sreq.responses.size() );
       }
+
+      if ((sreq.purpose & PURPOSE_REFINE_JSON_FACETS) != 0) {
+        System.err.println("REFINE FACET RESULT FROM SHARD = " + facet);
+        // call merge again with a diff flag set on the context???
+//        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "WORK IN PROGRESS, MERGING FACET REFINEMENT NOT SUPPORTED YET!");
+
+        facetState.mcontext.root = facet;
+        facetState.mcontext.setShard(shardRsp.getShard());  // TODO: roll newShard into setShard?
+        facetState.merger.merge(facet , facetState.mcontext);
+        return;
+      }
+
       facetState.mcontext.root = facet;
       facetState.mcontext.newShard(shardRsp.getShard());
       facetState.merger.merge(facet , facetState.mcontext);
@@ -304,11 +327,15 @@ public class FacetModule extends SearchComponent {
 }
 
 
+// TODO: perhaps factor out some sort of root/parent facet object that doesn't depend
+// on stuff like ResponseBuilder, but contains request parameters,
+// root filter lists (for filter exclusions), etc?
 class FacetComponentState {
   ResponseBuilder rb;
   Map<String,Object> facetCommands;
   FacetRequest facetRequest;
   boolean isShard;
+  Map<String,Object> facetInfo; // _facet_ param: contains out-of-band facet info, mainly for refinement requests
 
   //
   // Only used for distributed search

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/540ee1db/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java b/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
index 4a839a2..de6dd72 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
@@ -366,10 +366,13 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
     }
   }
 
-  void fillBucket(SimpleOrderedMap<Object> bucket, Query q, DocSet result) throws IOException {
+  // TODO: rather than just have a raw "response", perhaps we should model as a bucket object that contains the response plus extra info?
+  void fillBucket(SimpleOrderedMap<Object> bucket, Query q, DocSet result, boolean skip) throws IOException {
+
+    // TODO: we don't need the DocSet if we've already calculated everything during the first phase
     boolean needDocSet = freq.getFacetStats().size() > 0 || freq.getSubFacets().size() > 0;
 
-    // TODO: always collect counts or not???
+    // TODO: put info in for the merger (like "skip=true"?) Maybe we don't need to if we leave out all extraneous info?
 
     int count;
 
@@ -382,7 +385,7 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
       } else {
         result = fcontext.searcher.getDocSet(q, fcontext.base);
       }
-      count = result.size();
+      count = result.size();  // don't really need this if we are skipping, but it's free.
     } else {
       if (q == null) {
         count = fcontext.base.size();
@@ -392,8 +395,10 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
     }
 
     try {
-      processStats(bucket, result, count);
-      processSubs(bucket, q, result);
+      if (!skip) {
+        processStats(bucket, result, count);
+      }
+      processSubs(bucket, q, result, skip);
     } finally {
       if (result != null) {
         // result.decref(); // OFF-HEAP
@@ -402,7 +407,7 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
     }
   }
 
-  void processSubs(SimpleOrderedMap<Object> response, Query filter, DocSet domain) throws IOException {
+  void processSubs(SimpleOrderedMap<Object> response, Query filter, DocSet domain, boolean skip) throws IOException {
 
     boolean emptyDomain = domain == null || domain.size() == 0;
 
@@ -417,8 +422,18 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
         continue;
       }
 
+      Map<String,Object>facetInfoSub = null;
+      if (fcontext.facetInfo != null) {
+        facetInfoSub = (Map<String,Object>)fcontext.facetInfo.get(sub.getKey());
+      }
+
+      // If we're skipping this node, then we only need to process sub-facets that have facet info specified.
+      if (skip && facetInfoSub == null) continue;
+
       // make a new context for each sub-facet since they can change the domain
       FacetContext subContext = fcontext.sub(filter, domain);
+      subContext.facetInfo = facetInfoSub;
+      if (!skip) subContext.flags &= ~FacetContext.SKIP_FACET;  // turn off the skip flag if we're not skipping this bucket
       FacetProcessor subProcessor = subRequest.createFacetProcessor(subContext);
 
       if (fcontext.getDebugInfo() != null) {   // if fcontext.debugInfo != null, it means rb.debug() == true

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/540ee1db/solr/core/src/java/org/apache/solr/search/facet/FacetQuery.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetQuery.java b/solr/core/src/java/org/apache/solr/search/facet/FacetQuery.java
index 174b832..584bec3 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetQuery.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetQuery.java
@@ -56,8 +56,12 @@ class FacetQueryProcessor extends FacetProcessor<FacetQuery> {
   @Override
   public void process() throws IOException {
     super.process();
+
+    if (fcontext.facetInfo != null) {
+      // FIXME - what needs to be done here?
+    }
     response = new SimpleOrderedMap<>();
-    fillBucket(response, freq.q, null);
+    fillBucket(response, freq.q, null, (fcontext.flags & FacetContext.SKIP_FACET)!=0);
   }
 
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/540ee1db/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java
index a50fa2c..5d0989b 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java
@@ -350,7 +350,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
     if (freq.getSubFacets().size() > 0) {
       DocSet subBase = intersections[slot];
       try {
-        processSubs(bucket, filters[slot], subBase);
+        processSubs(bucket, filters[slot], subBase, false);
       } finally {
         // subContext.base.decref();  // OFF-HEAP
         // subContext.base = null;  // do not modify context after creation... there may be deferred execution (i.e. streaming)
@@ -367,7 +367,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
     }
 
     Query rangeQ = sf.getType().getRangeQuery(null, sf, range.low == null ? null : calc.formatValue(range.low), range.high==null ? null : calc.formatValue(range.high), range.includeLower, range.includeUpper);
-    fillBucket(bucket, rangeQ, null);
+    fillBucket(bucket, rangeQ, null, false);
 
     return bucket;
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/540ee1db/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
index 636460f..9835f7d 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
@@ -168,7 +168,10 @@ public abstract class FacetRequest {
 class FacetContext {
   // Context info for actually executing a local facet command
   public static final int IS_SHARD=0x01;
+  public static final int IS_REFINEMENT=0x02;
+  public static final int SKIP_FACET=0x04;  // refinement: skip calculating this immediate facet, but proceed to specific sub-facets based on facetInfo
 
+  Map<String,Object> facetInfo; // refinement info for this node
   QueryContext qcontext;
   SolrQueryRequest req;  // TODO: replace with params?
   SolrIndexSearcher searcher;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/540ee1db/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java
index a8f8ff2..f23ae8c 100644
--- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java
+++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java
@@ -18,9 +18,12 @@
 package org.apache.solr.search.facet;
 
 import java.io.IOException;
+import java.util.List;
 
 import org.apache.solr.JSONTestUtil;
 import org.apache.solr.SolrTestCaseHS;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.request.SolrQueryRequest;
 import org.junit.AfterClass;
@@ -209,6 +212,97 @@ public class TestJsonFacetRefinement extends SolrTestCaseHS {
   }
 
 
+  @Test
+  public void testBasicRefinement() throws Exception {
+    initServers();
+    Client client = servers.getClient(random().nextInt());
+    client.queryDefaults().set( "shards", servers.getShards(), "debugQuery", Boolean.toString(random().nextBoolean()) );
+
+    List<SolrClient> clients = client.getClientProvider().all();
+    assertTrue(clients.size() >= 3);
+
+    client.deleteByQuery("*:*", null);
+
+    ModifiableSolrParams p = params("cat_s", "cat_s", "num_d", "num_d");
+    String cat_s = p.get("cat_s");
+    String num_d = p.get("num_d");
+
+    clients.get(0).add( sdoc("id", "01", cat_s, "A", num_d, -1) ); // A wins count tie
+    clients.get(0).add( sdoc("id", "02", cat_s, "B", num_d, 3) );
+
+    clients.get(1).add( sdoc("id", "11", cat_s, "B", num_d, -5) ); // B highest count
+    clients.get(1).add( sdoc("id", "12", cat_s, "B", num_d, -11) );
+    clients.get(1).add( sdoc("id", "13", cat_s, "A", num_d, 7) );
+
+    clients.get(2).add( sdoc("id", "21", cat_s, "A", num_d, 17) ); // A highest count
+    clients.get(2).add( sdoc("id", "22", cat_s, "A", num_d, -19) );
+    clients.get(2).add( sdoc("id", "23", cat_s, "B", num_d, 11) );
+
+    client.commit();
+
+    // Shard responses should be A=1, B=2, A=2, merged should be "A=3, B=2"
+    // One shard will have _facet_={"refine":{"cat0":{"_l":["A"]}}} on the second phase
+
+    /****
+    // fake a refinement request... good for development/debugging
+    assertJQ(clients.get(1),
+        params(p, "q", "*:*",     "_facet_","{refine:{cat0:{_l:[A]}}}", "isShard","true", "distrib","false", "shards.purpose","2097216", "ids","11,12,13",
+            "json.facet", "{" +
+                "cat0:{type:terms, field:cat_s, sort:'count desc', limit:1, overrequest:0, refine:true}" +
+                "}"
+        )
+        , "facets=={foo:555}"
+    );
+    ****/
+
+    client.testJQ(params(p, "q", "*:*",
+        "json.facet", "{" +
+            "cat0:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0, refine:false}" +
+            "}"
+        )
+        , "facets=={ count:8" +
+            ", cat0:{ buckets:[ {val:A,count:3} ] }" +  // w/o overrequest and refinement, count is lower than it should be (we don't see the A from the middle shard)
+            "}"
+    );
+
+    client.testJQ(params(p, "q", "*:*",
+        "json.facet", "{" +
+            "cat0:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0, refine:true}" +
+            "}"
+        )
+        , "facets=={ count:8" +
+            ", cat0:{ buckets:[ {val:A,count:4} ] }" +  // w/o overrequest, we need refining to get the correct count.
+            "}"
+    );
+
+    // test that basic stats work for refinement
+    client.testJQ(params(p, "q", "*:*",
+        "json.facet", "{" +
+            "cat0:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0, refine:true, facet:{ stat1:'sum(${num_d})'}   }" +
+            "}"
+        )
+        , "facets=={ count:8" +
+            ", cat0:{ buckets:[ {val:A,count:4, stat1:4.0} ] }" +
+            "}"
+    );
+
+    // test sorting buckets by a different stat
+    client.testJQ(params(p, "q", "*:*",
+        "json.facet", "{" +
+            " cat0:{type:terms, field:${cat_s}, sort:'min1 asc', limit:1, overrequest:0, refine:false, facet:{ min1:'min(${num_d})'}   }" +
+            ",cat1:{type:terms, field:${cat_s}, sort:'min1 asc', limit:1, overrequest:0, refine:true,  facet:{ min1:'min(${num_d})'}   }" +
+            ",sum1:'sum(num_d)'" +  // make sure that root bucket stats aren't affected by refinement
+            "}"
+        )
+        , "facets=={ count:8" +
+            ", cat0:{ buckets:[ {val:A,count:3, min1:-19.0} ] }" +  // B wins in shard2, so we're missing the "A" count for that shar w/o refinement.
+            ", cat1:{ buckets:[ {val:A,count:4, min1:-19.0} ] }" +  // with refinement, we get the right count
+            ", sum1:2.0" +
+            "}"
+    );
+
+
+  }
 
 
 }
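
A small stand-alone sketch of the out-of-band _facet_ refinement payload introduced here; the payload shape mirrors the comment in testBasicRefinement above, and ObjectBuilder.fromJSON is the same noggit call FacetModule.prepare() uses to decode it, while the class name and the strict-JSON quoting are illustrative.

// FacetInfoSketch.java -- illustrative only; requires noggit on the classpath.
import java.io.IOException;
import java.util.List;
import java.util.Map;

import org.noggit.ObjectBuilder;

public class FacetInfoSketch {
  @SuppressWarnings("unchecked")
  public static void main(String[] args) throws IOException {
    // A second-phase shard request asking facet "cat0" to fully evaluate leaf bucket "A"
    // (the test comment writes the same thing in noggit's lenient syntax: {refine:{cat0:{_l:[A]}}}).
    String jfacet = "{\"refine\":{\"cat0\":{\"_l\":[\"A\"]}}}";

    Map<String, Object> facetInfo = (Map<String, Object>) ObjectBuilder.fromJSON(jfacet);
    Map<String, Object> refine = (Map<String, Object>) facetInfo.get("refine");
    Map<String, Object> cat0 = (Map<String, Object>) refine.get("cat0");
    List<Object> leaves = (List<Object>) cat0.get("_l");

    System.out.println(leaves);  // [A]
  }
}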


[40/46] lucene-solr:jira/solr-9959: SOLR-7452: facet refinement - don't generate domain if skipping bucket

Posted by ab...@apache.org.
SOLR-7452: facet refinement - don't generate domain if skipping bucket


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/725cd4e2
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/725cd4e2
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/725cd4e2

Branch: refs/heads/jira/solr-9959
Commit: 725cd4e2f546a71ccf43218ffc88739a3e05a260
Parents: 8a99675
Author: yonik <yo...@apache.org>
Authored: Wed Mar 22 19:53:50 2017 -0400
Committer: yonik <yo...@apache.org>
Committed: Wed Mar 22 19:53:50 2017 -0400

----------------------------------------------------------------------
 .../java/org/apache/solr/search/facet/FacetFieldProcessor.java | 6 +++---
 .../src/java/org/apache/solr/search/facet/FacetProcessor.java  | 6 +-----
 2 files changed, 4 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/725cd4e2/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
index 1ba252e..e8b234d 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
@@ -528,9 +528,9 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
   }
 
   protected SimpleOrderedMap<Object> refineFacets() throws IOException {
-    List leaves = asList(fcontext.facetInfo.get("_l"));
-    List<List> skip = asList(fcontext.facetInfo.get("_s"));
-    List<List> missing = asList(fcontext.facetInfo.get("_m"));
+    List leaves = asList(fcontext.facetInfo.get("_l"));        // We have not seen this bucket: do full faceting for this bucket, including all sub-facets
+    List<List> skip = asList(fcontext.facetInfo.get("_s"));    // We have seen this bucket, so skip stats on it, and skip sub-facets except for the specified sub-facets that should calculate specified buckets.
+    List<List> missing = asList(fcontext.facetInfo.get("_m")); // We have not seen this bucket, do full faceting for this bucket, and most sub-facets... but some sub-facets should only visit specified buckets.
 
     // For leaf refinements, we do full faceting for each leaf bucket.  Any sub-facets of these buckets will be fully evaluated.  Because of this, we should never
     // encounter leaf refinements that have sub-facets that return partial results.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/725cd4e2/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java b/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
index cf4d0fe..9f05d8e 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
@@ -366,13 +366,9 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
     }
   }
 
-  // TODO: rather than just have a raw "response", perhaps we should model as a bucket object that contains the response plus extra info?
   void fillBucket(SimpleOrderedMap<Object> bucket, Query q, DocSet result, boolean skip, Map<String,Object> facetInfo) throws IOException {
 
-    // TODO: we don't need the DocSet if we've already calculated everything during the first phase
-    boolean needDocSet = freq.getFacetStats().size() > 0 || freq.getSubFacets().size() > 0;
-
-    // TODO: put info in for the merger (like "skip=true"?) Maybe we don't need to if we leave out all extraneous info?
+    boolean needDocSet = (skip==false && freq.getFacetStats().size() > 0) || freq.getSubFacets().size() > 0;
 
     int count;
 


[46/46] lucene-solr:jira/solr-9959: Merge branch 'master' into jira/solr-9959

Posted by ab...@apache.org.
Merge branch 'master' into jira/solr-9959


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d7772e73
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d7772e73
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d7772e73

Branch: refs/heads/jira/solr-9959
Commit: d7772e731e9ac916894dcc6ffbe01bd42e2a327a
Parents: 8e0c230 4edfc1a
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Thu Mar 23 12:49:22 2017 +0100
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Thu Mar 23 12:49:22 2017 +0100

----------------------------------------------------------------------
 dev-tools/scripts/buildAndPushRelease.py        |   64 ++
 lucene/CHANGES.txt                              |   13 +
 .../byTask/feeds/EnwikiContentSource.java       |    2 +-
 .../benchmark/byTask/tasks/ForceMergeTask.java  |    2 +-
 .../org/apache/lucene/util/QueryBuilder.java    |   46 +-
 .../java/org/apache/lucene/util/Version.java    |    7 +
 .../org/apache/lucene/util/fst/TestFSTs.java    |    4 +-
 .../lucene/search/join/TestBlockJoin.java       |    2 +-
 .../search/TestDiversifiedTopDocsCollector.java |    2 +-
 .../queries/function/TestValueSources.java      |    6 +-
 .../queryparser/classic/QueryParserBase.java    |    6 +-
 .../standard/parser/StandardSyntaxParser.java   |    8 +-
 .../standard/parser/StandardSyntaxParser.jj     |    8 +-
 .../surround/parser/QueryParser.java            |    2 +-
 .../queryparser/surround/parser/QueryParser.jj  |    2 +-
 .../xml/builders/PointRangeQueryBuilder.java    |   16 +-
 .../queryparser/classic/TestQueryParser.java    |    2 +-
 .../xml/CoreParserTestIndexData.java            |    2 +-
 .../lucene/document/LatLonDocValuesField.java   |    2 +-
 .../lucene/spatial3d/Geo3DDocValuesField.java   |    2 +-
 solr/CHANGES.txt                                |   79 ++
 .../apache/solr/schema/ICUCollationField.java   |    5 +-
 .../analytics/util/RangeEndpointCalculator.java |    8 +-
 .../handler/dataimport/MailEntityProcessor.java |    2 +-
 .../TikaLanguageIdentifierUpdateProcessor.java  |    5 +-
 .../apache/solr/ltr/feature/SolrFeature.java    |    3 +
 .../featureExamples/external_features.json      |    6 +
 .../solr/ltr/TestSelectiveWeightCreation.java   |    8 +-
 .../solr/ltr/feature/TestExternalFeatures.java  |   25 +-
 .../java/org/apache/solr/response/PageTool.java |    2 +-
 .../solr/response/VelocityResponseWriter.java   |    4 +-
 .../org/apache/solr/cloud/ElectionContext.java  |    4 +-
 .../java/org/apache/solr/cloud/Overseer.java    |    8 +-
 .../solr/cloud/OverseerNodePrioritizer.java     |    7 +-
 .../solr/cloud/OverseerTaskProcessor.java       |    7 +-
 .../org/apache/solr/cloud/SyncStrategy.java     |    4 +-
 .../apache/solr/core/QuerySenderListener.java   |   22 +-
 .../org/apache/solr/core/RequestParams.java     |    2 +-
 .../org/apache/solr/handler/BlobHandler.java    |   15 +-
 .../org/apache/solr/handler/CdcrReplicator.java |   15 +-
 .../org/apache/solr/handler/GraphHandler.java   |    4 +-
 .../org/apache/solr/handler/IndexFetcher.java   |    6 +-
 .../apache/solr/handler/PingRequestHandler.java |    6 +-
 .../apache/solr/handler/ReplicationHandler.java |    2 +-
 .../org/apache/solr/handler/SQLHandler.java     |   37 +-
 .../org/apache/solr/handler/StreamHandler.java  |   42 +-
 .../solr/handler/admin/LukeRequestHandler.java  |    2 +-
 .../solr/handler/admin/ThreadDumpHandler.java   |    5 +-
 .../component/IterativeMergeStrategy.java       |    4 +-
 .../component/MoreLikeThisComponent.java        |    4 +-
 .../solr/handler/component/QueryComponent.java  |    5 +-
 .../component/QueryElevationComponent.java      |    4 +-
 .../handler/component/RangeFacetRequest.java    |    8 +-
 .../handler/component/RealTimeGetComponent.java |   55 +-
 .../solr/handler/component/SearchHandler.java   |    5 +-
 .../solr/handler/loader/JavabinLoader.java      |    3 +-
 .../apache/solr/handler/loader/JsonLoader.java  |   10 +-
 .../apache/solr/handler/loader/XMLLoader.java   |   14 +-
 .../apache/solr/handler/sql/SolrEnumerator.java |    4 +-
 .../org/apache/solr/handler/sql/SolrTable.java  |   22 +-
 .../solr/index/SortingMergePolicyFactory.java   |    7 +-
 .../UninvertDocValuesMergePolicyFactory.java    |  218 ++++
 .../reporters/solr/SolrClusterReporter.java     |    4 +-
 .../org/apache/solr/parser/QueryParser.java     |  417 +++++--
 .../java/org/apache/solr/parser/QueryParser.jj  |  276 +++--
 .../apache/solr/parser/SolrQueryParserBase.java |  217 +++-
 .../org/apache/solr/request/SimpleFacets.java   |    4 +-
 .../apache/solr/request/json/RequestUtil.java   |    5 +-
 .../transform/BaseEditorialTransformer.java     |   36 +-
 .../solr/rest/schema/FieldTypeXmlAdapter.java   |   25 +-
 .../solr/schema/AbstractSubTypeFieldType.java   |   17 +-
 .../org/apache/solr/schema/BinaryField.java     |    9 +
 .../java/org/apache/solr/schema/BoolField.java  |    3 -
 .../org/apache/solr/schema/CollationField.java  |    5 +-
 .../org/apache/solr/schema/DatePointField.java  |   62 ++
 .../java/org/apache/solr/schema/EnumField.java  |    7 -
 .../org/apache/solr/schema/FieldProperties.java |   43 +-
 .../java/org/apache/solr/schema/FieldType.java  |   20 +-
 .../solr/schema/LatLonPointSpatialField.java    |    3 +-
 .../java/org/apache/solr/schema/LatLonType.java |   12 +-
 .../apache/solr/schema/NumericFieldType.java    |    4 +-
 .../java/org/apache/solr/schema/PointField.java |    4 -
 .../java/org/apache/solr/schema/PointType.java  |   14 +-
 .../apache/solr/schema/PrimitiveFieldType.java  |    4 +
 .../org/apache/solr/schema/SchemaField.java     |    7 +-
 .../java/org/apache/solr/schema/StrField.java   |    4 -
 .../java/org/apache/solr/schema/TrieField.java  |    3 -
 .../solr/search/CollapsingQParserPlugin.java    |   55 +-
 .../solr/search/ExtendedDismaxQParser.java      |  175 ++-
 .../java/org/apache/solr/search/Grouping.java   |    2 +-
 .../org/apache/solr/search/LuceneQParser.java   |    3 +
 .../apache/solr/search/LuceneQParserPlugin.java |    2 +
 .../QueryParserConfigurationException.java      |   24 +
 .../org/apache/solr/search/QueryParsing.java    |    1 +
 .../apache/solr/search/SolrIndexSearcher.java   |  269 +++--
 .../org/apache/solr/search/SolrQueryParser.java |    2 +-
 .../solr/search/facet/FacetFieldProcessor.java  |   68 +-
 .../facet/FacetFieldProcessorByArray.java       |    4 +
 .../FacetFieldProcessorByEnumTermsStream.java   |    2 +-
 .../apache/solr/search/facet/FacetModule.java   |  128 ++-
 .../solr/search/facet/FacetProcessor.java       |   25 +-
 .../apache/solr/search/facet/FacetQuery.java    |    6 +-
 .../apache/solr/search/facet/FacetRange.java    |   12 +-
 .../apache/solr/search/facet/FacetRequest.java  |    6 +-
 .../apache/solr/search/facet/LegacyFacet.java   |    8 +-
 .../TopGroupsShardRequestFactory.java           |    4 +-
 .../SearchGroupShardResponseProcessor.java      |    2 +-
 .../SearchGroupsResultTransformer.java          |   24 +-
 .../TopGroupsResultTransformer.java             |   12 +-
 .../apache/solr/search/mlt/CloudMLTQParser.java |    5 +-
 .../security/AutorizationEditOperation.java     |    2 +-
 .../solr/spelling/SpellCheckCollator.java       |    4 +-
 .../apache/solr/update/AddUpdateCommand.java    |    3 +-
 .../org/apache/solr/update/DocumentBuilder.java |   67 +-
 .../java/org/apache/solr/update/PeerSync.java   |   14 +-
 .../java/org/apache/solr/update/UpdateLog.java  |    5 +-
 .../org/apache/solr/update/VersionInfo.java     |    6 +-
 .../processor/AtomicUpdateDocumentMerger.java   |   15 +-
 .../update/processor/CdcrUpdateProcessor.java   |    3 +-
 .../processor/DistributedUpdateProcessor.java   |   28 +-
 ...BasedVersionConstraintsProcessorFactory.java |    4 +-
 .../TolerantUpdateProcessorFactory.java         |    2 +-
 .../org/apache/solr/util/DateMathParser.java    |    2 +-
 .../src/java/org/apache/solr/util/SolrCLI.java  |    3 +-
 .../org/apache/solr/util/SolrPluginUtils.java   |    4 +-
 .../collection1/conf/multiword-synonyms.txt     |   13 +
 .../solr/collection1/conf/schema-docValues.xml  |    1 +
 .../conf/schema-multiword-synonyms.xml          |   50 +
 .../conf/schema-unifiedhighlight.xml            |    4 +-
 .../test-files/solr/collection1/conf/schema.xml |   15 +-
 .../conf/solrconfig-managed-schema.xml          |   27 +-
 ...nfig-uninvertdocvaluesmergepolicyfactory.xml |   38 +
 .../solr/collection1/conf/synonyms.txt          |    2 +
 .../apache/solr/TestDistributedGrouping.java    |    2 -
 .../org/apache/solr/TestGroupingSearch.java     |    8 -
 .../solr/cloud/CdcrVersionReplicationTest.java  |    4 +-
 .../cloud/FullSolrCloudDistribCmdsTest.java     |   18 +-
 .../cloud/SegmentTerminateEarlyTestState.java   |    4 +-
 .../apache/solr/cloud/TestRandomFlRTGCloud.java |    2 +-
 .../apache/solr/cloud/TestSegmentSorting.java   |    2 -
 .../core/snapshots/TestSolrCloudSnapshots.java  |    2 +-
 .../core/snapshots/TestSolrCoreSnapshots.java   |    2 +-
 .../org/apache/solr/handler/TestSQLHandler.java |   31 +
 .../solr/handler/admin/StatsReloadRaceTest.java |   31 +-
 .../highlight/TestUnifiedSolrHighlighter.java   |   12 +-
 .../index/UninvertDocValuesMergePolicyTest.java |  243 +++++
 .../solr/rest/schema/TestBulkSchemaAPI.java     |   72 +-
 .../org/apache/solr/schema/PolyFieldTest.java   |   40 +-
 .../apache/solr/schema/SortableBinaryField.java |    3 +-
 .../org/apache/solr/search/LargeFieldTest.java  |  133 +++
 .../solr/search/TestCollapseQParserPlugin.java  |  218 ++--
 .../solr/search/TestExtendedDismaxParser.java   | 1032 +++++++++++++-----
 .../solr/search/TestMultiWordSynonyms.java      |  100 ++
 .../search/TestRandomCollapseQParserPlugin.java |    5 -
 .../apache/solr/search/TestSolrQueryParser.java |  565 ++++++++--
 .../search/facet/TestJsonFacetRefinement.java   |   98 ++
 .../solr/search/mlt/CloudMLTQParserTest.java    |   14 +-
 .../solr/update/DirectUpdateHandlerTest.java    |    6 +-
 .../apache/solr/update/DocumentBuilderTest.java |   71 +-
 .../solr/update/TestInPlaceUpdatesDistrib.java  |    5 +-
 .../update/TestInPlaceUpdatesStandalone.java    |    6 +-
 .../org/apache/solr/update/UpdateLogTest.java   |    6 +-
 solr/server/scripts/cloud-scripts/zkcli.bat     |    2 +-
 solr/server/scripts/cloud-scripts/zkcli.sh      |    2 +-
 .../basic_configs/conf/managed-schema           |   69 +-
 .../conf/managed-schema                         |   66 +-
 .../conf/managed-schema                         |   85 +-
 .../solr/client/solrj/impl/CloudSolrClient.java |    3 +-
 .../apache/solr/client/solrj/io/ModelCache.java |   10 +-
 .../org/apache/solr/client/solrj/io/Tuple.java  |   21 +-
 .../client/solrj/io/comp/FieldComparator.java   |    5 +
 .../solrj/io/comp/MultipleFieldComparator.java  |   22 +
 .../client/solrj/io/comp/StreamComparator.java  |    1 +
 .../client/solrj/io/eval/EqualsEvaluator.java   |    2 +-
 .../client/solrj/io/eval/FieldEvaluator.java    |   28 +-
 .../solrj/io/graph/GatherNodesStream.java       |    4 +-
 .../solrj/io/graph/ShortestPathStream.java      |    4 +-
 .../client/solrj/io/ops/GroupOperation.java     |    6 +-
 .../solrj/io/stream/CartesianProductStream.java |  301 +++++
 .../client/solrj/io/stream/CloudSolrStream.java |    9 +-
 .../client/solrj/io/stream/DaemonStream.java    |    9 +-
 .../client/solrj/io/stream/ExecutorStream.java  |    4 +-
 .../io/stream/FeaturesSelectionStream.java      |    7 +-
 .../client/solrj/io/stream/FetchStream.java     |    9 +-
 .../solr/client/solrj/io/stream/JDBCStream.java |    8 +-
 .../client/solrj/io/stream/ModelStream.java     |    6 +-
 .../client/solrj/io/stream/ParallelStream.java  |   13 +-
 .../client/solrj/io/stream/RandomStream.java    |   14 +-
 .../solr/client/solrj/io/stream/RankStream.java |    6 +-
 .../solrj/io/stream/ScoreNodesStream.java       |    6 +-
 .../solrj/io/stream/SignificantTermsStream.java |    4 +-
 .../client/solrj/io/stream/TextLogitStream.java |    7 +-
 .../client/solrj/io/stream/TopicStream.java     |   27 +-
 .../client/solrj/io/stream/UpdateStream.java    |    4 +-
 .../request/JavaBinUpdateRequestCodec.java      |    5 +-
 .../client/solrj/request/UpdateRequest.java     |    9 +-
 .../solr/common/ToleratedUpdateError.java       |   10 +-
 .../apache/solr/common/cloud/DocCollection.java |    2 +-
 .../solr/common/cloud/HashBasedRouter.java      |    8 +-
 .../apache/solr/common/params/CommonParams.java |    4 +
 .../common/params/ModifiableSolrParams.java     |   13 +
 .../apache/solr/common/params/ShardParams.java  |   24 +-
 .../apache/solr/common/params/TermsParams.java  |   12 +-
 .../solr/configsets/streaming/conf/schema.xml   |    1 +
 .../solr/client/solrj/SolrExampleTests.java     |   16 +-
 .../solrj/io/stream/StreamExpressionTest.java   |  132 +++
 .../stream/eval/AbsoluteValueEvaluatorTest.java |    8 +-
 .../solrj/io/stream/eval/AddEvaluatorTest.java  |    8 +-
 .../solrj/io/stream/eval/AndEvaluatorTest.java  |    8 +-
 .../io/stream/eval/CompoundEvaluatorTest.java   |    8 +-
 .../io/stream/eval/DivideEvaluatorTest.java     |   10 +-
 .../io/stream/eval/EqualsEvaluatorTest.java     |    8 +-
 .../stream/eval/ExclusiveOrEvaluatorTest.java   |    8 +-
 .../io/stream/eval/FieldEvaluatorTest.java      |  114 ++
 .../eval/GreaterThanEqualToEvaluatorTest.java   |    8 +-
 .../stream/eval/GreaterThanEvaluatorTest.java   |    8 +-
 .../eval/LessThanEqualToEvaluatorTest.java      |    8 +-
 .../io/stream/eval/LessThanEvaluatorTest.java   |    8 +-
 .../io/stream/eval/MultiplyEvaluatorTest.java   |    8 +-
 .../io/stream/eval/NaturalLogEvaluatorTest.java |    8 +-
 .../solrj/io/stream/eval/NotEvaluatorTest.java  |   10 +-
 .../solrj/io/stream/eval/OrEvaluatorTest.java   |    8 +-
 .../io/stream/eval/RawValueEvaluatorTest.java   |    8 +-
 .../io/stream/eval/SubtractEvaluatorTest.java   |    8 +-
 .../common/params/ModifiableSolrParamsTest.java |   26 +
 .../solr/common/params/SolrParamTest.java       |    4 +-
 .../java/org/apache/solr/SolrTestCaseJ4.java    |    2 +-
 solr/webapp/web/WEB-INF/web.xml                 |    2 +-
 228 files changed, 5660 insertions(+), 1615 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7772e73/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
----------------------------------------------------------------------
diff --cc solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
index 85cbc75,4f6a408..0d051bc
--- a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
@@@ -1104,12 -1075,13 +1104,12 @@@ public class ReplicationHandler extend
        String ss[] = s.split(",");
        List<String> l = new ArrayList<>();
        for (String s1 : ss) {
-         l.add(new Date(Long.valueOf(s1)).toString());
+         l.add(new Date(Long.parseLong(s1)).toString());
        }
 -      nl.add(key, l);
 +      return l;
      } else {
 -      nl.add(key, s);
 +      return s;
      }
 -
    }
  
    private List<String> getReplicateAfterStrings() {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7772e73/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7772e73/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7772e73/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7772e73/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7772e73/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7772e73/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7772e73/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
----------------------------------------------------------------------
diff --cc solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index e68de08,83df60f..1c30a6c
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@@ -18,7 -18,9 +18,8 @@@ package org.apache.solr.search
  
  import java.io.Closeable;
  import java.io.IOException;
+ import java.io.Reader;
  import java.lang.invoke.MethodHandles;
 -import java.net.URL;
  import java.nio.charset.StandardCharsets;
  import java.util.ArrayList;
  import java.util.Arrays;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7772e73/solr/core/src/java/org/apache/solr/search/facet/FacetModule.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7772e73/solr/core/src/java/org/apache/solr/update/PeerSync.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7772e73/solr/core/src/java/org/apache/solr/update/UpdateLog.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7772e73/solr/core/src/test/org/apache/solr/handler/admin/StatsReloadRaceTest.java
----------------------------------------------------------------------
diff --cc solr/core/src/test/org/apache/solr/handler/admin/StatsReloadRaceTest.java
index 7bf4939,7bf4939..c1bd408
--- a/solr/core/src/test/org/apache/solr/handler/admin/StatsReloadRaceTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/StatsReloadRaceTest.java
@@@ -17,7 -17,7 +17,6 @@@
  package org.apache.solr.handler.admin;
  
  import java.util.List;
--import java.util.Map;
  import java.util.Random;
  import java.util.concurrent.atomic.AtomicInteger;
  
@@@ -68,7 -68,7 +67,7 @@@ public class StatsReloadRaceTest extend
        boolean isCompleted;
        do {
          if (random.nextBoolean()) {
--          requestMbeans();
++          requestMetrics();
          } else {
            requestCoreStatus();
          }
@@@ -106,22 -106,22 +105,18 @@@
      return isCompleted;
    }
  
--  private void requestMbeans() throws Exception {
--    String stats = h.query(req(
--        CommonParams.QT, "/admin/mbeans",
--        "stats", "true"));
--
--    NamedList<NamedList<Object>> actualStats = SolrInfoMBeanHandler.fromXML(stats).get("CORE");
--    
--    for (Map.Entry<String, NamedList<Object>> tuple : actualStats) {
--      if (tuple.getKey().contains("earcher")) { // catches "searcher" and "Searcher@345345 blah"
--        NamedList<Object> searcherStats = tuple.getValue();
--        @SuppressWarnings("unchecked")
--        NamedList<Object> statsList = (NamedList<Object>)searcherStats.get("stats");
--        assertEquals("expect to have exactly one indexVersion at "+statsList, 1, statsList.getAll("indexVersion").size());
--        assertTrue(statsList.get("indexVersion") instanceof Long); 
--      }
--    }
++  private void requestMetrics() throws Exception {
++    SolrQueryResponse rsp = new SolrQueryResponse();
++    String registry = "solr.core." + h.coreName;
++    h.getCoreContainer().getRequestHandler("/admin/metrics").handleRequest(
++        req("prefix", "SEARCHER", "registry", registry, "compact", "true"), rsp);
++
++    NamedList values = rsp.getValues();
++    NamedList metrics = (NamedList)values.get("metrics");
++    metrics = (NamedList)metrics.get(registry);
++    String key = "SEARCHER.searcher.indexVersion";
++    assertNotNull(metrics.get(key));
++    assertTrue(metrics.get(key) instanceof Long);
    }
  
  }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7772e73/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java
----------------------------------------------------------------------
diff --cc solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java
index ff7979b,92bd6c0..6fc6e2d
--- a/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java
+++ b/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java
@@@ -28,7 -33,8 +33,8 @@@ import org.apache.lucene.search.Query
  import org.apache.lucene.search.TermInSetQuery;
  import org.apache.lucene.search.TermQuery;
  import org.apache.solr.SolrTestCaseJ4;
+ import org.apache.solr.common.params.MapSolrParams;
 -import org.apache.solr.core.SolrInfoMBean;
 +import org.apache.solr.metrics.MetricsMap;
  import org.apache.solr.parser.QueryParser;
  import org.apache.solr.query.FilterQuery;
  import org.apache.solr.request.SolrQueryRequest;


[05/46] lucene-solr:jira/solr-9959: SOLR-9516: Updating CHANGES.txt entry

Posted by ab...@apache.org.
SOLR-9516: Updating CHANGES.txt entry


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/2bce98b0
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/2bce98b0
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/2bce98b0

Branch: refs/heads/jira/solr-9959
Commit: 2bce98b0c162c5d8a815bc3e2ec32ba6d08c62fa
Parents: 9d56f13
Author: Ishan Chattopadhyaya <is...@apache.org>
Authored: Thu Mar 16 02:03:09 2017 +0530
Committer: Ishan Chattopadhyaya <is...@apache.org>
Committed: Thu Mar 16 02:07:11 2017 +0530

----------------------------------------------------------------------
 solr/CHANGES.txt | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2bce98b0/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 6829cd1..f1a12ea 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -255,7 +255,8 @@ Bug Fixes
 
 * SOLR-10184: Fix bin/solr so it can run properly on java9 (hossman, Uwe Schindler)
 
-* SOLR-9516: Admin UI (angular) didn't work with Kerberos (Cassandra Targett, Amrit Sarkar via Ishan Chattopadhyaya)
+* SOLR-9516: Admin UI (angular) now works with Kerberos, by excluding serving of /solr/libs/* through
+  SolrDispatchFilter. (Cassandra Targett, Amrit Sarkar via Ishan Chattopadhyaya)
 
 Optimizations
 ----------------------


[20/46] lucene-solr:jira/solr-9959: SOLR-10273: DocumentBuilder move longest field to last position

Posted by ab...@apache.org.
SOLR-10273: DocumentBuilder move longest field to last position


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/8fbd9f1e
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/8fbd9f1e
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/8fbd9f1e

Branch: refs/heads/jira/solr-9959
Commit: 8fbd9f1e403cc697f77d827cd1aa85876c8665ae
Parents: 4a55bc4
Author: David Smiley <ds...@apache.org>
Authored: Thu Mar 16 21:22:08 2017 -0400
Committer: David Smiley <ds...@apache.org>
Committed: Thu Mar 16 21:22:08 2017 -0400

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  4 ++
 .../org/apache/solr/update/DocumentBuilder.java | 60 +++++++++++++++++-
 .../apache/solr/update/DocumentBuilderTest.java | 67 ++++++++++++++++++++
 3 files changed, 130 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8fbd9f1e/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 9a5299c..dfe8d93 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -301,6 +301,10 @@ Optimizations
 * SOLR-10143: PointFields will create IndexOrDocValuesQuery when a field is both, indexed=true and docValues=true
   (Tomás Fernández Löbbe)
 
+* SOLR-10273: The field with the longest value (if it exceeds 4K) is moved to be last in the Lucene Document in order
+  to benefit from stored field optimizations in Lucene that can avoid reading it when it's not needed.  If the field is
+  multi-valued, they all move together to the end to retain order. (David Smiley)
+
 Other Changes
 ----------------------
 * SOLR-9980: Expose configVersion in core admin status (Jessica Cheng Mallet via Tomás Fernández Löbbe)

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8fbd9f1e/solr/core/src/java/org/apache/solr/update/DocumentBuilder.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/DocumentBuilder.java b/solr/core/src/java/org/apache/solr/update/DocumentBuilder.java
index e3d2011..b97af3b 100644
--- a/solr/core/src/java/org/apache/solr/update/DocumentBuilder.java
+++ b/solr/core/src/java/org/apache/solr/update/DocumentBuilder.java
@@ -16,12 +16,15 @@
  */
 package org.apache.solr.update;
 
+import java.util.Iterator;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Set;
 
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.util.BytesRef;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.SolrInputField;
@@ -33,10 +36,13 @@ import org.apache.solr.schema.SchemaField;
 import com.google.common.collect.Sets;
 
 /**
- *
+ * Builds a Lucene {@link Document} from a {@link SolrInputDocument}.
  */
 public class DocumentBuilder {
 
+  // accessible only for tests
+  static int MIN_LENGTH_TO_MOVE_LAST = Integer.getInteger("solr.docBuilder.minLengthToMoveLast", 4*1024); // internal setting
+
   /**
    * Add a field value to a given document.
    * @param doc Document that the field needs to be added to
@@ -227,6 +233,58 @@ public class DocumentBuilder {
         }
       }
     }
+
+    if (!forInPlaceUpdate) {
+      moveLargestFieldLast(out);
+    }
+    
     return out;
   }
+
+  /** Move the largest stored field last, because Lucene can avoid loading that one if it's not needed. */
+  private static void moveLargestFieldLast(Document doc) {
+    String largestField = null;
+    int largestFieldLen = -1;
+    boolean largestIsLast = true;
+    for (IndexableField field : doc) {
+      if (!field.fieldType().stored()) {
+        continue;
+      }
+      if (largestIsLast && !field.name().equals(largestField)) {
+        largestIsLast = false;
+      }
+      if (field.numericValue() != null) { // just ignore these as non-competitive (avoid toString'ing their number)
+        continue;
+      }
+      String strVal = field.stringValue();
+      if (strVal != null) {
+        if (strVal.length() > largestFieldLen) {
+          largestField = field.name();
+          largestFieldLen = strVal.length();
+          largestIsLast = true;
+        }
+      } else {
+        BytesRef bytesRef = field.binaryValue();
+        if (bytesRef != null && bytesRef.length > largestFieldLen) {
+          largestField = field.name();
+          largestFieldLen = bytesRef.length;
+          largestIsLast = true;
+        }
+      }
+    }
+    if (!largestIsLast && largestField != null && largestFieldLen > MIN_LENGTH_TO_MOVE_LAST) { // only bother if the value isn't tiny
+      LinkedList<IndexableField> addToEnd = new LinkedList<>();
+      Iterator<IndexableField> iterator = doc.iterator();
+      while (iterator.hasNext()) {
+        IndexableField field = iterator.next();
+        if (field.name().equals(largestField)) {
+          addToEnd.add(field);
+          iterator.remove(); // Document may not have "remove" but its iterator allows mutation
+        }
+      }
+      for (IndexableField field : addToEnd) {
+        doc.add(field);
+      }
+    }
+  }
 }
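
The 4K threshold above is read once from the "solr.docBuilder.minLengthToMoveLast" system property (see the Integer.getInteger call in the diff), so it can be tuned without a code change. A minimal sketch, assuming the property is set before DocumentBuilder is first loaded; the 8192 value is only an example:

    // Hypothetical tuning: raise the move-last threshold to 8 KB.
    // Equivalent to starting the JVM with -Dsolr.docBuilder.minLengthToMoveLast=8192;
    // it must be set before DocumentBuilder's static initializer runs.
    public final class DocBuilderTuning {
      public static void main(String[] args) {
        System.setProperty("solr.docBuilder.minLengthToMoveLast", "8192");
      }
    }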

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8fbd9f1e/solr/core/src/test/org/apache/solr/update/DocumentBuilderTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/DocumentBuilderTest.java b/solr/core/src/test/org/apache/solr/update/DocumentBuilderTest.java
index 2a78d6b..03dd17c 100644
--- a/solr/core/src/test/org/apache/solr/update/DocumentBuilderTest.java
+++ b/solr/core/src/test/org/apache/solr/update/DocumentBuilderTest.java
@@ -16,7 +16,14 @@
  */
 package org.apache.solr.update;
 
+import java.util.Iterator;
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;
+
+import com.carrotsearch.randomizedtesting.generators.RandomStrings;
 import org.apache.lucene.document.Document;
+import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrDocument;
@@ -25,6 +32,8 @@ import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.SolrInputField;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.schema.FieldType;
+import org.junit.After;
+import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
@@ -33,12 +42,23 @@ import org.junit.Test;
  *
  */
 public class DocumentBuilderTest extends SolrTestCaseJ4 {
+  static final int save_min_len = DocumentBuilder.MIN_LENGTH_TO_MOVE_LAST;
 
   @BeforeClass
   public static void beforeClass() throws Exception {
     initCore("solrconfig.xml", "schema.xml");
   }
 
+  @AfterClass
+  public static void afterClass() {
+    DocumentBuilder.MIN_LENGTH_TO_MOVE_LAST = save_min_len;
+  }
+
+  @After
+  public void afterTest() {
+    DocumentBuilder.MIN_LENGTH_TO_MOVE_LAST = save_min_len;
+  }
+
   @Test
   public void testBuildDocument() throws Exception 
   {
@@ -222,7 +242,54 @@ public class DocumentBuilderTest extends SolrTestCaseJ4 {
     sif2.setName("foo");
     assertFalse(assertSolrInputFieldEquals(sif1, sif2));
 
+  }
 
+  public void testMoveLargestLast() {
+    SolrInputDocument inDoc = new SolrInputDocument();
+    String TEXT_FLD = "text"; // not stored.  It won't be moved.  This value is the longest, however.
+    inDoc.addField(TEXT_FLD,
+        "NOT STORED|" + RandomStrings.randomAsciiOfLength(random(), 4 * DocumentBuilder.MIN_LENGTH_TO_MOVE_LAST));
+
+    String CAT_FLD = "cat"; // stored, multiValued
+    inDoc.addField(CAT_FLD,
+        "STORED V1|");
+    //  pretty long value
+    inDoc.addField(CAT_FLD,
+        "STORED V2|" + RandomStrings.randomAsciiOfLength(random(), 2 * DocumentBuilder.MIN_LENGTH_TO_MOVE_LAST));
+    inDoc.addField(CAT_FLD,
+        "STORED V3|" + RandomStrings.randomAsciiOfLength(random(), DocumentBuilder.MIN_LENGTH_TO_MOVE_LAST));
+
+    String SUBJECT_FLD = "subject"; // stored.  This value is long, but not long enough.
+    inDoc.addField(SUBJECT_FLD,
+        "2ndplace|" + RandomStrings.randomAsciiOfLength(random(), DocumentBuilder.MIN_LENGTH_TO_MOVE_LAST));
+
+    Document outDoc = DocumentBuilder.toDocument(inDoc, h.getCore().getLatestSchema());
+
+    // filter outDoc by stored fields; convert to list.
+    List<IndexableField> storedFields = StreamSupport.stream(outDoc.spliterator(), false)
+        .filter(f -> f.fieldType().stored()).collect(Collectors.toList());
+    // clip to last 3.  We expect these to be for CAT_FLD
+    storedFields = storedFields.subList(storedFields.size() - 3, storedFields.size());
+
+    Iterator<IndexableField> fieldIterator = storedFields.iterator();
+    IndexableField field;
+
+    // Test that we retained the particular value ordering, even though the 2nd of three was longest
+
+    assertTrue(fieldIterator.hasNext());
+    field = fieldIterator.next();
+    assertEquals(CAT_FLD, field.name());
+    assertTrue(field.stringValue().startsWith("STORED V1|"));
+
+    assertTrue(fieldIterator.hasNext());
+    field = fieldIterator.next();
+    assertEquals(CAT_FLD, field.name());
+    assertTrue(field.stringValue().startsWith("STORED V2|"));
+
+    assertTrue(fieldIterator.hasNext());
+    field = fieldIterator.next();
+    assertEquals(CAT_FLD, field.name());
+    assertTrue(field.stringValue().startsWith("STORED V3|"));
   }
 
 }


[26/46] lucene-solr:jira/solr-9959: SOLR-10218: The Schema API commands add-field-type and replace-field-type improperly specify SimilarityFactory params

Posted by ab...@apache.org.
SOLR-10218: The Schema API commands add-field-type and replace-field-type improperly specify SimilarityFactory params


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/9b57545d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/9b57545d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/9b57545d

Branch: refs/heads/jira/solr-9959
Commit: 9b57545df01fd0f20fd473e80d4fba621a51abf9
Parents: 258fddb
Author: Steve Rowe <sa...@apache.org>
Authored: Sat Mar 18 00:00:59 2017 -0400
Committer: Steve Rowe <sa...@apache.org>
Committed: Sat Mar 18 00:00:59 2017 -0400

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  3 +
 .../solr/rest/schema/FieldTypeXmlAdapter.java   | 25 ++++++-
 .../solr/rest/schema/TestBulkSchemaAPI.java     | 72 +++++++++++++++++++-
 3 files changed, 97 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9b57545d/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 75ac5bb..b7e55cc 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -305,6 +305,9 @@ Bug Fixes
 
 * SOLR-10237: Poly-fields should work with subfields that have docValues=true (Tomás Fernández Löbbe, David Smiley)
 
+* SOLR-10218: The Schema API commands "add-field-type" and "replace-field-type" improperly specify SimilarityFactory params.
+  (Benjamin Deininger, Troy Mohl, Steve Rowe)
+
 Optimizations
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9b57545d/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java b/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java
index a674040..d25ab89 100644
--- a/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java
+++ b/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java
@@ -25,7 +25,9 @@ import javax.xml.parsers.ParserConfigurationException;
 
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.schema.IndexSchema;
+import org.apache.solr.schema.SimilarityFactory;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 import org.w3c.dom.Node;
@@ -83,9 +85,30 @@ public class FieldTypeXmlAdapter {
           jsonFieldName+" not a "+jsonField.getClass().getName());
 
     Element similarity = doc.createElement("similarity");
-    appendAttrs(similarity, (Map<String,?>)jsonField);
+    Map<String,?> config = (Map<String,?>)jsonField;
+    similarity.setAttribute(SimilarityFactory.CLASS_NAME, (String)config.remove(SimilarityFactory.CLASS_NAME));
+    for (Map.Entry<String,?> entry : config.entrySet()) {
+      Object val = entry.getValue();
+      if (val != null) {
+        Element child = doc.createElement(classToXmlTag(val.getClass()));
+        child.setAttribute(CommonParams.NAME, entry.getKey());
+        child.setTextContent(entry.getValue().toString());
+        similarity.appendChild(child);
+      }
+    }
     return similarity;
   }
+
+  /** Convert types produced by noggit's ObjectBuilder (Boolean, Double, Long, String) to plugin param XML tags. */
+  protected static String classToXmlTag(Class<?> clazz) {
+    switch (clazz.getSimpleName()) {
+      case "Boolean": return "bool";
+      case "Double":  return "double";
+      case "Long":    return "long";
+      case "String":  return "str";
+    }
+    throw new SolrException(ErrorCode.BAD_REQUEST, "Unsupported object type '" + clazz.getSimpleName() + "'");
+  }
   
   @SuppressWarnings("unchecked")
   protected static Element transformAnalyzer(Document doc, Map<String,?> json, String jsonFieldName, String analyzerType) {
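
To make the mapping in classToXmlTag concrete, a small standalone sketch (not the Solr class itself) that mirrors the switch above and prints the XML param tags the adapter would emit; the parameter names and values are hypothetical:

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class SimilarityParamTagSketch {
      // Mirrors classToXmlTag: JSON scalar type -> plugin param XML tag.
      static String tagFor(Object val) {
        if (val instanceof Boolean) return "bool";
        if (val instanceof Double)  return "double";
        if (val instanceof Long)    return "long";
        if (val instanceof String)  return "str";
        throw new IllegalArgumentException("Unsupported type: " + val.getClass().getSimpleName());
      }

      public static void main(String[] args) {
        Map<String,Object> params = new LinkedHashMap<>();
        params.put("k1", 2.25d);                 // prints <double name="k1">2.25</double>
        params.put("discountOverlaps", false);   // prints <bool name="discountOverlaps">false</bool>
        params.forEach((name, val) -> System.out.println(
            "<" + tagFor(val) + " name=\"" + name + "\">" + val + "</" + tagFor(val) + ">"));
      }
    }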

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9b57545d/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java b/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java
index e2dc2bf..166d1fc 100644
--- a/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java
+++ b/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java
@@ -18,6 +18,7 @@ package org.apache.solr.rest.schema;
 
 import org.apache.commons.io.FileUtils;
 
+import org.apache.lucene.search.similarities.DFISimilarity;
 import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper;
 import org.apache.lucene.search.similarities.BM25Similarity;
 import org.apache.lucene.misc.SweetSpotSimilarity;
@@ -42,10 +43,12 @@ import java.io.File;
 import java.io.StringReader;
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.function.Consumer;
 
 
 public class TestBulkSchemaAPI extends RestTestBase {
@@ -798,6 +801,68 @@ public class TestBulkSchemaAPI extends RestTestBase {
     assertNull(map.get("errors"));
   }
 
+  public void testSimilarityParser() throws Exception {
+    RestTestHarness harness = restTestHarness;
+
+    final float k1 = 2.25f;
+    final float b = 0.33f;
+
+    String fieldTypeName = "MySimilarityField";
+    String fieldName = "similarityTestField";
+    String payload = "{\n" +
+        "  'add-field-type' : {" +
+        "    'name' : '" + fieldTypeName + "',\n" +
+        "    'class':'solr.TextField',\n" +
+        "    'analyzer' : {'tokenizer':{'class':'solr.WhitespaceTokenizerFactory'}},\n" +
+        "    'similarity' : {'class':'org.apache.solr.search.similarities.BM25SimilarityFactory', 'k1':"+k1+", 'b':"+b+" }\n" +
+        "  },\n"+
+        "  'add-field' : {\n" +
+        "    'name':'" + fieldName + "',\n" +
+        "    'type': 'MySimilarityField',\n" +
+        "    'stored':true,\n" +
+        "    'indexed':true\n" +
+        "  }\n" +
+        "}\n";
+
+    String response = harness.post("/schema?wt=json&indent=on", json(payload));
+
+    Map map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
+    assertNull(response, map.get("errors"));
+
+    Map fields = getObj(harness, fieldName, "fields");
+    assertNotNull("field " + fieldName + " not created", fields);
+    
+    assertFieldSimilarity(fieldName, BM25Similarity.class,
+       sim -> assertEquals("Unexpected k1", k1, sim.getK1(), .001),
+       sim -> assertEquals("Unexpected b", b, sim.getB(), .001));
+
+    final String independenceMeasure = "Saturated";
+    final boolean discountOverlaps = false; 
+    payload = "{\n" +
+        "  'replace-field-type' : {" +
+        "    'name' : '" + fieldTypeName + "',\n" +
+        "    'class':'solr.TextField',\n" +
+        "    'analyzer' : {'tokenizer':{'class':'solr.WhitespaceTokenizerFactory'}},\n" +
+        "    'similarity' : {\n" +
+        "      'class':'org.apache.solr.search.similarities.DFISimilarityFactory',\n" +
+        "      'independenceMeasure':'" + independenceMeasure + "',\n" +
+        "      'discountOverlaps':" + discountOverlaps + "\n" +
+        "     }\n" +
+        "  }\n"+
+        "}\n";
+
+    response = harness.post("/schema?wt=json&indent=on", json(payload));
+
+    map = (Map)ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
+    assertNull(response, map.get("errors"));
+    fields = getObj(harness, fieldName, "fields");
+    assertNotNull("field " + fieldName + " not created", fields);
+
+    assertFieldSimilarity(fieldName, DFISimilarity.class,
+        sim -> assertEquals("Unexpected independenceMeasure", independenceMeasure, sim.getIndependence().toString()),
+        sim -> assertEquals("Unexpected discountedOverlaps", discountOverlaps, sim.getDiscountOverlaps()));
+  }
+
   public static Map getObj(RestTestHarness restHarness, String fld, String key) throws Exception {
     Map map = getRespMap(restHarness);
     List l = (List) ((Map)map.get("schema")).get(key);
@@ -842,8 +907,11 @@ public class TestBulkSchemaAPI extends RestTestBase {
 
   /**
    * whitebox checks the Similarity for the specified field according to {@link SolrCore#getLatestSchema}
+   * 
+   * Executes each of the specified Similarity-accepting validators.
    */
-  private static void assertFieldSimilarity(String fieldname, Class<? extends Similarity> expected) {
+  @SafeVarargs
+  private static <T extends Similarity> void assertFieldSimilarity(String fieldname, Class<T> expected, Consumer<T>... validators) {
     CoreContainer cc = jetty.getCoreContainer();
     try (SolrCore core = cc.getCore("collection1")) {
       SimilarityFactory simfac = core.getLatestSchema().getSimilarityFactory();
@@ -861,7 +929,7 @@ public class TestBulkSchemaAPI extends RestTestBase {
                  mainSim instanceof PerFieldSimilarityWrapper);
       Similarity fieldSim = ((PerFieldSimilarityWrapper)mainSim).get(fieldname);
       assertEquals("wrong sim for field=" + fieldname, expected, fieldSim.getClass());
-      
+      Arrays.asList(validators).forEach(v -> v.accept((T)fieldSim));
     }
   }
 }


[39/46] lucene-solr:jira/solr-9959: SOLR-9986: Add javadoc to DatePointField class

Posted by ab...@apache.org.
SOLR-9986: Add javadoc to DatePointField class


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/8a996753
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/8a996753
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/8a996753

Branch: refs/heads/jira/solr-9959
Commit: 8a996753920170ac1e6e8960d6b63848ccc1ea44
Parents: 6786089
Author: Tomas Fernandez Lobbe <tf...@apache.org>
Authored: Wed Mar 22 10:52:14 2017 -0700
Committer: Tomas Fernandez Lobbe <tf...@apache.org>
Committed: Wed Mar 22 10:52:34 2017 -0700

----------------------------------------------------------------------
 .../org/apache/solr/schema/DatePointField.java  | 62 ++++++++++++++++++++
 1 file changed, 62 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8a996753/solr/core/src/java/org/apache/solr/schema/DatePointField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/DatePointField.java b/solr/core/src/java/org/apache/solr/schema/DatePointField.java
index 377f571..50f85e3 100644
--- a/solr/core/src/java/org/apache/solr/schema/DatePointField.java
+++ b/solr/core/src/java/org/apache/solr/schema/DatePointField.java
@@ -36,8 +36,70 @@ import org.apache.lucene.util.mutable.MutableValueDate;
 import org.apache.lucene.util.mutable.MutableValueLong;
 import org.apache.solr.search.QParser;
 import org.apache.solr.uninverting.UninvertingReader;
+import org.apache.solr.update.processor.TimestampUpdateProcessorFactory;
 import org.apache.solr.util.DateMathParser;
 
+/**
+ * FieldType that can represent any Date/Time with millisecond precision.
+ * <p>
+ * Date Format for the XML, incoming and outgoing:
+ * </p>
+ * <blockquote>
+ * A date field shall be of the form 1995-12-31T23:59:59Z
+ * The trailing "Z" designates UTC time and is mandatory
+ * (See below for an explanation of UTC).
+ * Optional fractional seconds are allowed, as long as they do not end
+ * in a trailing 0 (but any precision beyond milliseconds will be ignored).
+ * All other parts are mandatory.
+ * </blockquote>
+ * <p>
+ * This format was derived to be standards compliant (ISO 8601) and is a more
+ * restricted form of the
+ * <a href="http://www.w3.org/TR/xmlschema-2/#dateTime-canonical-representation">canonical
+ * representation of dateTime</a> from XML schema part 2.  Examples...
+ * </p>
+ * <ul>
+ *   <li>1995-12-31T23:59:59Z</li>
+ *   <li>1995-12-31T23:59:59.9Z</li>
+ *   <li>1995-12-31T23:59:59.99Z</li>
+ *   <li>1995-12-31T23:59:59.999Z</li>
+ * </ul>
+ * <p>
+ * Note that <code>DatePointField</code> is lenient with regards to parsing fractional
+ * seconds that end in trailing zeros and will ensure that those values
+ * are indexed in the correct canonical format.
+ * </p>
+ * <p>
+ * This FieldType also supports incoming "Date Math" strings for computing
+ * values by adding/rounding intervals of time relative to either an explicit
+ * datetime (in the format specified above) or the literal string "NOW",
+ * ie: "NOW+1YEAR", "NOW/DAY", "1995-12-31T23:59:59.999Z+5MINUTES", etc...
+ * -- see {@link DateMathParser} for more examples.
+ * </p>
+ * <p>
+ * <b>NOTE:</b> Although it is possible to configure a <code>DatePointField</code>
+ * instance with a default value of "<code>NOW</code>" to compute a timestamp
+ * of when the document was indexed, this is not advisable when using SolrCloud
+ * since each replica of the document may compute a slightly different value.
+ * {@link TimestampUpdateProcessorFactory} is recommended instead.
+ * </p>
+ *
+ * <p>
+ * Explanation of "UTC"...
+ * </p>
+ * <blockquote>
+ * "In 1970 the Coordinated Universal Time system was devised by an
+ * international advisory group of technical experts within the International
+ * Telecommunication Union (ITU).  The ITU felt it was best to designate a
+ * single abbreviation for use in all languages in order to minimize
+ * confusion.  Since unanimous agreement could not be achieved on using
+ * either the English word order, CUT, or the French word order, TUC, the
+ * acronym UTC was chosen as a compromise."
+ * </blockquote>
+ *
+ * @see TrieDateField
+ * @see PointField
+ */
 public class DatePointField extends PointField implements DateValueFieldType {
 
   public DatePointField() {
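
As a quick illustration of the formats and date math the javadoc above describes, a hedged SolrJ sketch; the field name "timestamp" and the ranges are made up for the example:

    import org.apache.solr.client.solrj.SolrQuery;

    public class DateMathQuerySketch {
      public static void main(String[] args) {
        SolrQuery q = new SolrQuery("*:*");
        // explicit datetime: canonical form with the mandatory trailing Z
        q.addFilterQuery("timestamp:[1995-12-31T23:59:59.999Z TO *]");
        // date math relative to NOW: the last day, rounded down to the hour
        q.addFilterQuery("timestamp:[NOW/HOUR-1DAY TO NOW/HOUR]");
        System.out.println(q);
      }
    }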


[45/46] lucene-solr:jira/solr-9959: SOLR-6615: use constants for 'sort', 'distrib'

Posted by ab...@apache.org.
SOLR-6615: use constants for 'sort', 'distrib'


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/4edfc1a9
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/4edfc1a9
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/4edfc1a9

Branch: refs/heads/jira/solr-9959
Commit: 4edfc1a9c7b877a23cffbf5f73a2728173fa663d
Parents: 583fec1
Author: Noble Paul <no...@apache.org>
Authored: Thu Mar 23 18:12:20 2017 +1030
Committer: Noble Paul <no...@apache.org>
Committed: Thu Mar 23 18:12:20 2017 +1030

----------------------------------------------------------------------
 .../solr/response/VelocityResponseWriter.java   |  4 +++-
 .../org/apache/solr/cloud/SyncStrategy.java     |  4 +++-
 .../apache/solr/core/QuerySenderListener.java   | 22 +++++++++++---------
 .../org/apache/solr/handler/BlobHandler.java    |  3 ++-
 .../org/apache/solr/handler/GraphHandler.java   |  4 +++-
 .../apache/solr/handler/PingRequestHandler.java |  6 ++++--
 .../org/apache/solr/handler/StreamHandler.java  |  3 ++-
 .../component/IterativeMergeStrategy.java       |  4 +++-
 .../component/MoreLikeThisComponent.java        |  4 +++-
 .../handler/component/RealTimeGetComponent.java |  3 ++-
 .../solr/handler/component/SearchHandler.java   |  5 +++--
 .../org/apache/solr/handler/sql/SolrTable.java  | 11 +++++-----
 .../solr/index/SortingMergePolicyFactory.java   |  7 +++----
 .../org/apache/solr/request/SimpleFacets.java   |  4 +++-
 .../apache/solr/request/json/RequestUtil.java   |  5 +++--
 .../solr/search/CollapsingQParserPlugin.java    | 15 ++++++-------
 .../apache/solr/search/facet/FacetRequest.java  |  3 ++-
 .../apache/solr/search/facet/LegacyFacet.java   |  8 ++++---
 .../java/org/apache/solr/update/PeerSync.java   | 11 +++++-----
 .../processor/DistributedUpdateProcessor.java   |  3 ++-
 .../src/java/org/apache/solr/util/SolrCLI.java  |  3 ++-
 .../apache/solr/client/solrj/io/ModelCache.java |  3 ++-
 .../solrj/io/graph/GatherNodesStream.java       |  4 +++-
 .../solrj/io/graph/ShortestPathStream.java      |  4 +++-
 .../client/solrj/io/ops/GroupOperation.java     |  6 ++++--
 .../client/solrj/io/stream/CloudSolrStream.java |  9 +++++---
 .../io/stream/FeaturesSelectionStream.java      |  3 ++-
 .../client/solrj/io/stream/FetchStream.java     |  3 ++-
 .../solr/client/solrj/io/stream/JDBCStream.java |  8 ++++---
 .../client/solrj/io/stream/ParallelStream.java  | 13 +++++++-----
 .../client/solrj/io/stream/RandomStream.java    | 14 +++++++------
 .../solr/client/solrj/io/stream/RankStream.java |  6 ++++--
 .../solrj/io/stream/ScoreNodesStream.java       |  6 ++++--
 .../solrj/io/stream/SignificantTermsStream.java |  4 +++-
 .../client/solrj/io/stream/TextLogitStream.java |  3 ++-
 .../client/solrj/io/stream/TopicStream.java     | 10 +++++----
 .../apache/solr/common/params/TermsParams.java  | 12 ++++++-----
 37 files changed, 149 insertions(+), 91 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java
----------------------------------------------------------------------
diff --git a/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java b/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java
index 7a0f2a1..e7cc9b7 100644
--- a/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java
+++ b/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java
@@ -58,6 +58,8 @@ import org.apache.velocity.tools.generic.SortTool;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.SORT;
+
 public class VelocityResponseWriter implements QueryResponseWriter, SolrCoreAware {
   // init param names, these are _only_ loaded at init time (no per-request control of these)
   //   - multiple different named writers could be created with different init params
@@ -204,7 +206,7 @@ public class VelocityResponseWriter implements QueryResponseWriter, SolrCoreAwar
     context.put("esc", new EscapeTool());
     context.put("date", new ComparisonDateTool());
     context.put("list", new ListTool());
-    context.put("sort", new SortTool());
+    context.put(SORT, new SortTool());
 
     MathTool mathTool = new MathTool();
     mathTool.configure(toolConfig);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/core/src/java/org/apache/solr/cloud/SyncStrategy.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/SyncStrategy.java b/solr/core/src/java/org/apache/solr/cloud/SyncStrategy.java
index 6356da7..386fef9 100644
--- a/solr/core/src/java/org/apache/solr/cloud/SyncStrategy.java
+++ b/solr/core/src/java/org/apache/solr/cloud/SyncStrategy.java
@@ -44,6 +44,8 @@ import org.apache.solr.update.UpdateShardHandler;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.DISTRIB;
+
 public class SyncStrategy {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
@@ -258,7 +260,7 @@ public class SyncStrategy {
     sreq.actualShards = sreq.shards;
     sreq.params = new ModifiableSolrParams();
     sreq.params.set("qt","/get");
-    sreq.params.set("distrib",false);
+    sreq.params.set(DISTRIB,false);
     sreq.params.set("getVersions",Integer.toString(nUpdates));
     sreq.params.set("sync",leaderUrl);
     

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/core/src/java/org/apache/solr/core/QuerySenderListener.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/QuerySenderListener.java b/solr/core/src/java/org/apache/solr/core/QuerySenderListener.java
index a5cda61..9037ef3 100644
--- a/solr/core/src/java/org/apache/solr/core/QuerySenderListener.java
+++ b/solr/core/src/java/org/apache/solr/core/QuerySenderListener.java
@@ -16,21 +16,23 @@
  */
 package org.apache.solr.core;
 
+import java.lang.invoke.MethodHandles;
+import java.util.List;
+
+import org.apache.solr.common.params.CommonParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.request.SolrRequestInfo;
 import org.apache.solr.response.ResultContext;
+import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.search.DocIterator;
+import org.apache.solr.search.DocList;
 import org.apache.solr.search.SolrIndexSearcher;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.solr.search.DocList;
-import org.apache.solr.search.DocIterator;
-import org.apache.solr.common.params.CommonParams;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.request.LocalSolrQueryRequest;
-import org.apache.solr.response.SolrQueryResponse;
 
-import java.lang.invoke.MethodHandles;
-import java.util.List;
+import static org.apache.solr.common.params.CommonParams.DISTRIB;
 
 /**
  *
@@ -55,8 +57,8 @@ public class QuerySenderListener extends AbstractSolrEventListener {
         // bind the request to a particular searcher (the newSearcher)
         NamedList params = addEventParms(currentSearcher, nlst);
         // for this, we default to distrib = false
-        if (params.get("distrib") == null) {
-          params.add("distrib", false);
+        if (params.get(DISTRIB) == null) {
+          params.add(DISTRIB, false);
         }
         req = new LocalSolrQueryRequest(getCore(),params) {
           @Override public SolrIndexSearcher getSearcher() { return searcher; }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/core/src/java/org/apache/solr/handler/BlobHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/BlobHandler.java b/solr/core/src/java/org/apache/solr/handler/BlobHandler.java
index 7e97f59..052fcfe 100644
--- a/solr/core/src/java/org/apache/solr/handler/BlobHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/BlobHandler.java
@@ -66,6 +66,7 @@ import org.slf4j.LoggerFactory;
 import static java.util.Collections.singletonMap;
 import static org.apache.solr.common.params.CommonParams.ID;
 import static org.apache.solr.common.params.CommonParams.JSON;
+import static org.apache.solr.common.params.CommonParams.SORT;
 import static org.apache.solr.common.params.CommonParams.VERSION;
 import static org.apache.solr.common.util.Utils.makeMap;
 
@@ -204,7 +205,7 @@ public class BlobHandler extends RequestHandlerBase implements PluginInfoInitial
             new MapSolrParams((Map) makeMap(
                 "q", StrUtils.formatString(q, blobName, version),
                 "fl", "id,size,version,timestamp,blobName,md5",
-                "sort", "version desc"))
+                SORT, "version desc"))
             , rsp);
       }
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/core/src/java/org/apache/solr/handler/GraphHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/GraphHandler.java b/solr/core/src/java/org/apache/solr/handler/GraphHandler.java
index 3b52154..6d41d83 100644
--- a/solr/core/src/java/org/apache/solr/handler/GraphHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/GraphHandler.java
@@ -57,6 +57,8 @@ import org.apache.solr.util.plugin.SolrCoreAware;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.SORT;
+
 public class GraphHandler extends RequestHandlerBase implements SolrCoreAware, PermissionNameProvider {
 
   private StreamFactory streamFactory = new StreamFactory();
@@ -118,7 +120,7 @@ public class GraphHandler extends RequestHandlerBase implements SolrCoreAware, P
         .withFunctionName("shortestPath", ShortestPathStream.class)
         .withFunctionName("gatherNodes", GatherNodesStream.class)
         .withFunctionName("nodes", GatherNodesStream.class)
-        .withFunctionName("sort", SortStream.class)
+        .withFunctionName(SORT, SortStream.class)
         .withFunctionName("scoreNodes", ScoreNodesStream.class)
         .withFunctionName("random", RandomStream.class)
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/core/src/java/org/apache/solr/handler/PingRequestHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/PingRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/PingRequestHandler.java
index 8230bf5..0cd9e1d 100644
--- a/solr/core/src/java/org/apache/solr/handler/PingRequestHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/PingRequestHandler.java
@@ -38,6 +38,8 @@ import org.apache.solr.util.plugin.SolrCoreAware;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.DISTRIB;
+
 /**
  * Ping Request Handler for reporting SolrCore health to a Load Balancer.
  *
@@ -180,10 +182,10 @@ public class PingRequestHandler extends RequestHandlerBase implements SolrCoreAw
     
     // in this case, we want to default distrib to false so
     // we only ping the single node
-    Boolean distrib = params.getBool("distrib");
+    Boolean distrib = params.getBool(DISTRIB);
     if (distrib == null)   {
       ModifiableSolrParams mparams = new ModifiableSolrParams(params);
-      mparams.set("distrib", false);
+      mparams.set(DISTRIB, false);
       req.setParams(mparams);
     }
     

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
index a25ede4..5d10664 100644
--- a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
@@ -81,6 +81,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static org.apache.solr.common.params.CommonParams.ID;
+import static org.apache.solr.common.params.CommonParams.SORT;
 
 public class StreamHandler extends RequestHandlerBase implements SolrCoreAware, PermissionNameProvider {
 
@@ -148,7 +149,7 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
       .withFunctionName("outerHashJoin", OuterHashJoinStream.class)
       .withFunctionName("intersect", IntersectStream.class)
       .withFunctionName("complement", ComplementStream.class)
-      .withFunctionName("sort", SortStream.class)
+      .withFunctionName(SORT, SortStream.class)
       .withFunctionName("train", TextLogitStream.class)
       .withFunctionName("features", FeaturesSelectionStream.class)
       .withFunctionName("daemon", DaemonStream.class)

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/core/src/java/org/apache/solr/handler/component/IterativeMergeStrategy.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/IterativeMergeStrategy.java b/solr/core/src/java/org/apache/solr/handler/component/IterativeMergeStrategy.java
index 2138948..97d4199 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/IterativeMergeStrategy.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/IterativeMergeStrategy.java
@@ -38,6 +38,8 @@ import org.apache.http.client.HttpClient;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.DISTRIB;
+
 public abstract class IterativeMergeStrategy implements MergeStrategy  {
 
   protected ExecutorService executorService;
@@ -89,7 +91,7 @@ public abstract class IterativeMergeStrategy implements MergeStrategy  {
       this.originalShardResponse = originalShardResponse;
       req.setMethod(SolrRequest.METHOD.POST);
       ModifiableSolrParams params = (ModifiableSolrParams)req.getParams();
-      params.add("distrib", "false");
+      params.add(DISTRIB, "false");
     }
 
     public QueryResponse getResponse() {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java b/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java
index 55edc63..ffb5858 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java
@@ -51,6 +51,8 @@ import org.apache.solr.search.SolrReturnFields;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.SORT;
+
 /**
  * TODO!
  * 
@@ -339,7 +341,7 @@ public class MoreLikeThisComponent extends SearchComponent {
     String id = rb.req.getSchema().getUniqueKeyField()
     .getName();
     s.params.set(CommonParams.FL, "score," + id);
-    s.params.set("sort", "score desc");
+    s.params.set(SORT, "score desc");
     // MLT Query is submitted as normal query to shards.
     s.params.set(CommonParams.Q, q);
     

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
index 12aa403..421e74f 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
@@ -82,6 +82,7 @@ import org.apache.solr.util.RefCounted;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.DISTRIB;
 import static org.apache.solr.common.params.CommonParams.ID;
 import static org.apache.solr.common.params.CommonParams.VERSION_FIELD;
 
@@ -840,7 +841,7 @@ public class RealTimeGetComponent extends SearchComponent
 
     // TODO: how to avoid hardcoding this and hit the same handler?
     sreq.params.set(ShardParams.SHARDS_QT,"/get");      
-    sreq.params.set("distrib",false);
+    sreq.params.set(DISTRIB,false);
 
     sreq.params.remove(ShardParams.SHARDS);
     sreq.params.remove(ID);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java b/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java
index c05c6c4..a00839b 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java
@@ -53,6 +53,7 @@ import org.apache.solr.util.plugin.SolrCoreAware;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.DISTRIB;
 import static org.apache.solr.common.params.CommonParams.PATH;
 
 
@@ -212,7 +213,7 @@ public class SearchHandler extends RequestHandlerBase implements SolrCoreAware ,
 
     CoreContainer cc = req.getCore().getCoreDescriptor().getCoreContainer();
     boolean isZkAware = cc.isZooKeeperAware();
-    rb.isDistrib = req.getParams().getBool("distrib", isZkAware);
+    rb.isDistrib = req.getParams().getBool(DISTRIB, isZkAware);
     if (!rb.isDistrib) {
       // for back compat, a shards param with URLs like localhost:8983/solr will mean that this
       // search is distributed.
@@ -361,7 +362,7 @@ public class SearchHandler extends RequestHandlerBase implements SolrCoreAware ,
             for (String shard : sreq.actualShards) {
               ModifiableSolrParams params = new ModifiableSolrParams(sreq.params);
               params.remove(ShardParams.SHARDS);      // not a top-level request
-              params.set(CommonParams.DISTRIB, "false");               // not a top-level request
+              params.set(DISTRIB, "false");               // not a top-level request
               params.remove("indent");
               params.remove(CommonParams.HEADER_ECHO_PARAMS);
               params.set(ShardParams.IS_SHARD, true);  // a sub (shard) request
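Two slightly different read patterns for the same flag appear in this patch, and both SolrParams overloads keep working with the constant: the two-argument getBool supplies a primitive default (SearchHandler above), while the one-argument form returns a nullable Boolean so the caller can tell "absent" from "false" (PingRequestHandler earlier). A small sketch:

    import org.apache.solr.common.params.ModifiableSolrParams;
    import org.apache.solr.common.params.SolrParams;
    import static org.apache.solr.common.params.CommonParams.DISTRIB;

    SolrParams params = new ModifiableSolrParams();
    boolean isDistrib = params.getBool(DISTRIB, true);  // primitive with default
    Boolean explicit  = params.getBool(DISTRIB);        // null when the param is absent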

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java b/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
index c3b83db..e313b44 100644
--- a/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
+++ b/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
@@ -55,6 +55,7 @@ import org.apache.solr.common.params.ModifiableSolrParams;
 import java.io.IOException;
 import java.util.*;
 import java.util.stream.Collectors;
+import static org.apache.solr.common.params.CommonParams.SORT;
 
 /**
  * Table based on a Solr collection
@@ -271,13 +272,13 @@ class SolrTable extends AbstractQueryableTable implements TranslatableTable {
     String fl = getFields(fields);
 
     if(orders.size() > 0) {
-      params.add(CommonParams.SORT, getSort(orders));
+      params.add(SORT, getSort(orders));
     } else {
       if(limit == null) {
-        params.add(CommonParams.SORT, "_version_ desc");
+        params.add(SORT, "_version_ desc");
         fl = fl+",_version_";
       } else {
-        params.add(CommonParams.SORT, "score desc");
+        params.add(SORT, "score desc");
         if(fl.indexOf("score") == -1) {
           fl = fl + ",score";
         }
@@ -460,7 +461,7 @@ class SolrTable extends AbstractQueryableTable implements TranslatableTable {
       params.set("partitionKeys", getPartitionKeys(buckets));
     }
 
-    params.set("sort", sort);
+    params.set(SORT, sort);
 
     TupleStream tupleStream = null;
 
@@ -699,7 +700,7 @@ class SolrTable extends AbstractQueryableTable implements TranslatableTable {
       params.set("partitionKeys", getPartitionKeys(buckets));
     }
 
-    params.set("sort", sort);
+    params.set(SORT, sort);
 
     TupleStream tupleStream = null;
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/core/src/java/org/apache/solr/index/SortingMergePolicyFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/index/SortingMergePolicyFactory.java b/solr/core/src/java/org/apache/solr/index/SortingMergePolicyFactory.java
index b22df3b..ea2387d 100644
--- a/solr/core/src/java/org/apache/solr/index/SortingMergePolicyFactory.java
+++ b/solr/core/src/java/org/apache/solr/index/SortingMergePolicyFactory.java
@@ -18,25 +18,24 @@ package org.apache.solr.index;
 
 import org.apache.lucene.index.MergePolicy;
 import org.apache.lucene.search.Sort;
-
 import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.search.SortSpecParsing;
 
+import static org.apache.solr.common.params.CommonParams.SORT;
+
 /**
  * A {@link MergePolicyFactory} for {@code SortingMergePolicy} objects.
  */
 public class SortingMergePolicyFactory extends WrapperMergePolicyFactory {
 
-  static final String SORT = "sort"; // not private so that test(s) can use it
-
   protected final Sort mergeSort;
 
   public SortingMergePolicyFactory(SolrResourceLoader resourceLoader, MergePolicyFactoryArgs args, IndexSchema schema) {
     super(resourceLoader, args, schema);
     final String sortArg = (String) args.remove(SORT);
     if (sortArg == null) {
-      throw new IllegalArgumentException(SortingMergePolicyFactory.class.getSimpleName()+" requires a '"+SORT+ "' argument.");
+      throw new IllegalArgumentException(SortingMergePolicyFactory.class.getSimpleName()+" requires a '"+ SORT + "' argument.");
     }
     this.mergeSort = SortSpecParsing.parseSortSpec(sortArg, schema).getSort();
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/core/src/java/org/apache/solr/request/SimpleFacets.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/request/SimpleFacets.java b/solr/core/src/java/org/apache/solr/request/SimpleFacets.java
index 5370dd9..b452802 100644
--- a/solr/core/src/java/org/apache/solr/request/SimpleFacets.java
+++ b/solr/core/src/java/org/apache/solr/request/SimpleFacets.java
@@ -96,6 +96,8 @@ import org.apache.solr.util.BoundedTreeSet;
 import org.apache.solr.util.DefaultSolrThreadFactory;
 import org.apache.solr.util.RTimer;
 
+import static org.apache.solr.common.params.CommonParams.SORT;
+
 /**
  * A class that generates simple Facet information for a request.
  *
@@ -531,7 +533,7 @@ public class SimpleFacets {
               default:
                 sortVal = sort;
             }
-            jsonFacet.put("sort", sortVal );
+            jsonFacet.put(SORT, sortVal );
 
             Map<String, Object> topLevel = new HashMap<>();
             topLevel.put(field, jsonFacet);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/core/src/java/org/apache/solr/request/json/RequestUtil.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/request/json/RequestUtil.java b/solr/core/src/java/org/apache/solr/request/json/RequestUtil.java
index 2529e74..ac0dc19 100644
--- a/solr/core/src/java/org/apache/solr/request/json/RequestUtil.java
+++ b/solr/core/src/java/org/apache/solr/request/json/RequestUtil.java
@@ -36,6 +36,7 @@ import org.noggit.JSONParser;
 import org.noggit.ObjectBuilder;
 
 import static org.apache.solr.common.params.CommonParams.JSON;
+import static org.apache.solr.common.params.CommonParams.SORT;
 
 public class RequestUtil {
   /**
@@ -206,8 +207,8 @@ public class RequestUtil {
           out = "start";
         } else if ("limit".equals(key)) {
           out = "rows";
-        } else if ("sort".equals(key)) {
-          out = "sort";
+        } else if (SORT.equals(key)) {
+          out = SORT;
         } else if ("params".equals(key) || "facet".equals(key) ) {
           // handled elsewhere
           continue;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java
index 22f9f29..61756ea 100644
--- a/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java
@@ -25,6 +25,12 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
+import com.carrotsearch.hppc.FloatArrayList;
+import com.carrotsearch.hppc.IntArrayList;
+import com.carrotsearch.hppc.IntIntHashMap;
+import com.carrotsearch.hppc.IntLongHashMap;
+import com.carrotsearch.hppc.cursors.IntIntCursor;
+import com.carrotsearch.hppc.cursors.IntLongCursor;
 import org.apache.commons.lang.StringUtils;
 import org.apache.lucene.codecs.DocValuesProducer;
 import org.apache.lucene.index.DocValues;
@@ -72,12 +78,7 @@ import org.apache.solr.schema.TrieIntField;
 import org.apache.solr.schema.TrieLongField;
 import org.apache.solr.uninverting.UninvertingReader;
 
-import com.carrotsearch.hppc.FloatArrayList;
-import com.carrotsearch.hppc.IntArrayList;
-import com.carrotsearch.hppc.IntIntHashMap;
-import com.carrotsearch.hppc.IntLongHashMap;
-import com.carrotsearch.hppc.cursors.IntIntCursor;
-import com.carrotsearch.hppc.cursors.IntLongCursor;
+import static org.apache.solr.common.params.CommonParams.SORT;
 
 /**
 
@@ -187,7 +188,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
      * returns a new GroupHeadSelector based on the specified local params
      */
     public static GroupHeadSelector build(final SolrParams localParams) {
-      final String sortString = StringUtils.defaultIfBlank(localParams.get("sort"), null);
+      final String sortString = StringUtils.defaultIfBlank(localParams.get(SORT), null);
       final String max = StringUtils.defaultIfBlank(localParams.get("max"), null);
       final String min = StringUtils.defaultIfBlank(localParams.get("min"), null);
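For context, the localParams read here are the collapse local parameters supplied by the client; the key now looked up via the SORT constant is the same "sort" users write inside the filter query. A hedged, query-side sketch (field names are placeholders, not from the patch):

    import org.apache.solr.common.params.ModifiableSolrParams;

    // GroupHeadSelector.build() will see sort='price_d asc' as the group-head criterion.
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set("q", "*:*");
    params.add("fq", "{!collapse field=group_s sort='price_d asc'}");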
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
index 9835f7d..b446ece 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
@@ -36,6 +36,7 @@ import org.apache.solr.search.QueryContext;
 import org.apache.solr.search.SolrIndexSearcher;
 import org.apache.solr.search.SyntaxError;
 
+import static org.apache.solr.common.params.CommonParams.SORT;
 import static org.apache.solr.search.facet.FacetRequest.RefineMethod.NONE;
 
 
@@ -645,7 +646,7 @@ class FacetFieldParser extends FacetParser<FacetField> {
       Object o = m.get("facet");
       parseSubs(o);
 
-      parseSort( m.get("sort") );
+      parseSort( m.get(SORT) );
     }
 
     return facet;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/core/src/java/org/apache/solr/search/facet/LegacyFacet.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/LegacyFacet.java b/solr/core/src/java/org/apache/solr/search/facet/LegacyFacet.java
index 9457d9c..7c2bde4 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/LegacyFacet.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/LegacyFacet.java
@@ -35,6 +35,8 @@ import org.apache.solr.search.SolrReturnFields;
 import org.apache.solr.search.StrParser;
 import org.apache.solr.search.SyntaxError;
 
+import static org.apache.solr.common.params.CommonParams.SORT;
+
 public class LegacyFacet {
   private SolrParams params;
   private Map<String,Object> json;
@@ -172,7 +174,7 @@ public class LegacyFacet {
     String sort = params.getFieldParam(f, FacetParams.FACET_SORT, limit>0 ? FacetParams.FACET_SORT_COUNT : FacetParams.FACET_SORT_INDEX);
     String prefix = params.getFieldParam(f, FacetParams.FACET_PREFIX);
 
-    Map<String,Object> cmd = new HashMap<String,Object>();
+    Map<String,Object> cmd = new HashMap<>();
     cmd.put("field", facetValue);
     if (offset != 0) cmd.put("offset", offset);
     if (limit != 10) cmd.put("limit", limit);
@@ -182,9 +184,9 @@ public class LegacyFacet {
     if (sort.equals("count")) {
       // our default
     } else if (sort.equals("index")) {
-      cmd.put("sort", "index asc");
+      cmd.put(SORT, "index asc");
     } else {
-      cmd.put("sort", sort);  // can be sort by one of our stats
+      cmd.put(SORT, sort);  // can be sort by one of our stats
     }
 
     Map<String,Object> type = new HashMap<>(1);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/core/src/java/org/apache/solr/update/PeerSync.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/PeerSync.java b/solr/core/src/java/org/apache/solr/update/PeerSync.java
index 425d1db..9470cca 100644
--- a/solr/core/src/java/org/apache/solr/update/PeerSync.java
+++ b/solr/core/src/java/org/apache/solr/update/PeerSync.java
@@ -60,6 +60,7 @@ import org.apache.solr.update.processor.UpdateRequestProcessorChain;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.DISTRIB;
 import static org.apache.solr.common.params.CommonParams.ID;
 import static org.apache.solr.update.processor.DistributedUpdateProcessor.DistribPhase.FROMLEADER;
 import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM;
@@ -405,7 +406,7 @@ public class PeerSync implements SolrMetricProducer {
     sreq.params = new ModifiableSolrParams();
     sreq.params = new ModifiableSolrParams();
     sreq.params.set("qt","/get");
-    sreq.params.set("distrib",false);
+    sreq.params.set(DISTRIB,false);
     sreq.params.set("getFingerprint", String.valueOf(Long.MAX_VALUE));
     
     shardHandler.submit(sreq, replica, sreq.params);
@@ -421,7 +422,7 @@ public class PeerSync implements SolrMetricProducer {
     sreq.actualShards = sreq.shards;
     sreq.params = new ModifiableSolrParams();
     sreq.params.set("qt","/get");
-    sreq.params.set("distrib",false);
+    sreq.params.set(DISTRIB,false);
     sreq.params.set("getVersions",nUpdates);
     sreq.params.set("fingerprint",doFingerprint);
     shardHandler.submit(sreq, replica, sreq.params);
@@ -506,7 +507,7 @@ public class PeerSync implements SolrMetricProducer {
     sreq.actualShards = sreq.shards;
     sreq.params = new ModifiableSolrParams();
     sreq.params.set("qt", "/get");
-    sreq.params.set("distrib", false);
+    sreq.params.set(DISTRIB, false);
     sreq.params.set("checkCanHandleVersionRanges", false);
 
     ShardHandler sh = shardHandlerFactory.getShardHandler(client);
@@ -725,7 +726,7 @@ public class PeerSync implements SolrMetricProducer {
     sreq.purpose = 0;
     sreq.params = new ModifiableSolrParams();
     sreq.params.set("qt", "/get");
-    sreq.params.set("distrib", false);
+    sreq.params.set(DISTRIB, false);
     sreq.params.set("getUpdates", versionsAndRanges);
     sreq.params.set("onlyIfActive", onlyIfActive);
     
@@ -890,7 +891,7 @@ public class PeerSync implements SolrMetricProducer {
       sreq.shards = new String[]{replica};
       sreq.params = new ModifiableSolrParams();
       sreq.params.set("qt","/get");
-      sreq.params.set("distrib", false);
+      sreq.params.set(DISTRIB, false);
       sreq.params.set("getVersions",nUpdates);
       shardHandler.submit(sreq, replica, sreq.params);
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
index fe71b0b..55d1fc8 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
@@ -16,6 +16,7 @@
  */
 package org.apache.solr.update.processor;
 
+import static org.apache.solr.common.params.CommonParams.DISTRIB;
 import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM;
 
 import java.io.IOException;
@@ -1303,7 +1304,7 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
     String id = inplaceAdd.getPrintableId();
     UpdateShardHandler updateShardHandler = inplaceAdd.getReq().getCore().getCoreDescriptor().getCoreContainer().getUpdateShardHandler();
     ModifiableSolrParams params = new ModifiableSolrParams();
-    params.set("distrib", false);
+    params.set(DISTRIB, false);
     params.set("getInputDocument", id);
     params.set("onlyIfActive", true);
     SolrRequest<SimpleSolrResponse> ur = new GenericSolrRequest(METHOD.GET, "/get", params);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/core/src/java/org/apache/solr/util/SolrCLI.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/SolrCLI.java b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
index 4f6cf8d..6a85422 100644
--- a/solr/core/src/java/org/apache/solr/util/SolrCLI.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
@@ -113,6 +113,7 @@ import org.slf4j.LoggerFactory;
 import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.apache.solr.common.SolrException.ErrorCode.FORBIDDEN;
 import static org.apache.solr.common.SolrException.ErrorCode.UNAUTHORIZED;
+import static org.apache.solr.common.params.CommonParams.DISTRIB;
 import static org.apache.solr.common.params.CommonParams.NAME;
 
 /**
@@ -1193,7 +1194,7 @@ public class SolrCLI {
             // query this replica directly to get doc count and assess health
             q = new SolrQuery("*:*");
             q.setRows(0);
-            q.set("distrib", "false");
+            q.set(DISTRIB, "false");
             try (HttpSolrClient solr = new HttpSolrClient.Builder(coreUrl).build()) {
 
               String solrUrl = solr.getBaseURL();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/solrj/src/java/org/apache/solr/client/solrj/io/ModelCache.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/ModelCache.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/ModelCache.java
index 9a204e2..4676594 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/ModelCache.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/ModelCache.java
@@ -25,6 +25,7 @@ import org.apache.solr.client.solrj.io.stream.CloudSolrStream;
 import org.apache.solr.client.solrj.io.stream.StreamContext;
 import org.apache.solr.common.params.ModifiableSolrParams;
 
+import static org.apache.solr.common.params.CommonParams.SORT;
 import static org.apache.solr.common.params.CommonParams.VERSION_FIELD;
 
 
@@ -74,7 +75,7 @@ public class ModelCache {
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.set("q","name_s:"+modelID);
     params.set("fl", "terms_ss, idfs_ds, weights_ds, iteration_i, _version_");
-    params.set("sort", "iteration_i desc");
+    params.set(SORT, "iteration_i desc");
     StreamContext streamContext = new StreamContext();
     streamContext.setSolrClientCache(solrClientCache);
     CloudSolrStream stream = new CloudSolrStream(zkHost, collection, params);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/GatherNodesStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/GatherNodesStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/GatherNodesStream.java
index 52a6a1e..8059677 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/GatherNodesStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/GatherNodesStream.java
@@ -51,6 +51,8 @@ import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SolrjNamedThreadFactory;
 
+import static org.apache.solr.common.params.CommonParams.SORT;
+
 public class GatherNodesStream extends TupleStream implements Expressible {
 
   private String zkHost;
@@ -449,7 +451,7 @@ public class GatherNodesStream extends TupleStream implements Expressible {
       ModifiableSolrParams joinSParams = new ModifiableSolrParams(SolrParams.toMultiMap(new NamedList(queryParams)));
       joinSParams.set("fl", buf.toString());
       joinSParams.set("qt", "/export");
-      joinSParams.set("sort", gather + " asc,"+traverseTo +" asc");
+      joinSParams.set(SORT, gather + " asc,"+traverseTo +" asc");
 
       StringBuffer nodeQuery = new StringBuffer();
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/ShortestPathStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/ShortestPathStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/ShortestPathStream.java
index 768ce07..aa546ae 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/ShortestPathStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/ShortestPathStream.java
@@ -52,6 +52,8 @@ import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.SolrjNamedThreadFactory;
 
+import static org.apache.solr.common.params.CommonParams.SORT;
+
 public class ShortestPathStream extends TupleStream implements Expressible {
 
   private static final long serialVersionUID = 1;
@@ -450,7 +452,7 @@ public class ShortestPathStream extends TupleStream implements Expressible {
 
       joinParams.set("fl", fl);
       joinParams.set("qt", "/export");
-      joinParams.set("sort", toField + " asc,"+fromField +" asc");
+      joinParams.set(SORT, toField + " asc,"+fromField +" asc");
 
       StringBuffer nodeQuery = new StringBuffer();
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/solrj/src/java/org/apache/solr/client/solrj/io/ops/GroupOperation.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/ops/GroupOperation.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/ops/GroupOperation.java
index 9ed5cbe..4a28cc1 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/ops/GroupOperation.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/ops/GroupOperation.java
@@ -39,6 +39,8 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter;
 import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 
+import static org.apache.solr.common.params.CommonParams.SORT;
+
 public class GroupOperation implements ReduceOperation {
 
   private static final long serialVersionUID = 1L;
@@ -52,7 +54,7 @@ public class GroupOperation implements ReduceOperation {
   public GroupOperation(StreamExpression expression, StreamFactory factory) throws IOException {
 
     StreamExpressionNamedParameter nParam = factory.getNamedOperand(expression, "n");
-    StreamExpressionNamedParameter sortExpression = factory.getNamedOperand(expression, "sort");
+    StreamExpressionNamedParameter sortExpression = factory.getNamedOperand(expression, SORT);
 
     StreamComparator streamComparator = factory.constructComparator(((StreamExpressionValue) sortExpression.getParameter()).getValue(), FieldComparator.class);
     String nStr = ((StreamExpressionValue)nParam.getParameter()).getValue();
@@ -87,7 +89,7 @@ public class GroupOperation implements ReduceOperation {
     expression.addParameter(new StreamExpressionNamedParameter("n", Integer.toString(size)));
 
     // sort
-    expression.addParameter(new StreamExpressionNamedParameter("sort", streamComparator.toExpression(factory)));
+    expression.addParameter(new StreamExpressionNamedParameter(SORT, streamComparator.toExpression(factory)));
     return expression;
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java
index f177585..1acd79d 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java
@@ -63,6 +63,9 @@ import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.SolrjNamedThreadFactory;
 import org.apache.solr.common.util.StrUtils;
 
+import static org.apache.solr.common.params.CommonParams.DISTRIB;
+import static org.apache.solr.common.params.CommonParams.SORT;
+
 /**
  * Connects to Zookeeper to pick replicas from a specific collection to send the query to.
  * Under the covers the SolrStream instances send the query to the replicas.
@@ -269,10 +272,10 @@ public class CloudSolrStream extends TupleStream implements Expressible {
     }
     String fls = String.join(",", params.getParams("fl"));
 
-    if (params.getParams("sort") == null) {
+    if (params.getParams(SORT) == null) {
       throw new IOException("sort param expected for search function");
     }
-    String sorts = String.join(",", params.getParams("sort"));
+    String sorts = String.join(",", params.getParams(SORT));
     this.comp = parseComp(sorts, fls);
   }
   
@@ -403,7 +406,7 @@ public class CloudSolrStream extends TupleStream implements Expressible {
       Collection<Slice> slices = CloudSolrStream.getSlices(this.collection, zkStateReader, true);
 
       ModifiableSolrParams mParams = new ModifiableSolrParams(params); 
-      mParams.set("distrib", "false"); // We are the aggregator.
+      mParams.set(DISTRIB, "false"); // We are the aggregator.
 
       Set<String> liveNodes = clusterState.getLiveNodes();
       for(Slice slice : slices) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FeaturesSelectionStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FeaturesSelectionStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FeaturesSelectionStream.java
index f15e2a7..c5339cb 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FeaturesSelectionStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FeaturesSelectionStream.java
@@ -59,6 +59,7 @@ import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SolrjNamedThreadFactory;
 
+import static org.apache.solr.common.params.CommonParams.DISTRIB;
 import static org.apache.solr.common.params.CommonParams.ID;
 
 public class FeaturesSelectionStream extends TupleStream implements Expressible{
@@ -415,7 +416,7 @@ public class FeaturesSelectionStream extends TupleStream implements Expressible{
       ModifiableSolrParams params = new ModifiableSolrParams();
       HttpSolrClient solrClient = cache.getHttpSolrClient(baseUrl);
 
-      params.add("distrib", "false");
+      params.add(DISTRIB, "false");
       params.add("fq","{!igain}");
 
       for(String key : paramsMap.keySet()) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FetchStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FetchStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FetchStream.java
index 55ca51a..06e6fdc 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FetchStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FetchStream.java
@@ -37,6 +37,7 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 import org.apache.solr.common.params.ModifiableSolrParams;
 
+import static org.apache.solr.common.params.CommonParams.SORT;
 import static org.apache.solr.common.params.CommonParams.VERSION_FIELD;
 
 /**
@@ -238,7 +239,7 @@ public class FetchStream extends TupleStream implements Expressible {
       params.add("q", buf.toString());
       params.add("fl", fieldList+appendFields());
       params.add("rows", Integer.toString(batchSize));
-      params.add("sort", "_version_ desc");
+      params.add(SORT, "_version_ desc");
 
       CloudSolrStream cloudSolrStream = new CloudSolrStream(zkHost, collection, params);
       StreamContext newContext = new StreamContext();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/JDBCStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/JDBCStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/JDBCStream.java
index 0f95103..172b9ef 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/JDBCStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/JDBCStream.java
@@ -45,6 +45,8 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter;
 import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 
+import static org.apache.solr.common.params.CommonParams.SORT;
+
 /**
  * Connects to a datasource using a registered JDBC driver and execute a query. The results of
  * that query will be returned as tuples. An EOF tuple will indicate that all have been read.
@@ -104,7 +106,7 @@ public class JDBCStream extends TupleStream implements Expressible {
     List<StreamExpressionNamedParameter> namedParams = factory.getNamedOperands(expression);
     StreamExpressionNamedParameter connectionUrlExpression = factory.getNamedOperand(expression, "connection");
     StreamExpressionNamedParameter sqlQueryExpression = factory.getNamedOperand(expression, "sql");
-    StreamExpressionNamedParameter definedSortExpression = factory.getNamedOperand(expression, "sort");
+    StreamExpressionNamedParameter definedSortExpression = factory.getNamedOperand(expression, SORT);
     StreamExpressionNamedParameter driverClassNameExpression = factory.getNamedOperand(expression, "driver");
     
     // Validate there are no unknown parameters - zkHost and alias are namedParameter so we don't need to count it twice
@@ -115,7 +117,7 @@ public class JDBCStream extends TupleStream implements Expressible {
     // All named params we don't care about will be passed to the driver on connection
     Properties connectionProperties = new Properties();
     for(StreamExpressionNamedParameter namedParam : namedParams){
-      if(!namedParam.getName().equals("driver") && !namedParam.getName().equals("connection") && !namedParam.getName().equals("sql") && !namedParam.getName().equals("sort")){
+      if(!namedParam.getName().equals("driver") && !namedParam.getName().equals("connection") && !namedParam.getName().equals("sql") && !namedParam.getName().equals(SORT)){
         connectionProperties.put(namedParam.getName(), namedParam.getParameter().toString().trim());
       }
     }
@@ -367,7 +369,7 @@ public class JDBCStream extends TupleStream implements Expressible {
     expression.addParameter(new StreamExpressionNamedParameter("sql", sqlQuery));
     
     // sort
-    expression.addParameter(new StreamExpressionNamedParameter("sort", definedSort.toExpression(factory)));
+    expression.addParameter(new StreamExpressionNamedParameter(SORT, definedSort.toExpression(factory)));
     
     // driver class
     if(null != driverClassName){

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelStream.java
index 10e80ad..87e1354 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelStream.java
@@ -21,23 +21,23 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.Set;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Random;
+import java.util.Set;
 
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.comp.FieldComparator;
 import org.apache.solr.client.solrj.io.comp.StreamComparator;
 import org.apache.solr.client.solrj.io.stream.expr.Explanation;
+import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
 import org.apache.solr.client.solrj.io.stream.expr.Expressible;
 import org.apache.solr.client.solrj.io.stream.expr.StreamExplanation;
 import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
 import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionNamedParameter;
 import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
-import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
 import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.Slice;
@@ -45,6 +45,9 @@ import org.apache.solr.common.cloud.ZkCoreNodeProps;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.ModifiableSolrParams;
 
+import static org.apache.solr.common.params.CommonParams.DISTRIB;
+import static org.apache.solr.common.params.CommonParams.SORT;
+
 /**
  * The ParallelStream decorates a TupleStream implementation and pushes it to N workers for parallel execution.
  * Workers are chosen from a SolrCloud collection.
@@ -85,7 +88,7 @@ public class ParallelStream extends CloudSolrStream implements Expressible {
     String collectionName = factory.getValueOperand(expression, 0);
     StreamExpressionNamedParameter workersParam = factory.getNamedOperand(expression, "workers");
     List<StreamExpression> streamExpressions = factory.getExpressionOperandsRepresentingTypes(expression, Expressible.class, TupleStream.class);
-    StreamExpressionNamedParameter sortExpression = factory.getNamedOperand(expression, "sort");
+    StreamExpressionNamedParameter sortExpression = factory.getNamedOperand(expression, SORT);
     StreamExpressionNamedParameter zkHostExpression = factory.getNamedOperand(expression, "zkHost");
     
     // validate expression contains only what we want.
@@ -188,7 +191,7 @@ public class ParallelStream extends CloudSolrStream implements Expressible {
     }
         
     // sort
-    expression.addParameter(new StreamExpressionNamedParameter("sort",comp.toExpression(factory)));
+    expression.addParameter(new StreamExpressionNamedParameter(SORT,comp.toExpression(factory)));
     
     // zkHost
     expression.addParameter(new StreamExpressionNamedParameter("zkHost", zkHost));
@@ -284,7 +287,7 @@ public class ParallelStream extends CloudSolrStream implements Expressible {
 
       for(int w=0; w<workers; w++) {
         ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
-        paramsLoc.set("distrib","false"); // We are the aggregator.
+        paramsLoc.set(DISTRIB,"false"); // We are the aggregator.
         paramsLoc.set("numWorkers", workers);
         paramsLoc.set("workerID", w);
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RandomStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RandomStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RandomStream.java
index 246f09e..5ba485d 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RandomStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RandomStream.java
@@ -18,21 +18,22 @@
 package org.apache.solr.client.solrj.io.stream;
 
 import java.io.IOException;
-import java.util.HashMap;
 import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.stream.Collectors;
 import java.util.Random;
-import java.util.Iterator;
+import java.util.stream.Collectors;
 
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.io.SolrClientCache;
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.comp.StreamComparator;
 import org.apache.solr.client.solrj.io.stream.expr.Explanation;
+import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
 import org.apache.solr.client.solrj.io.stream.expr.Expressible;
 import org.apache.solr.client.solrj.io.stream.expr.StreamExplanation;
 import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
@@ -40,13 +41,14 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionNamedParamete
 import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter;
 import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
-import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
 import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.params.ModifiableSolrParams;
 
+import static org.apache.solr.common.params.CommonParams.SORT;
+
 /**
 *  The RandomStream emits a stream of pseudo random Tuples that match the query parameters. Sample expression syntax:
  *  random(collection, q="Hello word", rows="50", fl="title, body")
@@ -177,13 +179,13 @@ public class RandomStream extends TupleStream implements Expressible  {
 
     ModifiableSolrParams params = getParams(this.props);
 
-    params.remove("sort"); //Override any sort.
+    params.remove(SORT); //Override any sort.
 
     Random rand = new Random();
     int seed = rand.nextInt();
 
     String sortField = "random_"+seed;
-    params.add("sort", sortField+" asc");
+    params.add(SORT, sortField+" asc");
 
     QueryRequest request = new QueryRequest(params);
     try {
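Behaviorally nothing changes in this hunk: RandomStream still overrides any incoming sort with a synthetic random_<seed> field. That only resolves if the target schema maps such names to a random-sort type; the stock example schemas do this with a dynamic field, which is an assumption of the sketch below rather than something this patch guarantees.

    import java.util.Random;
    import org.apache.solr.common.params.ModifiableSolrParams;
    import static org.apache.solr.common.params.CommonParams.SORT;

    // Assumes the collection's schema contains something like
    //   <dynamicField name="random_*" type="random"/>   (backed by solr.RandomSortField);
    // otherwise sorting on random_<seed> fails with an undefined-field error.
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set("q", "*:*");
    params.set(SORT, "random_" + new Random().nextInt() + " asc");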

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RankStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RankStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RankStream.java
index aaef849..a9fb1af 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RankStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/RankStream.java
@@ -37,6 +37,8 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionNamedParamete
 import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 
+import static org.apache.solr.common.params.CommonParams.SORT;
+
 
 /**
 *  Iterates over a TupleStream and Ranks the topN tuples based on a Comparator.
@@ -61,7 +63,7 @@ public class RankStream extends TupleStream implements Expressible {
     // grab all parameters out
     List<StreamExpression> streamExpressions = factory.getExpressionOperandsRepresentingTypes(expression, Expressible.class, TupleStream.class);
     StreamExpressionNamedParameter nParam = factory.getNamedOperand(expression, "n");
-    StreamExpressionNamedParameter sortExpression = factory.getNamedOperand(expression, "sort");
+    StreamExpressionNamedParameter sortExpression = factory.getNamedOperand(expression, SORT);
     
     // validate expression contains only what we want.
     if(expression.getParameters().size() != streamExpressions.size() + 2){
@@ -129,7 +131,7 @@ public class RankStream extends TupleStream implements Expressible {
     }
         
     // sort
-    expression.addParameter(new StreamExpressionNamedParameter("sort",comp.toExpression(factory)));
+    expression.addParameter(new StreamExpressionNamedParameter(SORT, comp.toExpression(factory)));
     
     return expression;   
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ScoreNodesStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ScoreNodesStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ScoreNodesStream.java
index f394424..177cee0 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ScoreNodesStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ScoreNodesStream.java
@@ -19,11 +19,11 @@ package org.apache.solr.client.solrj.io.stream;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
-import java.util.HashMap;
 
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.io.SolrClientCache;
@@ -42,6 +42,8 @@ import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.TermsParams;
 import org.apache.solr.common.util.NamedList;
 
+import static org.apache.solr.common.params.CommonParams.DISTRIB;
+
 /**
  *  Iterates over a gatherNodes() expression and scores the Tuples based on tf-idf.
  *
@@ -211,7 +213,7 @@ public class ScoreNodesStream extends TupleStream implements Expressible
     params.add(TermsParams.TERMS_STATS, "true");
     params.add(TermsParams.TERMS_LIST, builder.toString());
     params.add(TermsParams.TERMS_LIMIT, Integer.toString(nodes.size()));
-    params.add("distrib", "true");
+    params.add(DISTRIB, "true");
 
     QueryRequest request = new QueryRequest(params);
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
index 101a71d..b4decd5 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
@@ -49,6 +49,8 @@ import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SolrjNamedThreadFactory;
 
+import static org.apache.solr.common.params.CommonParams.DISTRIB;
+
 public class SignificantTermsStream extends TupleStream implements Expressible{
 
   private static final long serialVersionUID = 1;
@@ -376,7 +378,7 @@ public class SignificantTermsStream extends TupleStream implements Expressible{
       ModifiableSolrParams params = new ModifiableSolrParams();
       HttpSolrClient solrClient = cache.getHttpSolrClient(baseUrl);
 
-      params.add("distrib", "false");
+      params.add(DISTRIB, "false");
       params.add("fq","{!sigificantTerms}");
 
       for(String key : paramsMap.keySet()) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java
index 7f194f4..5a70ced 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java
@@ -60,6 +60,7 @@ import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SolrjNamedThreadFactory;
 
+import static org.apache.solr.common.params.CommonParams.DISTRIB;
 import static org.apache.solr.common.params.CommonParams.ID;
 
 public class TextLogitStream extends TupleStream implements Expressible {
@@ -615,7 +616,7 @@ public class TextLogitStream extends TupleStream implements Expressible {
       ModifiableSolrParams params = new ModifiableSolrParams();
       HttpSolrClient solrClient = cache.getHttpSolrClient(baseUrl);
 
-      params.add("distrib", "false");
+      params.add(DISTRIB, "false");
       params.add("fq","{!tlogit}");
       params.add("feature", feature);
       params.add("terms", TextLogitStream.toString(terms));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java
index ccbe8c1..58063d0 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java
@@ -63,7 +63,9 @@ import org.apache.solr.common.util.SolrjNamedThreadFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.params.CommonParams.DISTRIB;
 import static org.apache.solr.common.params.CommonParams.ID;
+import static org.apache.solr.common.params.CommonParams.SORT;
 import static org.apache.solr.common.params.CommonParams.VERSION_FIELD;
 
 public class TopicStream extends CloudSolrStream implements Expressible  {
@@ -436,8 +438,8 @@ public class TopicStream extends CloudSolrStream implements Expressible  {
     long checkpoint = -1;
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.set("q","*:*");
-    params.set("sort", "_version_ desc");
-    params.set("distrib", "false");
+    params.set(SORT, "_version_ desc");
+    params.set(DISTRIB, "false");
     params.set("rows", 1);
     for(Replica replica : replicas) {
       if(replica.getState() == Replica.State.ACTIVE && liveNodes.contains(replica.getNodeName())) {
@@ -523,9 +525,9 @@ public class TopicStream extends CloudSolrStream implements Expressible  {
       Collection<Slice> slices = CloudSolrStream.getSlices(this.collection, zkStateReader, false);
 
       ModifiableSolrParams mParams = new ModifiableSolrParams(params);
-      mParams.set("distrib", "false"); // We are the aggregator.
+      mParams.set(DISTRIB, "false"); // We are the aggregator.
       String fl = mParams.get("fl");
-      mParams.set("sort", "_version_ asc");
+      mParams.set(SORT, "_version_ asc");
       if(!fl.contains(VERSION_FIELD)) {
         fl += ",_version_";
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4edfc1a9/solr/solrj/src/java/org/apache/solr/common/params/TermsParams.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/TermsParams.java b/solr/solrj/src/java/org/apache/solr/common/params/TermsParams.java
index d719500..4975846 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/TermsParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/TermsParams.java
@@ -18,6 +18,8 @@ package org.apache.solr.common.params;
 
 import java.util.regex.Pattern;
 
+import static org.apache.solr.common.params.CommonParams.SORT;
+
 /**
  *
  *
@@ -112,20 +114,20 @@ public interface TermsParams {
   /**
    * Optional.  The maximum value of docFreq to be returned.  -1 by default means no boundary
    */
-  public static final String TERMS_MAXCOUNT = TERMS_PREFIX + "maxcount";
+  String TERMS_MAXCOUNT = TERMS_PREFIX + "maxcount";
 
   /**
    * Optional.  If true, return the raw characters of the indexed term, regardless of if it is readable.
    * For instance, the index form of numeric numbers is not human readable.  The default is false.
    */
-  public static final String TERMS_RAW = TERMS_PREFIX + "raw";
+  String TERMS_RAW = TERMS_PREFIX + "raw";
 
   /**
    * Optional.  If sorting by frequency is enabled.  Defaults to sorting by count.
    */
-  public static final String TERMS_SORT = TERMS_PREFIX + "sort";
+  String TERMS_SORT = TERMS_PREFIX + SORT;
   
-  public static final String TERMS_SORT_COUNT = "count";
-  public static final String TERMS_SORT_INDEX = "index";
+  String TERMS_SORT_COUNT = "count";
+  String TERMS_SORT_INDEX = "index";
 }
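
A standalone sketch of the pattern the hunks in this commit converge on (illustrative only, not part of the commit; the parameter values are placeholders, only CommonParams and ModifiableSolrParams from the imports above are assumed):

import org.apache.solr.common.params.ModifiableSolrParams;

import static org.apache.solr.common.params.CommonParams.DISTRIB;
import static org.apache.solr.common.params.CommonParams.SORT;

public class ParamConstantsSketch {
  public static void main(String[] args) {
    ModifiableSolrParams params = new ModifiableSolrParams();
    // The literals "sort" and "distrib" become shared constants, so a typo is a
    // compile error instead of a silently ignored request parameter.
    params.set(SORT, "_version_ desc");
    params.set(DISTRIB, "false");
    System.out.println(params); // prints the params in query-string form
  }
}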
 


[11/46] lucene-solr:jira/solr-9959: SOLR-10254, 10085: Update CHANGES.txt

Posted by ab...@apache.org.
SOLR-10254, 10085: Update CHANGES.txt


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/3d81a9c8
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/3d81a9c8
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/3d81a9c8

Branch: refs/heads/jira/solr-9959
Commit: 3d81a9c8e72c396503bde41e1b53a090822fbec7
Parents: 17cc3e7
Author: Joel Bernstein <jb...@apache.org>
Authored: Thu Mar 16 13:54:25 2017 -0400
Committer: Joel Bernstein <jb...@apache.org>
Committed: Thu Mar 16 13:54:25 2017 -0400

----------------------------------------------------------------------
 solr/CHANGES.txt | 10 ++++++++++
 1 file changed, 10 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3d81a9c8/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 2d4ce6c..6ffb93f 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -100,6 +100,7 @@ Velocity 1.7 and Velocity Tools 2.0
 Apache UIMA 2.3.1
 Apache ZooKeeper 3.4.6
 Jetty 9.3.14.v20161028
+Apache Calcite 1.11.0
 
 Detailed Change List
 ----------------------
@@ -111,6 +112,11 @@ Upgrade Notes
   number of requests. New Codahale Metrics implementation applies exponential decay to this value,
   which heavily biases the average towards the last 5 minutes. (ab)
 
+* SOLR-8593: Parallel SQL now uses Apache Calcite as its SQL framework. As part of this change
+  the default aggregation mode has been changed to facet rather than map_reduce. There have also been
+  changes to the SQL aggregate response and to some SQL syntax. Consult the documentation for full details.
+
+
 New Features
 ----------------------
 
@@ -187,6 +193,10 @@ New Features
 
 * SOLR-10046: Add UninvertDocValuesMergePolicyFactory class. (Keith Laban, Christine Poerschke)
 
+* SOLR-10085: SQL result set fields should be ordered by the field list (Joel Bernstein)
+
+* SOLR-10254: significantTerms Streaming Expression should work in non-SolrCloud mode (Joel Bernstein)
+
 Bug Fixes
 ----------------------
 


[34/46] lucene-solr:jira/solr-9959: SOLR-10046: move from 6.5.0 to 6.6.0 CHANGES.txt (backport yet to be completed)

Posted by ab...@apache.org.
SOLR-10046: move from 6.5.0 to 6.6.0 CHANGES.txt (backport yet to be completed)


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/ffaa2345
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/ffaa2345
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/ffaa2345

Branch: refs/heads/jira/solr-9959
Commit: ffaa2345b40cff5287fcf8edaf9059cf3a8d1892
Parents: a3e4f57
Author: Christine Poerschke <cp...@apache.org>
Authored: Mon Mar 20 19:00:33 2017 +0000
Committer: Christine Poerschke <cp...@apache.org>
Committed: Mon Mar 20 19:00:33 2017 +0000

----------------------------------------------------------------------
 solr/CHANGES.txt | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ffaa2345/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index b4196ed..7767453 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -109,6 +109,8 @@ New Features
 
 * SOLR-9992: Add support for grouping with PointFIelds. (Cao Manh Dat) 
 
+* SOLR-10046: Add UninvertDocValuesMergePolicyFactory class. (Keith Laban, Christine Poerschke)
+
 
 ==================  6.5.0 ==================
 
@@ -213,8 +215,6 @@ New Features
 
 * SOLR-10224: Add disk total and disk free metrics. (ab)
 
-* SOLR-10046: Add UninvertDocValuesMergePolicyFactory class. (Keith Laban, Christine Poerschke)
-
 * SOLR-10085: SQL result set fields should be ordered by the field list (Joel Bernstein)
 
 * SOLR-10254: significantTerms Streaming Expression should work in non-SolrCloud mode (Joel Bernstein)


[38/46] lucene-solr:jira/solr-9959: Add support for CollapseQParser with PointFields

Posted by ab...@apache.org.
Add support for CollapseQParser with PointFields


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/b7042c1f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/b7042c1f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/b7042c1f

Branch: refs/heads/jira/solr-9959
Commit: b7042c1f6e449d7eb33a9daaabda0e0d2a53e95b
Parents: 6786089
Author: Cao Manh Dat <da...@apache.org>
Authored: Wed Mar 22 15:00:33 2017 +0700
Committer: Cao Manh Dat <da...@apache.org>
Committed: Wed Mar 22 15:00:33 2017 +0700

----------------------------------------------------------------------
 solr/CHANGES.txt                                |   1 +
 .../solr/search/CollapsingQParserPlugin.java    |  40 ++--
 .../solr/search/TestCollapseQParserPlugin.java  | 216 +++++++++----------
 .../search/TestRandomCollapseQParserPlugin.java |   5 -
 4 files changed, 131 insertions(+), 131 deletions(-)
----------------------------------------------------------------------
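
For orientation, the kind of request this commit extends to Point-based numeric fields, sketched with SolrJ (illustrative only, not part of the commit; group_i and test_f mirror the field names used in the tests below):

import org.apache.solr.client.solrj.SolrQuery;

public class CollapseOnPointFieldSketch {
  public static void main(String[] args) {
    // With this change the collapse field and the min/max selector may be
    // IntPointField/LongPointField/FloatPointField as well as the Trie* types.
    SolrQuery q = new SolrQuery("*:*");
    q.addFilterQuery("{!collapse field=group_i min=test_f}");
    System.out.println(q); // prints the request in query-string form
  }
}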


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b7042c1f/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index f61c4c2..2bfc981 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -114,6 +114,7 @@ New Features
 
 * SOLR-10046: Add UninvertDocValuesMergePolicyFactory class. (Keith Laban, Christine Poerschke)
 
+* SOLR-9994: Add support for CollapseQParser with PointFields. (Varun Thacker, Cao Manh Dat) 
 
 ==================  6.5.0 ==================
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b7042c1f/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java
index 71478aa..22f9f29 100644
--- a/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java
@@ -63,6 +63,9 @@ import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.request.SolrRequestInfo;
 import org.apache.solr.schema.FieldType;
+import org.apache.solr.schema.FloatPointField;
+import org.apache.solr.schema.IntPointField;
+import org.apache.solr.schema.LongPointField;
 import org.apache.solr.schema.StrField;
 import org.apache.solr.schema.TrieFloatField;
 import org.apache.solr.schema.TrieIntField;
@@ -962,14 +965,14 @@ public class CollapsingQParserPlugin extends QParserPlugin {
       } else if (funcQuery != null) {
         this.collapseStrategy =  new OrdValueSourceStrategy(maxDoc, nullPolicy, new int[valueCount], groupHeadSelector, this.needsScores, boostDocs, funcQuery, searcher, collapseValues);
       } else {
-        if(fieldType instanceof TrieIntField) {
+        if (fieldType instanceof TrieIntField || fieldType instanceof IntPointField) {
           this.collapseStrategy = new OrdIntStrategy(maxDoc, nullPolicy, new int[valueCount], groupHeadSelector, this.needsScores, boostDocs, collapseValues);
-        } else if(fieldType instanceof TrieFloatField) {
+        } else if (fieldType instanceof TrieFloatField || fieldType instanceof FloatPointField) {
           this.collapseStrategy = new OrdFloatStrategy(maxDoc, nullPolicy, new int[valueCount], groupHeadSelector, this.needsScores, boostDocs, collapseValues);
-        } else if(fieldType instanceof TrieLongField) {
+        } else if (fieldType instanceof TrieLongField || fieldType instanceof LongPointField) {
           this.collapseStrategy =  new OrdLongStrategy(maxDoc, nullPolicy, new int[valueCount], groupHeadSelector, this.needsScores, boostDocs, collapseValues);
         } else {
-          throw new IOException("min/max must be either TrieInt, TrieLong, TrieFloat.");
+          throw new IOException("min/max must be either Int/Long/Float field types");
         }
       }
     }
@@ -1146,12 +1149,12 @@ public class CollapsingQParserPlugin extends QParserPlugin {
       } else if (funcQuery != null) {
         this.collapseStrategy =  new IntValueSourceStrategy(maxDoc, size, collapseField, nullValue, nullPolicy, groupHeadSelector, this.needsScores, boostDocsMap, funcQuery, searcher);
       } else {
-        if(fieldType instanceof TrieIntField) {
+        if (fieldType instanceof TrieIntField || fieldType instanceof IntPointField) {
           this.collapseStrategy = new IntIntStrategy(maxDoc, size, collapseField, nullValue, nullPolicy, groupHeadSelector, this.needsScores, boostDocsMap);
-        } else if(fieldType instanceof TrieFloatField) {
+        } else if (fieldType instanceof TrieFloatField || fieldType instanceof FloatPointField) {
           this.collapseStrategy = new IntFloatStrategy(maxDoc, size, collapseField, nullValue, nullPolicy, groupHeadSelector, this.needsScores, boostDocsMap);
         } else {
-          throw new IOException("min/max must be TrieInt or TrieFloat when collapsing on numeric fields .");
+          throw new IOException("min/max must be Int or Float field types when collapsing on numeric fields");
         }
       }
     }
@@ -1259,6 +1262,15 @@ public class CollapsingQParserPlugin extends QParserPlugin {
 
   private static class CollectorFactory {
 
+    private boolean isNumericCollapsible(FieldType collapseFieldType) {
+      if (collapseFieldType instanceof TrieIntField || collapseFieldType instanceof IntPointField ||
+          collapseFieldType instanceof TrieFloatField || collapseFieldType instanceof FloatPointField) {
+        return true;
+      } else {
+        return false;
+      }
+    }
+
     public DelegatingCollector getCollector(String collapseField,
                                             GroupHeadSelector groupHeadSelector,
                                             SortSpec sortSpec,
@@ -1335,19 +1347,18 @@ public class CollapsingQParserPlugin extends QParserPlugin {
 
           return new OrdScoreCollector(maxDoc, leafCount, docValuesProducer, nullPolicy, boostDocs);
 
-        } else if (collapseFieldType instanceof TrieIntField ||
-                   collapseFieldType instanceof TrieFloatField) {
+        } else if (isNumericCollapsible(collapseFieldType)) {
 
           int nullValue = 0;
 
-          if(collapseFieldType instanceof TrieFloatField) {
-            if(defaultValue != null) {
+          if (collapseFieldType instanceof TrieFloatField || collapseFieldType instanceof FloatPointField) {
+            if (defaultValue != null) {
               nullValue = Float.floatToIntBits(Float.parseFloat(defaultValue));
             } else {
               nullValue = Float.floatToIntBits(0.0f);
             }
           } else {
-            if(defaultValue != null) {
+              if (defaultValue != null) {
               nullValue = Integer.parseInt(defaultValue);
             }
           }
@@ -1374,12 +1385,11 @@ public class CollapsingQParserPlugin extends QParserPlugin {
                                             funcQuery,
                                             searcher);
 
-        } else if((collapseFieldType instanceof TrieIntField ||
-                   collapseFieldType instanceof TrieFloatField)) {
+        } else if(isNumericCollapsible(collapseFieldType)) {
 
           int nullValue = 0;
 
-          if(collapseFieldType instanceof TrieFloatField) {
+          if (collapseFieldType instanceof TrieFloatField || collapseFieldType instanceof FloatPointField) {
             if(defaultValue != null) {
               nullValue = Float.floatToIntBits(Float.parseFloat(defaultValue));
             } else {
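
The float branch above stores the parsed default value as raw int bits so that a single int slot can hold the nullValue for both int and float collapse fields; the round trip it relies on, as a standalone JDK-only illustration (the value is arbitrary):

public class FloatBitsSketch {
  public static void main(String[] args) {
    String defaultValue = "42.5"; // the collapse default value, as it would arrive from local params
    int nullValue = Float.floatToIntBits(Float.parseFloat(defaultValue));
    float recovered = Float.intBitsToFloat(nullValue); // recovered when comparing float group heads
    System.out.println(nullValue + " -> " + recovered);
  }
}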

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b7042c1f/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java
index ea4114a..026e194 100644
--- a/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java
+++ b/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java
@@ -16,16 +16,14 @@
  */
 package org.apache.solr.search;
 
-import java.util.List;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Set;
 import java.util.HashSet;
 import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
 
-import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
 import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.SolrTestCaseJ4.SuppressPointFields;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.SolrParams;
@@ -35,11 +33,7 @@ import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-//We want codecs that support DocValues, and ones supporting blank/empty values.
-@SuppressCodecs({"Appending","Lucene3x","Lucene40","Lucene41","Lucene42"})
-@SuppressPointFields
 public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
-  
   @BeforeClass
   public static void beforeClass() throws Exception {
     initCore("solrconfig-collapseqparser.xml", "schema11.xml");
@@ -56,17 +50,17 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
   }
 
   public void testMultiSort() throws Exception {
-    assertU(adoc("id", "1", "group_s", "group1", "test_ti", "5", "test_tl", "10"));
+    assertU(adoc("id", "1", "group_s", "group1", "test_i", "5", "test_l", "10"));
     assertU(commit());
-    assertU(adoc("id", "2", "group_s", "group1", "test_ti", "5", "test_tl", "1000"));
-    assertU(adoc("id", "3", "group_s", "group1", "test_ti", "5", "test_tl", "1000"));
-    assertU(adoc("id", "4", "group_s", "group1", "test_ti", "10", "test_tl", "100"));
+    assertU(adoc("id", "2", "group_s", "group1", "test_i", "5", "test_l", "1000"));
+    assertU(adoc("id", "3", "group_s", "group1", "test_i", "5", "test_l", "1000"));
+    assertU(adoc("id", "4", "group_s", "group1", "test_i", "10", "test_l", "100"));
     //
-    assertU(adoc("id", "5", "group_s", "group2", "test_ti", "5", "test_tl", "10", "term_s", "YYYY"));
+    assertU(adoc("id", "5", "group_s", "group2", "test_i", "5", "test_l", "10", "term_s", "YYYY"));
     assertU(commit());
-    assertU(adoc("id", "6", "group_s", "group2", "test_ti", "5", "test_tl","1000"));
-    assertU(adoc("id", "7", "group_s", "group2", "test_ti", "5", "test_tl","1000", "term_s", "XXXX"));
-    assertU(adoc("id", "8", "group_s", "group2", "test_ti", "10","test_tl", "100"));
+    assertU(adoc("id", "6", "group_s", "group2", "test_i", "5", "test_l","1000"));
+    assertU(adoc("id", "7", "group_s", "group2", "test_i", "5", "test_l","1000", "term_s", "XXXX"));
+    assertU(adoc("id", "8", "group_s", "group2", "test_i", "10","test_l", "100"));
     assertU(commit());
     
     ModifiableSolrParams params;
@@ -75,7 +69,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
     params.add("fq", "{!collapse field=group_s sort=$sort}");
-    params.add("sort", "test_ti asc, test_tl desc, id desc");
+    params.add("sort", "test_i asc, test_l desc, id desc");
     assertQ(req(params)
             , "*[count(//doc)=2]"
             ,"//result/doc[1]/float[@name='id'][.='7.0']"
@@ -85,7 +79,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     // group heads are selected using a complex sort, simpler sort used for final groups
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field=group_s sort='test_ti asc, test_tl desc, id desc'}");
+    params.add("fq", "{!collapse field=group_s sort='test_i asc, test_l desc, id desc'}");
     params.add("sort", "id asc");
     assertQ(req(params)
             , "*[count(//doc)=2]"
@@ -96,7 +90,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     // diff up the sort directions, only first clause matters with our data
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field=group_s sort='test_ti desc, test_tl asc, id asc'}");
+    params.add("fq", "{!collapse field=group_s sort='test_i desc, test_l asc, id asc'}");
     params.add("sort", "id desc");
     assertQ(req(params)
             , "*[count(//doc)=2]"
@@ -107,7 +101,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     // tie broken by index order
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field=group_s sort='test_tl desc'}");
+    params.add("fq", "{!collapse field=group_s sort='test_l desc'}");
     params.add("sort", "id desc");
     assertQ(req(params)
             , "*[count(//doc)=2]"
@@ -118,7 +112,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     // score, then tiebreakers; note top level sort by score ASCENDING (just for weirdness)
     params = new ModifiableSolrParams();
     params.add("q", "*:* term_s:YYYY");
-    params.add("fq", "{!collapse field=group_s sort='score desc, test_tl desc, test_ti asc, id asc'}");
+    params.add("fq", "{!collapse field=group_s sort='score desc, test_l desc, test_i asc, id asc'}");
     params.add("sort", "score asc");
     assertQ(req(params)
             , "*[count(//doc)=2]"
@@ -129,7 +123,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     // score, then tiebreakers; note no score in top level sort/fl to check needsScores logic
     params = new ModifiableSolrParams();
     params.add("q", "*:* term_s:YYYY");
-    params.add("fq", "{!collapse field=group_s sort='score desc, test_tl desc, test_ti asc, id asc'}");
+    params.add("fq", "{!collapse field=group_s sort='score desc, test_l desc, test_i asc, id asc'}");
     params.add("sort", "id desc");
     assertQ(req(params)
             , "*[count(//doc)=2]"
@@ -140,7 +134,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     // term_s desc -- term_s is missing from many docs, and uses sortMissingLast=true
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field=group_s sort='term_s desc, test_tl asc'}");
+    params.add("fq", "{!collapse field=group_s sort='term_s desc, test_l asc'}");
     params.add("sort", "id asc");
     assertQ(req(params)
             , "*[count(//doc)=2]"
@@ -151,7 +145,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     // term_s asc -- term_s is missing from many docs, and uses sortMissingLast=true
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field=group_s sort='term_s asc, test_tl asc'}");
+    params.add("fq", "{!collapse field=group_s sort='term_s asc, test_l asc'}");
     params.add("sort", "id asc");
     assertQ(req(params)
             , "*[count(//doc)=2]"
@@ -162,7 +156,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     // collapse on int field
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field=test_ti sort='term_s asc, group_s asc'}");
+    params.add("fq", "{!collapse field=test_i sort='term_s asc, group_s asc'}");
     params.add("sort", "id asc");
     assertQ(req(params)
             , "*[count(//doc)=2]"
@@ -173,8 +167,8 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     // collapse on term_s (very sparse) with nullPolicy=collapse
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field=term_s nullPolicy=collapse sort='test_ti asc, test_tl desc, id asc'}");
-    params.add("sort", "test_tl asc, id asc");
+    params.add("fq", "{!collapse field=term_s nullPolicy=collapse sort='test_i asc, test_l desc, id asc'}");
+    params.add("sort", "test_l asc, id asc");
     assertQ(req(params)
             , "*[count(//doc)=3]"
             ,"//result/doc[1]/float[@name='id'][.='5.0']"
@@ -185,8 +179,8 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     // sort local param + elevation
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field=group_s sort='term_s desc, test_tl asc'}");
-    params.add("sort", "test_tl asc");
+    params.add("fq", "{!collapse field=group_s sort='term_s desc, test_l asc'}");
+    params.add("sort", "test_l asc");
     params.add("qt", "/elevate");
     params.add("forceElevation", "true");
     params.add("elevateIds", "4.0");
@@ -197,8 +191,8 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     //
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field=group_s sort='term_s desc, test_tl asc'}");
-    params.add("sort", "test_tl asc");
+    params.add("fq", "{!collapse field=group_s sort='term_s desc, test_l asc'}");
+    params.add("sort", "test_l asc");
     params.add("qt", "/elevate");
     params.add("forceElevation", "true");
     params.add("elevateIds", "7.0");
@@ -228,38 +222,38 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
 
   @Test
   public void testFieldValueCollapseWithNegativeMinMax() throws Exception {
-    String[] doc = {"id","1", "group_i", "-1000", "test_ti", "5", "test_tl", "-10", "test_tf", "2000.32"};
+    String[] doc = {"id","1", "group_i", "-1000", "test_i", "5", "test_l", "-10", "test_f", "2000.32"};
     assertU(adoc(doc));
     assertU(commit());
-    String[] doc1 = {"id","2", "group_i", "-1000", "test_ti", "50", "test_tl", "-100", "test_tf", "2000.33"};
+    String[] doc1 = {"id","2", "group_i", "-1000", "test_i", "50", "test_l", "-100", "test_f", "2000.33"};
     assertU(adoc(doc1));
 
-    String[] doc2 = {"id","3", "group_i", "-1000", "test_tl", "100", "test_tf", "200"};
+    String[] doc2 = {"id","3", "group_i", "-1000", "test_l", "100", "test_f", "200"};
     assertU(adoc(doc2));
     assertU(commit());
-    String[] doc3 = {"id","4", "test_ti", "500", "test_tl", "1000", "test_tf", "2000"};
+    String[] doc3 = {"id","4", "test_i", "500", "test_l", "1000", "test_f", "2000"};
     assertU(adoc(doc3));
 
-    String[] doc4 = {"id","5", "group_i", "-1000", "test_ti", "4", "test_tl", "10", "test_tf", "2000.31"};
+    String[] doc4 = {"id","5", "group_i", "-1000", "test_i", "4", "test_l", "10", "test_f", "2000.31"};
     assertU(adoc(doc4));
     assertU(commit());
-    String[] doc5 = {"id","6", "group_i", "-1000", "test_ti", "10", "test_tl", "100", "test_tf", "-2000.12"};
+    String[] doc5 = {"id","6", "group_i", "-1000", "test_i", "10", "test_l", "100", "test_f", "-2000.12"};
     assertU(adoc(doc5));
     assertU(commit());
 
-    String[] doc6 = {"id","7", "group_i", "-1000", "test_ti", "8", "test_tl", "-50", "test_tf", "-100.2"};
+    String[] doc6 = {"id","7", "group_i", "-1000", "test_i", "8", "test_l", "-50", "test_f", "-100.2"};
     assertU(adoc(doc6));
     assertU(commit());
 
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field=group_i min=test_tf}");
+    params.add("fq", "{!collapse field=group_i min=test_f}");
     assertQ(req(params), "*[count(//doc)=1]",
         "//result/doc[1]/float[@name='id'][.='6.0']");
 
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field=group_i max=test_tf}");
+    params.add("fq", "{!collapse field=group_i max=test_f}");
     assertQ(req(params), "*[count(//doc)=1]",
         "//result/doc[1]/float[@name='id'][.='2.0']");
 
@@ -349,29 +343,29 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
 
   private void testCollapseQueries(String group, String hint, boolean numeric) throws Exception {
 
-    String[] doc = {"id","1", "term_s", "YYYY", group, "1", "test_ti", "5", "test_tl", "10", "test_tf", "2000"};
+    String[] doc = {"id","1", "term_s", "YYYY", group, "1", "test_i", "5", "test_l", "10", "test_f", "2000"};
     assertU(adoc(doc));
     assertU(commit());
-    String[] doc1 = {"id","2", "term_s","YYYY", group, "1", "test_ti", "50", "test_tl", "100", "test_tf", "200"};
+    String[] doc1 = {"id","2", "term_s","YYYY", group, "1", "test_i", "50", "test_l", "100", "test_f", "200"};
     assertU(adoc(doc1));
 
 
 
-    String[] doc2 = {"id","3", "term_s", "YYYY", "test_ti", "5000", "test_tl", "100", "test_tf", "200"};
+    String[] doc2 = {"id","3", "term_s", "YYYY", "test_i", "5000", "test_l", "100", "test_f", "200"};
     assertU(adoc(doc2));
     assertU(commit());
-    String[] doc3 = {"id","4", "term_s", "YYYY", "test_ti", "500", "test_tl", "1000", "test_tf", "2000"};
+    String[] doc3 = {"id","4", "term_s", "YYYY", "test_i", "500", "test_l", "1000", "test_f", "2000"};
     assertU(adoc(doc3));
 
 
-    String[] doc4 = {"id","5", "term_s", "YYYY", group, "2", "test_ti", "4", "test_tl", "10", "test_tf", "2000"};
+    String[] doc4 = {"id","5", "term_s", "YYYY", group, "2", "test_i", "4", "test_l", "10", "test_f", "2000"};
     assertU(adoc(doc4));
     assertU(commit());
-    String[] doc5 = {"id","6", "term_s","YYYY", group, "2", "test_ti", "10", "test_tl", "100", "test_tf", "200"};
+    String[] doc5 = {"id","6", "term_s","YYYY", group, "2", "test_i", "10", "test_l", "100", "test_f", "200"};
     assertU(adoc(doc5));
     assertU(commit());
 
-    String[] doc6 = {"id","7", "term_s", "YYYY", group, "1", "test_ti", "8", "test_tl", "50", "test_tf", "300"};
+    String[] doc6 = {"id","7", "term_s", "YYYY", group, "1", "test_i", "8", "test_l", "50", "test_f", "300"};
     assertU(adoc(doc6));
     assertU(commit());
 
@@ -381,7 +375,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     params.add("q", "*:*");
     params.add("fq", "{!collapse field="+group+""+hint+"}");
     params.add("defType", "edismax");
-    params.add("bf", "field(test_ti)");
+    params.add("bf", "field(test_i)");
     assertQ(req(params, "indent", "on"), "*[count(//doc)=2]",
                        "//result/doc[1]/float[@name='id'][.='2.0']",
                        "//result/doc[2]/float[@name='id'][.='6.0']"
@@ -391,9 +385,9 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     // SOLR-5544 test ordering with empty sort param
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" nullPolicy=expand min=test_tf"+hint+"}");
+    params.add("fq", "{!collapse field="+group+" nullPolicy=expand min=test_f"+hint+"}");
     params.add("defType", "edismax");
-    params.add("bf", "field(test_ti)");
+    params.add("bf", "field(test_i)");
     params.add("sort","");
     assertQ(req(params), "*[count(//doc)=4]",
         "//result/doc[1]/float[@name='id'][.='3.0']",
@@ -405,8 +399,8 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     // Test value source collapse criteria
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" nullPolicy=collapse min=field(test_ti)"+hint+"}");
-    params.add("sort", "test_ti desc");
+    params.add("fq", "{!collapse field="+group+" nullPolicy=collapse min=field(test_i)"+hint+"}");
+    params.add("sort", "test_i desc");
     assertQ(req(params), "*[count(//doc)=3]",
         "//result/doc[1]/float[@name='id'][.='4.0']",
         "//result/doc[2]/float[@name='id'][.='1.0']",
@@ -418,7 +412,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     params.add("q", "*:*");
     params.add("fq", "{!collapse field="+group+" nullPolicy=collapse min=cscore()"+hint+"}");
     params.add("defType", "edismax");
-    params.add("bf", "field(test_ti)");
+    params.add("bf", "field(test_i)");
     assertQ(req(params), "*[count(//doc)=3]",
         "//result/doc[1]/float[@name='id'][.='4.0']",
         "//result/doc[2]/float[@name='id'][.='1.0']",
@@ -430,7 +424,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     params.add("q", "*:*");
     params.add("fq", "{!collapse field="+group+" nullPolicy=collapse min=cscore()"+hint+"}");
     params.add("defType", "edismax");
-    params.add("bf", "field(test_ti)");
+    params.add("bf", "field(test_i)");
     params.add("fl", "id");
     params.add("sort", "id desc");
     assertQ(req(params), "*[count(//doc)=3]",
@@ -442,9 +436,9 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     // Test value source collapse criteria with compound cscore function
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" nullPolicy=collapse min=sum(cscore(),field(test_ti))"+hint+"}");
+    params.add("fq", "{!collapse field="+group+" nullPolicy=collapse min=sum(cscore(),field(test_i))"+hint+"}");
     params.add("defType", "edismax");
-    params.add("bf", "field(test_ti)");
+    params.add("bf", "field(test_i)");
     assertQ(req(params), "*[count(//doc)=3]",
         "//result/doc[1]/float[@name='id'][.='4.0']",
         "//result/doc[2]/float[@name='id'][.='1.0']",
@@ -457,7 +451,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     params.add("q", "YYYY");
     params.add("fq", "{!collapse field="+group+" nullPolicy=collapse"+hint+"}");
     params.add("defType", "edismax");
-    params.add("bf", "field(test_ti)");
+    params.add("bf", "field(test_i)");
     params.add("qf", "term_s");
     params.add("qt", "/elevate");
     assertQ(req(params), "*[count(//doc)=4]",
@@ -473,7 +467,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
       params.add("q", "YYYY");
       params.add("fq", "{!collapse field="+group + maxscore + " nullPolicy=collapse"+hint+"}");
       params.add("defType", "edismax");
-      params.add("bf", "field(test_ti)");
+      params.add("bf", "field(test_i)");
       params.add("qf", "term_s");
       params.add("qt", "/elevate");
       params.add("elevateIds", "1,5");
@@ -485,12 +479,12 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     
     //Test SOLR-5773 with max field collapse criteria
     // try both max & sort localparams as alternate ways to ask for max group head
-    for (String max : new String[] {" max=test_ti ", " sort='test_ti desc' "}) {
+    for (String max : new String[] {" max=test_i ", " sort='test_i desc' "}) {
       params = new ModifiableSolrParams();
       params.add("q", "YYYY");
       params.add("fq", "{!collapse field=" + group + max + "nullPolicy=collapse"+hint+"}");
       params.add("defType", "edismax");
-      params.add("bf", "field(test_ti)");
+      params.add("bf", "field(test_i)");
       params.add("qf", "term_s");
       params.add("qt", "/elevate");
       params.add("elevateIds", "1,5");
@@ -502,12 +496,12 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     
     //Test SOLR-5773 with min field collapse criteria
     // try both min & sort localparams as alternate ways to ask for min group head
-    for (String min : new String[] {" min=test_ti ", " sort='test_ti asc' "}) {
+    for (String min : new String[] {" min=test_i ", " sort='test_i asc' "}) {
       params = new ModifiableSolrParams();
       params.add("q", "YYYY");
       params.add("fq", "{!collapse field=" + group + min + "nullPolicy=collapse"+hint+"}");
       params.add("defType", "edismax");
-      params.add("bf", "field(test_ti)");
+      params.add("bf", "field(test_i)");
       params.add("qf", "term_s");
       params.add("qt", "/elevate");
       params.add("elevateIds", "1,5");
@@ -522,7 +516,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     params.add("q", "YYYY");
     params.add("fq", "{!collapse field="+group+""+hint+"}");
     params.add("defType", "edismax");
-    params.add("bf", "field(test_ti)");
+    params.add("bf", "field(test_i)");
     params.add("qf", "term_s");
     params.add("qt", "/elevate");
     params.add("elevateIds", "3,4");
@@ -536,7 +530,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     // Non trivial sort local param for picking group head
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" nullPolicy=collapse sort='term_s asc, test_ti asc' "+hint+"}");
+    params.add("fq", "{!collapse field="+group+" nullPolicy=collapse sort='term_s asc, test_i asc' "+hint+"}");
     params.add("sort", "id desc");
     assertQ(req(params),
             "*[count(//doc)=3]",
@@ -547,7 +541,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     // 
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" nullPolicy=collapse sort='term_s asc, test_ti desc' "+hint+"}");
+    params.add("fq", "{!collapse field="+group+" nullPolicy=collapse sort='term_s asc, test_i desc' "+hint+"}");
     params.add("sort", "id desc");
     assertQ(req(params),
             "*[count(//doc)=3]",
@@ -560,7 +554,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
 
     // Test collapse by min int field and top level sort
     // try both min & sort localparams as alternate ways to ask for min group head
-    for (String min : new String[] {" min=test_ti ", " sort='test_ti asc' "}) {
+    for (String min : new String[] {" min=test_i ", " sort='test_i asc' "}) {
       params = new ModifiableSolrParams();
       params.add("q", "*:*");
       params.add("fq", "{!collapse field="+group + min + hint+"}");
@@ -582,7 +576,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
       params = new ModifiableSolrParams();
       params.add("q", "*:*");
       params.add("fq", "{!collapse field="+group + min + hint+"}");
-      params.add("sort", "test_tl asc,id desc");
+      params.add("sort", "test_l asc,id desc");
       assertQ(req(params),
               "*[count(//doc)=2]",
               "//result/doc[1]/float[@name='id'][.='5.0']",
@@ -604,8 +598,8 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     //Test collapse by max int field
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" max=test_ti"+hint+"}");
-    params.add("sort", "test_ti asc");
+    params.add("fq", "{!collapse field="+group+" max=test_i"+hint+"}");
+    params.add("sort", "test_i asc");
     assertQ(req(params), "*[count(//doc)=2]",
                          "//result/doc[1]/float[@name='id'][.='6.0']",
                          "//result/doc[2]/float[@name='id'][.='2.0']"
@@ -615,8 +609,8 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
       //Test collapse by min long field
       params = new ModifiableSolrParams();
       params.add("q", "*:*");
-      params.add("fq", "{!collapse field="+group+" min=test_tl"+hint+"}");
-      params.add("sort", "test_ti desc");
+      params.add("fq", "{!collapse field="+group+" min=test_l"+hint+"}");
+      params.add("sort", "test_i desc");
       assertQ(req(params), "*[count(//doc)=2]",
           "//result/doc[1]/float[@name='id'][.='1.0']",
           "//result/doc[2]/float[@name='id'][.='5.0']");
@@ -625,8 +619,8 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
       //Test collapse by max long field
       params = new ModifiableSolrParams();
       params.add("q", "*:*");
-      params.add("fq", "{!collapse field="+group+" max=test_tl"+hint+"}");
-      params.add("sort", "test_ti desc");
+      params.add("fq", "{!collapse field="+group+" max=test_l"+hint+"}");
+      params.add("sort", "test_i desc");
       assertQ(req(params), "*[count(//doc)=2]",
                            "//result/doc[1]/float[@name='id'][.='2.0']",
                            "//result/doc[2]/float[@name='id'][.='6.0']");
@@ -640,8 +634,8 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     //Test collapse by min float field
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" min=test_tf"+hint+"}");
-    params.add("sort", "test_ti desc");
+    params.add("fq", "{!collapse field="+group+" min=test_f"+hint+"}");
+    params.add("sort", "test_i desc");
     assertQ(req(params), "*[count(//doc)=2]",
                          "//result/doc[1]/float[@name='id'][.='2.0']",
                          "//result/doc[2]/float[@name='id'][.='6.0']");
@@ -649,8 +643,8 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     //Test collapse by min float field
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" max=test_tf"+hint+"}");
-    params.add("sort", "test_ti asc");
+    params.add("fq", "{!collapse field="+group+" max=test_f"+hint+"}");
+    params.add("sort", "test_i asc");
     assertQ(req(params), "*[count(//doc)=2]",
                          "//result/doc[1]/float[@name='id'][.='5.0']",
                          "//result/doc[2]/float[@name='id'][.='1.0']");
@@ -658,7 +652,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     //Test collapse by min float field sort by score
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" max=test_tf"+hint+"}");
+    params.add("fq", "{!collapse field="+group+" max=test_f"+hint+"}");
     params.add("defType", "edismax");
     params.add("bf", "field(id)");
     params.add("fl", "score, id");
@@ -673,8 +667,8 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     // Test collapse using selector field in no docs
     // tie selector in all of these cases
     for (String selector : new String[] {
-        " min=bogus_ti ", " sort='bogus_ti asc' ",
-        " max=bogus_ti ", " sort='bogus_ti desc' ",
+        " min=bogus_i ", " sort='bogus_i asc' ",
+        " max=bogus_i ", " sort='bogus_i desc' ",
         " min=bogus_tf ", " sort='bogus_tf asc' ",
         " max=bogus_tf ", " sort='bogus_tf desc' ",
         " sort='bogus_td asc' ", " sort='bogus_td desc' ",
@@ -695,16 +689,16 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     
     // attempting to use cscore() in sort local param should fail
     assertQEx("expected error trying to sort on a function that includes cscore()",
-              req(params("q", "{!func}sub(sub(test_tl,1000),id)",
+              req(params("q", "{!func}sub(sub(test_l,1000),id)",
                          "fq", "{!collapse field="+group+" sort='abs(cscore()) asc, id asc'}",
                          "sort", "score asc")),
               SolrException.ErrorCode.BAD_REQUEST);
     
     // multiple params for picking groupHead should all fail
     for (String bad : new String[] {
-        "{!collapse field="+group+" min=test_tf max=test_tf}",
-        "{!collapse field="+group+" min=test_tf sort='test_tf asc'}",
-        "{!collapse field="+group+" max=test_tf sort='test_tf asc'}" }) {
+        "{!collapse field="+group+" min=test_f max=test_f}",
+        "{!collapse field="+group+" min=test_f sort='test_f asc'}",
+        "{!collapse field="+group+" max=test_f sort='test_f asc'}" }) {
       assertQEx("Expected error: " + bad, req(params("q", "*:*", "fq", bad)),
                 SolrException.ErrorCode.BAD_REQUEST);
     }
@@ -713,15 +707,15 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     // sort used
     for (SolrParams collapse : new SolrParams[] {
         // these should all be equivilently valid
-        params("fq", "{!collapse field="+group+" nullPolicy=collapse sort='test_ti asc'"+hint+"}"),
-        params("fq", "{!collapse field="+group+" nullPolicy=collapse min='' sort='test_ti asc'"+hint+"}"),
-        params("fq", "{!collapse field="+group+" nullPolicy=collapse max='' sort='test_ti asc'"+hint+"}"),
-        params("fq", "{!collapse field="+group+" nullPolicy=collapse min=$x sort='test_ti asc'"+hint+"}"),
-        params("fq", "{!collapse field="+group+" nullPolicy=collapse min=$x sort='test_ti asc'"+hint+"}",
+        params("fq", "{!collapse field="+group+" nullPolicy=collapse sort='test_i asc'"+hint+"}"),
+        params("fq", "{!collapse field="+group+" nullPolicy=collapse min='' sort='test_i asc'"+hint+"}"),
+        params("fq", "{!collapse field="+group+" nullPolicy=collapse max='' sort='test_i asc'"+hint+"}"),
+        params("fq", "{!collapse field="+group+" nullPolicy=collapse min=$x sort='test_i asc'"+hint+"}"),
+        params("fq", "{!collapse field="+group+" nullPolicy=collapse min=$x sort='test_i asc'"+hint+"}",
                "x",""),
       }) {
       
-      assertQ(req(collapse, "q", "*:*", "sort", "test_ti desc"),
+      assertQ(req(collapse, "q", "*:*", "sort", "test_i desc"),
               "*[count(//doc)=3]",
               "//result/doc[1]/float[@name='id'][.='4.0']",
               "//result/doc[2]/float[@name='id'][.='1.0']",
@@ -732,7 +726,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     //Test nullPolicy expand
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" max=test_tf nullPolicy=expand"+hint+"}");
+    params.add("fq", "{!collapse field="+group+" max=test_f nullPolicy=expand"+hint+"}");
     params.add("sort", "id desc");
     assertQ(req(params), "*[count(//doc)=4]",
         "//result/doc[1]/float[@name='id'][.='5.0']",
@@ -743,7 +737,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     //Test nullPolicy collapse
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" max=test_tf nullPolicy=collapse"+hint+"}");
+    params.add("fq", "{!collapse field="+group+" max=test_f nullPolicy=collapse"+hint+"}");
     params.add("sort", "id desc");
     assertQ(req(params), "*[count(//doc)=3]",
         "//result/doc[1]/float[@name='id'][.='5.0']",
@@ -755,12 +749,12 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     params.add("q", "*:*");
     params.add("fq", "{!collapse field="+group+hint+"}");
     params.add("defType", "edismax");
-    params.add("bf", "field(test_ti)");
-    params.add("fq","{!tag=test_ti}id:5");
+    params.add("bf", "field(test_i)");
+    params.add("fq","{!tag=test_i}id:5");
     params.add("facet","true");
-    params.add("facet.field","{!ex=test_ti}test_ti");
+    params.add("facet.field","{!ex=test_i}test_i");
     params.add("facet.mincount", "1");
-    assertQ(req(params), "*[count(//doc)=1]", "*[count(//lst[@name='facet_fields']/lst[@name='test_ti']/int)=2]");
+    assertQ(req(params), "*[count(//doc)=1]", "*[count(//lst[@name='facet_fields']/lst[@name='test_i']/int)=2]");
 
     // SOLR-5230 - ensure CollapsingFieldValueCollector.finish() is called
     params = new ModifiableSolrParams();
@@ -779,7 +773,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     params.add("q", "YYYY");
     params.add("fq", "{!collapse field="+group+hint+" nullPolicy=collapse}");
     params.add("defType", "edismax");
-    params.add("bf", "field(test_ti)");
+    params.add("bf", "field(test_i)");
     params.add("qf", "term_s");
     params.add("qt", "/elevate");
     assertQ(req(params), "*[count(//doc)=3]",
@@ -805,7 +799,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     String group = (random().nextBoolean() ? "group_s" : "group_s_dv");
 
     // min-or-max is for CollapsingScoreCollector vs. CollapsingFieldValueCollector
-    String optional_min_or_max = (random().nextBoolean() ? "" : (random().nextBoolean() ? "min=field(test_ti)" : "max=field(test_ti)"));
+    String optional_min_or_max = (random().nextBoolean() ? "" : (random().nextBoolean() ? "min=field(test_i)" : "max=field(test_i)"));
     
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.add("q", "*:*");
@@ -817,17 +811,17 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     // as unlikely as this test seems, it's important for the possibility that a segment exists w/o
     // any live docs that have DocValues for the group field -- ie: every doc in segment is in null group.
     
-    assertU(adoc("id", "1", "group_s", "group1", "test_ti", "5", "test_tl", "10"));
+    assertU(adoc("id", "1", "group_s", "group1", "test_i", "5", "test_l", "10"));
     assertU(commit());
-    assertU(adoc("id", "2", "group_s", "group1", "test_ti", "5", "test_tl", "1000"));
-    assertU(adoc("id", "3", "group_s", "group1", "test_ti", "5", "test_tl", "1000"));
-    assertU(adoc("id", "4", "group_s", "group1", "test_ti", "10", "test_tl", "100"));
+    assertU(adoc("id", "2", "group_s", "group1", "test_i", "5", "test_l", "1000"));
+    assertU(adoc("id", "3", "group_s", "group1", "test_i", "5", "test_l", "1000"));
+    assertU(adoc("id", "4", "group_s", "group1", "test_i", "10", "test_l", "100"));
     //
-    assertU(adoc("id", "5", "group_s", "group2", "test_ti", "5", "test_tl", "10", "term_s", "YYYY"));
+    assertU(adoc("id", "5", "group_s", "group2", "test_i", "5", "test_l", "10", "term_s", "YYYY"));
     assertU(commit());
-    assertU(adoc("id", "6", "group_s", "group2", "test_ti", "5", "test_tl","1000"));
-    assertU(adoc("id", "7", "group_s", "group2", "test_ti", "5", "test_tl","1000", "term_s", "XXXX"));
-    assertU(adoc("id", "8", "group_s", "group2", "test_ti", "10","test_tl", "100"));
+    assertU(adoc("id", "6", "group_s", "group2", "test_i", "5", "test_l","1000"));
+    assertU(adoc("id", "7", "group_s", "group2", "test_i", "5", "test_l","1000", "term_s", "XXXX"));
+    assertU(adoc("id", "8", "group_s", "group2", "test_i", "10","test_l", "100"));
     assertU(commit());
     
     // none of these grouping fields are in any doc
@@ -835,17 +829,17 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
         "field=bogus_s", "field=bogus_s_dv",
         "field=bogus_s hint=top_fc", // alternative docvalues codepath w/ hint
         "field=bogus_s_dv hint=top_fc", // alternative docvalues codepath w/ hint
-        "field=bogus_ti", "field=bogus_tf" }) {
+        "field=bogus_i", "field=bogus_tf" }) {
       
       // for any of these selectors, behavior of these checks should be consistent
       for (String selector : new String[] {
           "", " sort='score desc' ",
-          " min=test_ti ", " max=test_ti ", " sort='test_ti asc' ",  " sort='test_ti desc' ",
-          " min=test_tf ", " max=test_tf ", " sort='test_tf asc' ",  " sort='test_tf desc' ",
+          " min=test_i ", " max=test_i ", " sort='test_i asc' ",  " sort='test_i desc' ",
+          " min=test_f ", " max=test_f ", " sort='test_f asc' ",  " sort='test_f desc' ",
           " sort='group_s asc' ",  " sort='group_s desc' ",
           // fields that don't exist
-          " min=bogus_sort_ti ", " max=bogus_sort_ti ",
-          " sort='bogus_sort_ti asc' ",  " sort='bogus_sort_ti desc' ",
+          " min=bogus_sort_i ", " max=bogus_sort_i ",
+          " sort='bogus_sort_i asc' ",  " sort='bogus_sort_i desc' ",
           " sort='bogus_sort_s asc' ",  " sort='bogus_sort_s desc' ",
         }) {
           

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b7042c1f/solr/core/src/test/org/apache/solr/search/TestRandomCollapseQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestRandomCollapseQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestRandomCollapseQParserPlugin.java
index 7d135e2..bbb2623 100644
--- a/solr/core/src/test/org/apache/solr/search/TestRandomCollapseQParserPlugin.java
+++ b/solr/core/src/test/org/apache/solr/search/TestRandomCollapseQParserPlugin.java
@@ -20,11 +20,9 @@ import java.util.List;
 import java.util.ArrayList;
 import java.util.Arrays;
 
-import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.CursorPagingTest;
 import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.SolrTestCaseJ4.SuppressPointFields;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
 import org.apache.solr.client.solrj.response.QueryResponse;
@@ -37,9 +35,6 @@ import static org.apache.solr.search.CollapsingQParserPlugin.NULL_EXPAND;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 
-//We want codecs that support DocValues, and ones supporting blank/empty values.
-@SuppressCodecs({"Appending","Lucene3x","Lucene40","Lucene41","Lucene42"})
-@SuppressPointFields
 public class TestRandomCollapseQParserPlugin extends SolrTestCaseJ4 {
 
   /** Full SolrServer instance for arbitrary introspection of response data and adding fqs */


[42/46] lucene-solr:jira/solr-9959: SOLR-6615: use constants for 'id', '_route_', '_version_'

Posted by ab...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/solrj/src/java/org/apache/solr/client/solrj/request/JavaBinUpdateRequestCodec.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/JavaBinUpdateRequestCodec.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/JavaBinUpdateRequestCodec.java
index 7776a13..afa746e 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/JavaBinUpdateRequestCodec.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/JavaBinUpdateRequestCodec.java
@@ -30,6 +30,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
 
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.params.ShardParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.DataInputInputStream;
 import org.apache.solr.common.util.JavaBinCodec;
@@ -226,8 +227,8 @@ public class JavaBinUpdateRequestCodec {
         Map<String,Object> params = entry.getValue();
         if (params != null) {
           Long version = (Long) params.get(UpdateRequest.VER);
-          if (params.containsKey(UpdateRequest.ROUTE))
-            updateRequest.deleteById(entry.getKey(), (String) params.get(UpdateRequest.ROUTE));
+          if (params.containsKey(ShardParams._ROUTE_))
+            updateRequest.deleteById(entry.getKey(), (String) params.get(ShardParams._ROUTE_));
           else
           updateRequest.deleteById(entry.getKey(), version);
         } else {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java
index e7ca0fa..142710a 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java
@@ -44,6 +44,8 @@ import org.apache.solr.common.params.UpdateParams;
 import org.apache.solr.common.util.ContentStream;
 import org.apache.solr.common.util.XML;
 
+import static org.apache.solr.common.params.ShardParams._ROUTE_;
+
 /**
  * 
  * 
@@ -54,7 +56,6 @@ public class UpdateRequest extends AbstractUpdateRequest {
   public static final String REPFACT = "rf";
   public static final String MIN_REPFACT = "min_rf";
   public static final String VER = "ver";
-  public static final String ROUTE = "_route_";
   public static final String OVERWRITE = "ow";
   public static final String COMMIT_WITHIN = "cw";
   private Map<SolrInputDocument,Map<String,Object>> documents = null;
@@ -188,7 +189,7 @@ public class UpdateRequest extends AbstractUpdateRequest {
     if (version != null)
       params.put(VER, version);
     if (route != null)
-      params.put(ROUTE, route);
+      params.put(_ROUTE_, route);
     deleteById.put(id, params);
     return this;
   }
@@ -221,7 +222,7 @@ public class UpdateRequest extends AbstractUpdateRequest {
   public UpdateRequest withRoute(String route) {
     if (params == null)
       params = new ModifiableSolrParams();
-    params.set(ROUTE, route);
+    params.set(_ROUTE_, route);
     return this;
   }
 
@@ -461,7 +462,7 @@ public class UpdateRequest extends AbstractUpdateRequest {
           Map<String,Object> map = entry.getValue();
           if (map != null) {
             Long version = (Long) map.get(VER);
-            String route = (String)map.get(ROUTE);
+            String route = (String)map.get(_ROUTE_);
             if (version != null) {
               writer.append(" version=\"" + version + "\"");
             }
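
A short SolrJ sketch of the call paths these hunks touch (illustrative only; the id and route values are placeholders, and the route ends up in the _route_ / ShardParams._ROUTE_ entry shown above):

import org.apache.solr.client.solrj.request.UpdateRequest;

public class DeleteByIdWithRouteSketch {
  public static void main(String[] args) throws Exception {
    UpdateRequest req = new UpdateRequest();
    req.deleteById("doc-1", "routeKey!"); // per-delete route, stored under _route_
    req.withRoute("routeKey!");           // request-level route param
    System.out.println(req.getXML());     // serialized update, including the delete
  }
}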

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/solrj/src/java/org/apache/solr/common/ToleratedUpdateError.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/ToleratedUpdateError.java b/solr/solrj/src/java/org/apache/solr/common/ToleratedUpdateError.java
index 55b9b8c..9c4ac9a 100644
--- a/solr/solrj/src/java/org/apache/solr/common/ToleratedUpdateError.java
+++ b/solr/solrj/src/java/org/apache/solr/common/ToleratedUpdateError.java
@@ -18,9 +18,11 @@ package org.apache.solr.common;
 
 import java.util.ArrayList;
 import java.util.List;
-import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.common.SolrException;
+
 import org.apache.solr.common.SolrException.ErrorCode;
+import org.apache.solr.common.util.SimpleOrderedMap;
+
+import static org.apache.solr.common.params.CommonParams.ID;
 
 /**
  * Models the basic information related to a single "tolerated" error that occured during updates.  
@@ -74,7 +76,7 @@ public final class ToleratedUpdateError {
    * @see #getSimpleMap
    */
   public static ToleratedUpdateError parseMap(SimpleOrderedMap<String> data) {
-    final String id = data.get("id");
+    final String id = data.get(ID);
     final String message = data.get("message");
     final String t = data.get("type");
     if (null == t || null == id || null == message) {
@@ -156,7 +158,7 @@ public final class ToleratedUpdateError {
   public SimpleOrderedMap<String> getSimpleMap() {
     SimpleOrderedMap<String> entry = new SimpleOrderedMap<String>();
     entry.add("type", type.toString());
-    entry.add("id", id);
+    entry.add(ID, id);
     entry.add("message", message);
     return entry;
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/solrj/src/java/org/apache/solr/common/cloud/HashBasedRouter.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/HashBasedRouter.java b/solr/solrj/src/java/org/apache/solr/common/cloud/HashBasedRouter.java
index f9ab5d4..5e19d38 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/HashBasedRouter.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/HashBasedRouter.java
@@ -16,13 +16,15 @@
  */
 package org.apache.solr.common.cloud;
 
+import java.util.Collection;
+import java.util.Collections;
+
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.Hash;
 
-import java.util.Collection;
-import java.util.Collections;
+import static org.apache.solr.common.params.CommonParams.ID;
 
 public abstract class HashBasedRouter extends DocRouter {
 
@@ -51,7 +53,7 @@ public abstract class HashBasedRouter extends DocRouter {
   }
 
   protected String getId(SolrInputDocument sdoc, SolrParams params) {
-    Object  idObj = sdoc.getFieldValue("id");  // blech
+    Object  idObj = sdoc.getFieldValue(ID);  // blech
     String id = idObj != null ? idObj.toString() : "null";  // should only happen on client side
     return id;
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java b/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java
index 7cf27d2..589ef7e 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java
@@ -259,5 +259,9 @@ public interface CommonParams {
 
   String NAME = "name";
   String VALUE_LONG = "val";
+
+  String VERSION_FIELD="_version_";
+
+  String ID = "id";
 }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eb587772/solr/solrj/src/java/org/apache/solr/common/params/ShardParams.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/ShardParams.java b/solr/solrj/src/java/org/apache/solr/common/params/ShardParams.java
index 2686573..cbc33f4 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/ShardParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/ShardParams.java
@@ -25,35 +25,35 @@ package org.apache.solr.common.params;
  */
 public interface ShardParams {
   /** the shards to use (distributed configuration) */
-  public static final String SHARDS = "shards";
+  String SHARDS = "shards";
   
   /** per-shard start and rows */
-  public static final String SHARDS_ROWS = "shards.rows";
-  public static final String SHARDS_START = "shards.start";
+  String SHARDS_ROWS = "shards.rows";
+  String SHARDS_START = "shards.start";
   
   /** IDs of the shard documents */
-  public static final String IDS = "ids";
+  String IDS = "ids";
   
   /** whether the request goes to a shard */
-  public static final String IS_SHARD = "isShard";
+  String IS_SHARD = "isShard";
   
   /** The requested URL for this shard */
-  public static final String SHARD_URL = "shard.url";
+  String SHARD_URL = "shard.url";
   
   /** The Request Handler for shard requests */
-  public static final String SHARDS_QT = "shards.qt";
+  String SHARDS_QT = "shards.qt";
   
   /** Request detailed match info for each shard (true/false) */
-  public static final String SHARDS_INFO = "shards.info";
+  String SHARDS_INFO = "shards.info";
 
   /** Should things fail if there is an error? (true/false) */
-  public static final String SHARDS_TOLERANT = "shards.tolerant";
+  String SHARDS_TOLERANT = "shards.tolerant";
   
   /** query purpose for shard requests */
-  public static final String SHARDS_PURPOSE = "shards.purpose";
+  String SHARDS_PURPOSE = "shards.purpose";
 
-  public static final String _ROUTE_ = "_route_";
+  String _ROUTE_ = "_route_";
 
   /** Force a single-pass distributed query? (true/false) */
-  public static final String DISTRIB_SINGLE_PASS = "distrib.singlePass";
+  String DISTRIB_SINGLE_PASS = "distrib.singlePass";
 }
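
For illustration, a minimal sketch (not part of this commit) of how SolrJ client code might use
the CommonParams.ID and ShardParams._ROUTE_ constants introduced above instead of hard-coded
string literals; the document id and route key below are made-up values:

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ShardParams;

public class RouteParamSketch {
  public static void main(String[] args) {
    // Key the document by the canonical id field name rather than the literal "id".
    SolrInputDocument doc = new SolrInputDocument();
    doc.addField(CommonParams.ID, "doc-1");

    // Route a query to a specific shard via _route_ rather than the literal "_route_".
    SolrQuery query = new SolrQuery("*:*");
    query.set(ShardParams._ROUTE_, "shard1!");
    System.out.println(query.toQueryString());
  }
}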


[27/46] lucene-solr:jira/solr-9959: SOLR-10286: fix test for Windows

Posted by ab...@apache.org.
SOLR-10286: fix test for Windows


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/e7d9db9d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/e7d9db9d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/e7d9db9d

Branch: refs/heads/jira/solr-9959
Commit: e7d9db9d6c4dccc158b52d53584ead93b7f55c38
Parents: 9b57545
Author: David Smiley <ds...@apache.org>
Authored: Sat Mar 18 10:42:34 2017 -0400
Committer: David Smiley <ds...@apache.org>
Committed: Sat Mar 18 10:42:34 2017 -0400

----------------------------------------------------------------------
 solr/core/src/test/org/apache/solr/search/LargeFieldTest.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e7d9db9d/solr/core/src/test/org/apache/solr/search/LargeFieldTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/LargeFieldTest.java b/solr/core/src/test/org/apache/solr/search/LargeFieldTest.java
index e32859b..57dc2de 100644
--- a/solr/core/src/test/org/apache/solr/search/LargeFieldTest.java
+++ b/solr/core/src/test/org/apache/solr/search/LargeFieldTest.java
@@ -45,7 +45,7 @@ public class LargeFieldTest extends SolrTestCaseJ4 {
     System.setProperty("documentCache.enabled", "true");
     System.setProperty("enableLazyFieldLoading", "true");
 
-    initCore("solrconfig-managed-schema.xml", "ignoredSchemaName?");
+    initCore("solrconfig-managed-schema.xml", "ignoredSchemaName");
 
     // TODO SOLR-10229 will make this easier
     boolean PERSIST_FALSE = false; // don't write to test resource dir


[25/46] lucene-solr:jira/solr-9959: Fix CHANGES.txt

Posted by ab...@apache.org.
Fix CHANGES.txt


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/258fddb9
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/258fddb9
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/258fddb9

Branch: refs/heads/jira/solr-9959
Commit: 258fddb91bd5cd8fb87e7fd38ffa31077cd91c6f
Parents: 3b66001
Author: Tomas Fernandez Lobbe <tf...@apache.org>
Authored: Fri Mar 17 13:45:29 2017 -0700
Committer: Tomas Fernandez Lobbe <tf...@apache.org>
Committed: Fri Mar 17 13:45:29 2017 -0700

----------------------------------------------------------------------
 lucene/CHANGES.txt | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/258fddb9/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 22c83f0..15f4ff3 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -229,7 +229,8 @@ Improvements
 * LUCENE-7695: ComplexPhraseQueryParser to support query time synonyms (Markus Jelsma
   via Mikhail Khludnev) 
 
-* LUCENE_7747: QueryBuilder now iterates lazily over the possible paths when building a graph query
+* LUCENE-7747: QueryBuilder now iterates lazily over the possible paths when building a graph query
+  (Jim Ferenczi)
 
 Optimizations
 


[19/46] lucene-solr:jira/solr-9959: SOLR-10286: fix precommit (unused imports)

Posted by ab...@apache.org.
SOLR-10286: fix precommit (unused imports)


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/4a55bc4e
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/4a55bc4e
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/4a55bc4e

Branch: refs/heads/jira/solr-9959
Commit: 4a55bc4e0f7a5b227f774fa3d7bbf4f1a4767eb1
Parents: d1b2fb3
Author: David Smiley <ds...@apache.org>
Authored: Thu Mar 16 21:11:39 2017 -0400
Committer: David Smiley <ds...@apache.org>
Committed: Thu Mar 16 21:11:39 2017 -0400

----------------------------------------------------------------------
 solr/core/src/test/org/apache/solr/search/LargeFieldTest.java | 3 ---
 1 file changed, 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4a55bc4e/solr/core/src/test/org/apache/solr/search/LargeFieldTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/LargeFieldTest.java b/solr/core/src/test/org/apache/solr/search/LargeFieldTest.java
index 09e7e90..e32859b 100644
--- a/solr/core/src/test/org/apache/solr/search/LargeFieldTest.java
+++ b/solr/core/src/test/org/apache/solr/search/LargeFieldTest.java
@@ -24,9 +24,6 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.LazyDocument;
 import org.apache.lucene.index.IndexableField;
 import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.common.util.ContentStreamBase;
-import org.apache.solr.request.SolrQueryRequestBase;
-import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.util.RefCounted;
 import org.junit.AfterClass;


[30/46] lucene-solr:jira/solr-9959: SOLR-10079: Speedup TestInPlaceUpdatesDistrib in new replication mode

Posted by ab...@apache.org.
SOLR-10079: Speedup TestInPlaceUpdatesDistrib in new replication mode


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/e0927394
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/e0927394
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/e0927394

Branch: refs/heads/jira/solr-9959
Commit: e09273943b41330ca224377e2e6b6ca54dce84eb
Parents: 4bc75db
Author: Cao Manh Dat <da...@apache.org>
Authored: Mon Mar 20 09:18:54 2017 +0700
Committer: Cao Manh Dat <da...@apache.org>
Committed: Mon Mar 20 09:18:54 2017 +0700

----------------------------------------------------------------------
 .../test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java    | 3 ---
 1 file changed, 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e0927394/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
index 4538e90..cbd7b02 100644
--- a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
+++ b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
@@ -323,9 +323,6 @@ public class TestInPlaceUpdatesDistrib extends AbstractFullDistribZkTestBase {
   }
 
   private void docValuesUpdateTest() throws Exception {
-    clearIndex();
-    commit();
-
     // number of docs we're testing (0 <= id), index may contain additional random docs (id < 0)
     final int numDocs = atLeast(100);
     log.info("Trying num docs = " + numDocs);


[10/46] lucene-solr:jira/solr-9959: SOLR-10283: Learning to Rank (LTR) SolrFeature to reject searches with missing efi (External Feature Information) used by fq.

Posted by ab...@apache.org.
SOLR-10283: Learning to Rank (LTR) SolrFeature to reject searches with missing efi (External Feature Information) used by fq.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/17cc3e7d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/17cc3e7d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/17cc3e7d

Branch: refs/heads/jira/solr-9959
Commit: 17cc3e7dad7eb2f691767fc5a08aac8e6d055bdc
Parents: d825737
Author: Christine Poerschke <cp...@apache.org>
Authored: Thu Mar 16 14:10:28 2017 +0000
Committer: Christine Poerschke <cp...@apache.org>
Committed: Thu Mar 16 14:10:28 2017 +0000

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  3 +++
 .../apache/solr/ltr/feature/SolrFeature.java    |  3 +++
 .../featureExamples/external_features.json      |  6 +++++
 .../solr/ltr/TestSelectiveWeightCreation.java   |  8 +++++--
 .../solr/ltr/feature/TestExternalFeatures.java  | 25 ++++++++++++++++----
 5 files changed, 39 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/17cc3e7d/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 5e2a518..2d4ce6c 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -260,6 +260,9 @@ Bug Fixes
 
 * SOLR-10302: Solr's zkcli scripts now able to find the metrics libraries, which it couldn't earlier (kiran, Ishan Chattopadhyaya)
 
+* SOLR-10283: Learning to Rank (LTR) SolrFeature to reject searches with missing efi (External Feature Information) used by fq.
+  (Christine Poerschke)
+
 Optimizations
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/17cc3e7d/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/SolrFeature.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/SolrFeature.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/SolrFeature.java
index 13eb96f..4aa872d 100644
--- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/SolrFeature.java
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/SolrFeature.java
@@ -157,6 +157,9 @@ public class SolrFeature extends Feature {
           for (String fq : fqs) {
             if ((fq != null) && (fq.trim().length() != 0)) {
               fq = macroExpander.expand(fq);
+              if (fq == null) {
+                throw new FeatureException(this.getClass().getSimpleName()+" requires efi parameter that was not passed in request.");
+              }
               final QParser fqp = QParser.getParser(fq, req);
               final Query filterQuery = fqp.getQuery();
               if (filterQuery != null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/17cc3e7d/solr/contrib/ltr/src/test-files/featureExamples/external_features.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/featureExamples/external_features.json b/solr/contrib/ltr/src/test-files/featureExamples/external_features.json
index d8a9eca..5c4f12d 100644
--- a/solr/contrib/ltr/src/test-files/featureExamples/external_features.json
+++ b/solr/contrib/ltr/src/test-files/featureExamples/external_features.json
@@ -48,4 +48,10 @@
     "params" : {
         "q" : "{!field f=title}${user_query}"
     }
+}, {
+    "name" : "titlePhrasesMatch",
+    "class" : "org.apache.solr.ltr.feature.SolrFeature",
+    "params" : {
+        "fq" : [ "{!field f=title}${userTitlePhrase1}", "{!field f=title}${userTitlePhrase2}"]
+    }
 } ]

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/17cc3e7d/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestSelectiveWeightCreation.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestSelectiveWeightCreation.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestSelectiveWeightCreation.java
index 5cfd999..7bf8373 100644
--- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestSelectiveWeightCreation.java
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestSelectiveWeightCreation.java
@@ -210,18 +210,22 @@ public class TestSelectiveWeightCreation extends TestRerankBase {
   @Test
   public void testSelectiveWeightsRequestFeaturesFromDifferentStore() throws Exception {
 
-    final String docs0fv = FeatureLoggerTestUtils.toFeatureVector(
+    final String docs0fv_sparse = FeatureLoggerTestUtils.toFeatureVector(
         "matchedTitle","1.0", "titlePhraseMatch","0.6103343");
+    final String docs0fv_dense = FeatureLoggerTestUtils.toFeatureVector(
+        "matchedTitle","1.0", "titlePhraseMatch","0.6103343", "titlePhrasesMatch","0.0");
     final String docs0fv_fstore4= FeatureLoggerTestUtils.toFeatureVector(
         "popularity","3.0", "originalScore","1.0");
 
+    final String docs0fv = chooseDefaultFeatureVector(docs0fv_dense, docs0fv_sparse);
+
     // extract all features in externalmodel's store (default store)
     // rerank using externalmodel (default store)
     final SolrQuery query = new SolrQuery();
     query.setQuery("*:*");
     query.add("fl", "*,score,fv:[fv]");
     query.add("rows", "5");
-    query.add("rq", "{!ltr reRankDocs=10 model=externalmodel efi.user_query=w3}");
+    query.add("rq", "{!ltr reRankDocs=10 model=externalmodel efi.user_query=w3 efi.userTitlePhrase1=w2 efi.userTitlePhrase2=w1}");
 
     assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='3'");
     assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='4'");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/17cc3e7d/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestExternalFeatures.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestExternalFeatures.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestExternalFeatures.java
index 4010ee1..c6ae30f 100644
--- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestExternalFeatures.java
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestExternalFeatures.java
@@ -67,7 +67,7 @@ public class TestExternalFeatures extends TestRerankBase {
 
     query.remove("fl");
     query.add("fl", "*,score,[fv]");
-    query.add("rq", "{!ltr reRankDocs=10 model=externalmodel efi.user_query=w3}");
+    query.add("rq", "{!ltr reRankDocs=10 model=externalmodel efi.user_query=w3 efi.userTitlePhrase1=w4 efi.userTitlePhrase2=w5}");
 
     assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='3'");
     assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==0.7693934");
@@ -77,7 +77,7 @@ public class TestExternalFeatures extends TestRerankBase {
     // Adding an efi in the transformer should not affect the rq ranking with a
     // different value for efi of the same parameter
     query.remove("fl");
-    query.add("fl", "*,score,[fv efi.user_query=w2]");
+    query.add("fl", "*,score,[fv efi.user_query=w2 efi.userTitlePhrase1=w4 efi.userTitlePhrase2=w5]");
 
     assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='3'");
     assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==0.7693934");
@@ -92,11 +92,12 @@ public class TestExternalFeatures extends TestRerankBase {
     query.add("fl", "*,score,fv:[fv]");
     query.add("rows", "1");
     // Stopword only query passed in
-    query.add("rq", "{!ltr reRankDocs=10 model=externalmodel efi.user_query='a'}");
+    query.add("rq", "{!ltr reRankDocs=10 model=externalmodel efi.user_query='a' efi.userTitlePhrase1='b' efi.userTitlePhrase2='c'}");
 
     final String docs0fv_dense_csv = FeatureLoggerTestUtils.toFeatureVector(
         "matchedTitle","0.0",
-        "titlePhraseMatch","0.0");
+        "titlePhraseMatch","0.0",
+        "titlePhrasesMatch","0.0");
     final String docs0fv_sparse_csv = FeatureLoggerTestUtils.toFeatureVector();
 
     final String docs0fv_default_csv = chooseDefaultFeatureVector(docs0fv_dense_csv, docs0fv_sparse_csv);
@@ -181,4 +182,20 @@ public class TestExternalFeatures extends TestRerankBase {
     query.add("fl", "fvalias:[fv store=fstore4]");
     assertJQ("/query" + query.toQueryString(), "/error/msg=='Exception from createWeight for ValueFeature [name=popularity, params={value=${myPop}, required=true}] ValueFeatureWeight requires efi parameter that was not passed in request.'");
   }
+
+  @Test
+  public void featureExtraction_valueFeatureRequiredInFq_shouldThrowException() throws Exception {
+    final String userTitlePhrase1 = "userTitlePhrase1";
+    final String userTitlePhrase2 = "userTitlePhrase2";
+    final String userTitlePhrasePresent = (random().nextBoolean() ? userTitlePhrase1 : userTitlePhrase2);
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("*:*");
+    query.add("rows", "1");
+    query.add("fl", "score,features:[fv efi.user_query=uq "+userTitlePhrasePresent+"=utpp]");
+    assertJQ("/query" + query.toQueryString(), "/error/msg=='Exception from createWeight for "
+        + "SolrFeature [name=titlePhrasesMatch, params={fq=[{!field f=title}${"+userTitlePhrase1+"}, {!field f=title}${"+userTitlePhrase2+"}]}] "
+        + "SolrFeatureWeight requires efi parameter that was not passed in request.'");
+  }
+
 }
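
As a usage illustration only (not part of this commit): with this change every ${...} placeholder
referenced by a SolrFeature's fq must be supplied as an efi parameter at request time, otherwise
feature extraction fails with a FeatureException instead of silently matching nothing. A minimal
SolrJ sketch, reusing the externalmodel model and the efi names from the tests above:

import org.apache.solr.client.solrj.SolrQuery;

public class LtrEfiSketch {
  public static void main(String[] args) {
    SolrQuery query = new SolrQuery("*:*");
    query.add("fl", "*,score,fv:[fv]");
    query.add("rows", "5");
    // user_query, userTitlePhrase1 and userTitlePhrase2 cover all ${...} references
    // used by the features in external_features.json.
    query.add("rq", "{!ltr reRankDocs=10 model=externalmodel"
        + " efi.user_query=w3 efi.userTitlePhrase1=w2 efi.userTitlePhrase2=w1}");
    System.out.println(query.toQueryString());
  }
}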


[32/46] lucene-solr:jira/solr-9959: SOLR-9992: Update changes.txt

Posted by ab...@apache.org.
SOLR-9992: Update changes.txt


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/fb296fd5
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/fb296fd5
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/fb296fd5

Branch: refs/heads/jira/solr-9959
Commit: fb296fd5fc91faeba1b1ab7072d40e71eec17fcf
Parents: 21adce4
Author: Cao Manh Dat <da...@apache.org>
Authored: Mon Mar 20 17:30:40 2017 +0700
Committer: Cao Manh Dat <da...@apache.org>
Committed: Mon Mar 20 17:30:40 2017 +0700

----------------------------------------------------------------------
 solr/CHANGES.txt | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/fb296fd5/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index b7e55cc..e628694 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -101,8 +101,13 @@ Apache UIMA 2.3.1
 Apache ZooKeeper 3.4.6
 Jetty 9.3.14.v20161028
 
+Detailed Change List
+----------------------
 
-(No Changes)
+New Features
+----------------------
+
+* SOLR-9992: Add support for grouping with PointFIelds. (Cao Manh Dat) 
 
 
 ==================  6.5.0 ==================


[09/46] lucene-solr:jira/solr-9959: SOLR-10302: Solr's zkcli scripts now able to find the metrics libraries

Posted by ab...@apache.org.
SOLR-10302: Solr's zkcli scripts now able to find the metrics libraries


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d8257370
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d8257370
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d8257370

Branch: refs/heads/jira/solr-9959
Commit: d82573704856fea7fe3980e9f4c0b995fafb718f
Parents: b46e09c
Author: Ishan Chattopadhyaya <is...@apache.org>
Authored: Thu Mar 16 05:32:27 2017 +0530
Committer: Ishan Chattopadhyaya <is...@apache.org>
Committed: Thu Mar 16 05:33:29 2017 +0530

----------------------------------------------------------------------
 solr/CHANGES.txt                            | 2 ++
 solr/server/scripts/cloud-scripts/zkcli.bat | 2 +-
 solr/server/scripts/cloud-scripts/zkcli.sh  | 2 +-
 3 files changed, 4 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8257370/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index f1a12ea..5e2a518 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -258,6 +258,8 @@ Bug Fixes
 * SOLR-9516: Admin UI (angular) now works with Kerberos, by excluding serving of /solr/libs/* through
   SolrDispatchFilter. (Cassandra Targett, Amrit Sarkar via Ishan Chattopadhyaya)
 
+* SOLR-10302: Solr's zkcli scripts now able to find the metrics libraries, which it couldn't earlier (kiran, Ishan Chattopadhyaya)
+
 Optimizations
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8257370/solr/server/scripts/cloud-scripts/zkcli.bat
----------------------------------------------------------------------
diff --git a/solr/server/scripts/cloud-scripts/zkcli.bat b/solr/server/scripts/cloud-scripts/zkcli.bat
index c372685..c5d7b72 100644
--- a/solr/server/scripts/cloud-scripts/zkcli.bat
+++ b/solr/server/scripts/cloud-scripts/zkcli.bat
@@ -22,4 +22,4 @@ REM  -DzkDigestUsername=admin-user -DzkDigestPassword=CHANGEME-ADMIN-PASSWORD ^
 REM  -DzkDigestReadonlyUsername=readonly-user -DzkDigestReadonlyPassword=CHANGEME-READONLY-PASSWORD
 
 "%JVM%" %SOLR_ZK_CREDS_AND_ACLS% %ZKCLI_JVM_FLAGS% -Dlog4j.configuration="%LOG4J_CONFIG%" ^
--classpath "%SDIR%\..\..\solr-webapp\webapp\WEB-INF\lib\*;%SDIR%\..\..\lib\ext\*" org.apache.solr.cloud.ZkCLI %*
+-classpath "%SDIR%\..\..\solr-webapp\webapp\WEB-INF\lib\*;%SDIR%\..\..\lib\ext\*;%SDIR%\..\..\lib\*" org.apache.solr.cloud.ZkCLI %*

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8257370/solr/server/scripts/cloud-scripts/zkcli.sh
----------------------------------------------------------------------
diff --git a/solr/server/scripts/cloud-scripts/zkcli.sh b/solr/server/scripts/cloud-scripts/zkcli.sh
index df43265..bd971e9 100755
--- a/solr/server/scripts/cloud-scripts/zkcli.sh
+++ b/solr/server/scripts/cloud-scripts/zkcli.sh
@@ -22,5 +22,5 @@ fi
 #  -DzkDigestReadonlyUsername=readonly-user -DzkDigestReadonlyPassword=CHANGEME-READONLY-PASSWORD"
 
 PATH=$JAVA_HOME/bin:$PATH $JVM $SOLR_ZK_CREDS_AND_ACLS $ZKCLI_JVM_FLAGS -Dlog4j.configuration=$log4j_config \
--classpath "$sdir/../../solr-webapp/webapp/WEB-INF/lib/*:$sdir/../../lib/ext/*" org.apache.solr.cloud.ZkCLI ${1+"$@"}
+-classpath "$sdir/../../solr-webapp/webapp/WEB-INF/lib/*:$sdir/../../lib/ext/*:$sdir/../../lib/*" org.apache.solr.cloud.ZkCLI ${1+"$@"}
 


[15/46] lucene-solr:jira/solr-9959: SOLR-9990: Avoid copyField in SolrExampleTests.testUpdateField

Posted by ab...@apache.org.
SOLR-9990: Avoid copyField in SolrExampleTests.testUpdateField


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/14397949
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/14397949
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/14397949

Branch: refs/heads/jira/solr-9959
Commit: 14397949006723d907d4ee681ff2802410aa19ab
Parents: 2502af9
Author: Tomas Fernandez Lobbe <tf...@apache.org>
Authored: Thu Mar 16 15:10:48 2017 -0700
Committer: Tomas Fernandez Lobbe <tf...@apache.org>
Committed: Thu Mar 16 15:10:48 2017 -0700

----------------------------------------------------------------------
 .../apache/solr/client/solrj/SolrExampleTests.java  | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/14397949/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java
index b1e7285..326dede 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java
@@ -1613,16 +1613,16 @@ abstract public class SolrExampleTests extends SolrExampleTestsBase
     SolrInputDocument doc = new SolrInputDocument();
     doc.addField("id", "unique");
     doc.addField("name", "gadget");
-    doc.addField("price_f", 1);
+    doc.addField("price", 1);
     client.add(doc);
     client.commit();
     SolrQuery q = new SolrQuery("*:*");
-    q.setFields("id","price_f","name", "_version_");
+    q.setFields("id","price","name", "_version_");
     QueryResponse resp = client.query(q);
     assertEquals("Doc count does not match", 1, resp.getResults().getNumFound());
     Long version = (Long)resp.getResults().get(0).getFirstValue("_version_");
     assertNotNull("no version returned", version);
-    assertEquals(1.0f, resp.getResults().get(0).getFirstValue("price_f"));
+    assertEquals(1.0f, resp.getResults().get(0).getFirstValue("price"));
 
     //update "price" with incorrect version (optimistic locking)
     HashMap<String, Object> oper = new HashMap<>();  //need better api for this???
@@ -1631,7 +1631,7 @@ abstract public class SolrExampleTests extends SolrExampleTestsBase
     doc = new SolrInputDocument();
     doc.addField("id", "unique");
     doc.addField("_version_", version+1);
-    doc.addField("price_f", oper);
+    doc.addField("price", oper);
     try {
       client.add(doc);
       if(client instanceof HttpSolrClient) { //XXX concurrent client reports exceptions differently
@@ -1650,24 +1650,24 @@ abstract public class SolrExampleTests extends SolrExampleTestsBase
     doc = new SolrInputDocument();
     doc.addField("id", "unique");
     doc.addField("_version_", version);
-    doc.addField("price_f", oper);
+    doc.addField("price", oper);
     client.add(doc);
     client.commit();
     resp = client.query(q);
     assertEquals("Doc count does not match", 1, resp.getResults().getNumFound());
-    assertEquals("price was not updated?", 100.0f, resp.getResults().get(0).getFirstValue("price_f"));
+    assertEquals("price was not updated?", 100.0f, resp.getResults().get(0).getFirstValue("price"));
     assertEquals("no name?", "gadget", resp.getResults().get(0).getFirstValue("name"));
 
     //update "price", no version
     oper.put("set", 200);
     doc = new SolrInputDocument();
     doc.addField("id", "unique");
-    doc.addField("price_f", oper);
+    doc.addField("price", oper);
     client.add(doc);
     client.commit();
     resp = client.query(q);
     assertEquals("Doc count does not match", 1, resp.getResults().getNumFound());
-    assertEquals("price was not updated?", 200.0f, resp.getResults().get(0).getFirstValue("price_f"));
+    assertEquals("price was not updated?", 200.0f, resp.getResults().get(0).getFirstValue("price"));
     assertEquals("no name?", "gadget", resp.getResults().get(0).getFirstValue("name"));
   }
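
For readers following the test: an atomic "set" update is expressed as a map value on the field,
and optimistic concurrency is driven by the _version_ field. A minimal sketch, assuming a locally
running core (the URL and version value below are illustrative, not taken from the patch):

import java.util.HashMap;
import java.util.Map;

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.common.SolrInputDocument;

public class AtomicUpdateSketch {
  public static void main(String[] args) throws Exception {
    try (SolrClient client =
             new HttpSolrClient.Builder("http://localhost:8983/solr/techproducts").build()) {
      // Atomic "set" of the price field, guarded by a previously fetched _version_ value.
      Map<String, Object> setPrice = new HashMap<>();
      setPrice.put("set", 100);

      SolrInputDocument doc = new SolrInputDocument();
      doc.addField("id", "unique");
      doc.addField("_version_", 12345L);  // a stale version results in a conflict error
      doc.addField("price", setPrice);

      client.add(doc);
      client.commit();
    }
  }
}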
 


[29/46] lucene-solr:jira/solr-9959: SOLR-9835: Fix OnlyLeaderIndexesTest failure, in-place updates are not copied over properly

Posted by ab...@apache.org.
SOLR-9835: Fix OnlyLeaderIndexesTest failure, in-place updates are not copied over properly


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/4bc75dbf
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/4bc75dbf
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/4bc75dbf

Branch: refs/heads/jira/solr-9959
Commit: 4bc75dbf235145fad5ec1001004c663e15449523
Parents: d60849f
Author: Cao Manh Dat <da...@apache.org>
Authored: Mon Mar 20 08:21:54 2017 +0700
Committer: Cao Manh Dat <da...@apache.org>
Committed: Mon Mar 20 08:21:54 2017 +0700

----------------------------------------------------------------------
 solr/core/src/java/org/apache/solr/update/UpdateLog.java | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4bc75dbf/solr/core/src/java/org/apache/solr/update/UpdateLog.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/UpdateLog.java b/solr/core/src/java/org/apache/solr/update/UpdateLog.java
index bd0adbb..daa50a9 100644
--- a/solr/core/src/java/org/apache/solr/update/UpdateLog.java
+++ b/solr/core/src/java/org/apache/solr/update/UpdateLog.java
@@ -1197,10 +1197,7 @@ public static final int VERSION_IDX = 1;
             switch (oper) {
               case UpdateLog.UPDATE_INPLACE:
               case UpdateLog.ADD: {
-                SolrInputDocument sdoc = (SolrInputDocument) entry.get(entry.size() - 1);
-                AddUpdateCommand cmd = new AddUpdateCommand(req);
-                cmd.solrDoc = sdoc;
-                cmd.setVersion(version);
+                AddUpdateCommand cmd = convertTlogEntryToAddUpdateCommand(req, entry, oper, version);
                 cmd.setFlags(UpdateCommand.IGNORE_AUTOCOMMIT);
                 add(cmd);
                 break;


[22/46] lucene-solr:jira/solr-9959: LUCENE_7747: QueryBuilder now iterates lazily over the possible paths when building a graph query

Posted by ab...@apache.org.
LUCENE_7747: QueryBuilder now iterates lazily over the possible paths when building a graph query


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/3ca4d800
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/3ca4d800
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/3ca4d800

Branch: refs/heads/jira/solr-9959
Commit: 3ca4d800babe68c39b8ea6e30ba0bdcc225ed907
Parents: d374193
Author: Jim Ferenczi <ji...@apache.org>
Authored: Fri Mar 17 15:53:21 2017 +0100
Committer: Jim Ferenczi <ji...@apache.org>
Committed: Fri Mar 17 15:53:21 2017 +0100

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |  2 +
 .../org/apache/lucene/util/QueryBuilder.java    | 46 ++++++++++----------
 2 files changed, 24 insertions(+), 24 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3ca4d800/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index b2ea412..22c83f0 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -229,6 +229,8 @@ Improvements
 * LUCENE-7695: ComplexPhraseQueryParser to support query time synonyms (Markus Jelsma
   via Mikhail Khludnev) 
 
+* LUCENE_7747: QueryBuilder now iterates lazily over the possible paths when building a graph query
+
 Optimizations
 
 * LUCENE-7641: Optimized point range queries to compute documents that do not

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3ca4d800/lucene/core/src/java/org/apache/lucene/util/QueryBuilder.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/util/QueryBuilder.java b/lucene/core/src/java/org/apache/lucene/util/QueryBuilder.java
index 0832bdb..9fb474a 100644
--- a/lucene/core/src/java/org/apache/lucene/util/QueryBuilder.java
+++ b/lucene/core/src/java/org/apache/lucene/util/QueryBuilder.java
@@ -19,7 +19,6 @@ package org.apache.lucene.util;
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;
 
@@ -512,21 +511,20 @@ public class QueryBuilder {
       lastState = end;
       final Query queryPos;
       if (graph.hasSidePath(start)) {
-        List<Query> queries = new ArrayList<> ();
-        Iterator<TokenStream> it = graph.getFiniteStrings(start, end);
-        while (it.hasNext()) {
-          TokenStream ts = it.next();
-          // This is a synonym path so all terms are mandatory (MUST).
-          Query q = createFieldQuery(ts, BooleanClause.Occur.MUST, field, getAutoGenerateMultiTermSynonymsPhraseQuery(), 0);
-          if (q != null) {
-            queries.add(q);
+        final Iterator<TokenStream> it = graph.getFiniteStrings(start, end);
+        Iterator<Query> queries = new Iterator<Query>() {
+          @Override
+          public boolean hasNext() {
+            return it.hasNext();
           }
-        }
-        if (queries.size() > 0) {
-          queryPos = newGraphSynonymQuery(queries.toArray(new Query[queries.size()]));
-        } else {
-          queryPos = null;
-        }
+
+          @Override
+          public Query next() {
+            TokenStream ts = it.next();
+            return createFieldQuery(ts, BooleanClause.Occur.MUST, field, getAutoGenerateMultiTermSynonymsPhraseQuery(), 0);
+          }
+        };
+        queryPos = newGraphSynonymQuery(queries);
       } else {
         Term[] terms = graph.getTerms(field, start);
         assert terms.length > 0;
@@ -636,16 +634,16 @@ public class QueryBuilder {
    * This is intended for subclasses that wish to customize the generated queries.
    * @return new Query instance
    */
-  protected Query newGraphSynonymQuery(Query queries[]) {
-    if (queries == null) {
-      return new BooleanQuery.Builder().build();
-    } else if (queries.length == 1) {
-      return queries[0];
-    } else {
-      BooleanQuery.Builder builder = new BooleanQuery.Builder();
-      Arrays.stream(queries).forEachOrdered(qry -> builder.add(qry, BooleanClause.Occur.SHOULD));
-      return builder.build();
+  protected Query newGraphSynonymQuery(Iterator<Query> queries) {
+    BooleanQuery.Builder builder = new BooleanQuery.Builder();
+    while (queries.hasNext()) {
+      builder.add(queries.next(), BooleanClause.Occur.SHOULD);
     }
+    BooleanQuery bq = builder.build();
+    if (bq.clauses().size() == 1) {
+      return bq.clauses().get(0).getQuery();
+    }
+    return bq;
   }
   
   /**
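
To illustrate the API change above (this subclass is not part of the commit): newGraphSynonymQuery
now receives a lazy Iterator<Query> rather than a pre-built array, so a subclass that customizes
how synonym paths are combined only pulls the paths it needs. A sketch that mirrors the default
OR-combination:

import java.util.Iterator;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.QueryBuilder;

public class LazySynonymQueryBuilder extends QueryBuilder {
  public LazySynonymQueryBuilder(Analyzer analyzer) {
    super(analyzer);
  }

  @Override
  protected Query newGraphSynonymQuery(Iterator<Query> queries) {
    // Queries are materialized one path at a time as the iterator is consumed.
    BooleanQuery.Builder builder = new BooleanQuery.Builder();
    while (queries.hasNext()) {
      builder.add(queries.next(), BooleanClause.Occur.SHOULD);
    }
    return builder.build();
  }
}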


[18/46] lucene-solr:jira/solr-9959: SOLR-9185: Solr's edismax and Lucene/standard query parsers should optionally not split on whitespace before sending terms to analysis

Posted by ab...@apache.org.
SOLR-9185: Solr's edismax and Lucene/standard query parsers should optionally not split on whitespace before sending terms to analysis


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d1b2fb33
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d1b2fb33
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d1b2fb33

Branch: refs/heads/jira/solr-9959
Commit: d1b2fb33ef3bc0ced65feb98c31cffe4f209da7f
Parents: 4ee7fc3
Author: Steve Rowe <sa...@apache.org>
Authored: Thu Mar 16 19:41:37 2017 -0400
Committer: Steve Rowe <sa...@apache.org>
Committed: Thu Mar 16 19:41:47 2017 -0400

----------------------------------------------------------------------
 solr/CHANGES.txt                                |    7 +
 .../org/apache/solr/parser/QueryParser.java     |  417 +++++--
 .../java/org/apache/solr/parser/QueryParser.jj  |  276 +++--
 .../apache/solr/parser/SolrQueryParserBase.java |  213 +++-
 .../solr/search/ExtendedDismaxQParser.java      |  175 ++-
 .../org/apache/solr/search/LuceneQParser.java   |    3 +
 .../apache/solr/search/LuceneQParserPlugin.java |    2 +
 .../QueryParserConfigurationException.java      |   24 +
 .../org/apache/solr/search/QueryParsing.java    |    1 +
 .../org/apache/solr/search/SolrQueryParser.java |    2 +-
 .../collection1/conf/multiword-synonyms.txt     |   13 +
 .../conf/schema-multiword-synonyms.xml          |   50 +
 .../solr/collection1/conf/synonyms.txt          |    2 +
 .../solr/search/TestExtendedDismaxParser.java   | 1032 +++++++++++++-----
 .../solr/search/TestMultiWordSynonyms.java      |  100 ++
 .../apache/solr/search/TestSolrQueryParser.java |  565 ++++++++--
 16 files changed, 2268 insertions(+), 614 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d1b2fb33/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index af151ed..9a5299c 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -202,6 +202,13 @@ New Features
   requires "stored" and must not be multiValued.  It's intended for fields that might have very large values so that
   they don't get cached in memory. (David Smiley)
 
+* SOLR-9185: Solr's edismax and "Lucene"/standard query parsers will no longer split on whitespace before sending
+  terms to analysis, if given the "sow=false" request param ("sow"=>"split on whitespace").  This enables multi-term
+  source synonyms to match at query-time using SynonymGraphFilterFactory; other analysis components will also now
+  work at query time, e.g. ShingleFilterFactory.  By default, and when the "sow=true" param is specified, these
+  parsers' behavior remains the same: queries will be split on whitespace before sending individual terms to analysis.
+  (Steve Rowe)
+
 
 Bug Fixes
 ----------------------
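
As a usage sketch only (not part of the patch): the behavior is opt-in per request through the
sow parameter, for example with SolrJ against an edismax handler; the query text and field name
below are illustrative:

import org.apache.solr.client.solrj.SolrQuery;

public class SowParamSketch {
  public static void main(String[] args) {
    // With sow=false the whole query string is sent to analysis as one unit, so multi-word
    // synonyms registered via SynonymGraphFilterFactory can match at query time.
    SolrQuery query = new SolrQuery("street fighter");
    query.set("defType", "edismax");
    query.set("qf", "title");
    query.set("sow", "false");  // default remains sow=true (split on whitespace)
    System.out.println(query.toQueryString());
  }
}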

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d1b2fb33/solr/core/src/java/org/apache/solr/parser/QueryParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/parser/QueryParser.java b/solr/core/src/java/org/apache/solr/parser/QueryParser.java
index 42f359e..d9a64f4 100644
--- a/solr/core/src/java/org/apache/solr/parser/QueryParser.java
+++ b/solr/core/src/java/org/apache/solr/parser/QueryParser.java
@@ -3,13 +3,17 @@ package org.apache.solr.parser;
 
 import java.io.StringReader;
 import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 
+import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.util.Version;
-import org.apache.solr.search.QParser;
 import org.apache.solr.search.SyntaxError;
+import org.apache.solr.search.QParser;
+import org.apache.solr.search.QueryParserConfigurationException;
 
 
 public class QueryParser extends SolrQueryParserBase implements QueryParserConstants {
@@ -17,9 +21,44 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
    */
   static public enum Operator { OR, AND }
 
-   public QueryParser(Version matchVersion, String defaultField, QParser parser) {
+  /** default split on whitespace behavior */
+  public static final boolean DEFAULT_SPLIT_ON_WHITESPACE = true;
+
+   public QueryParser(String defaultField, QParser parser) {
     this(new FastCharStream(new StringReader("")));
-    init(matchVersion, defaultField, parser);
+    init(defaultField, parser);
+  }
+
+  /**
+   * @see #setSplitOnWhitespace(boolean)
+   */
+  public boolean getSplitOnWhitespace() {
+    return splitOnWhitespace;
+  }
+
+  /**
+   * Whether query text should be split on whitespace prior to analysis.
+   * Default is <code>{@value #DEFAULT_SPLIT_ON_WHITESPACE}</code>.
+   */
+  public void setSplitOnWhitespace(boolean splitOnWhitespace) {
+    this.splitOnWhitespace = splitOnWhitespace;
+  }
+
+  private boolean splitOnWhitespace = DEFAULT_SPLIT_ON_WHITESPACE;
+  private static Set<Integer> disallowedPostMultiTerm
+    = new HashSet<Integer>(Arrays.asList(COLON, STAR, FUZZY_SLOP, CARAT, AND, OR));
+  private static boolean allowedPostMultiTerm(int tokenKind) {
+    return disallowedPostMultiTerm.contains(tokenKind) == false;
+  }
+
+  @Override
+  protected Query newFieldQuery(Analyzer analyzer, String field, String queryText,
+                                boolean quoted, boolean fieldAutoGenPhraseQueries) throws SyntaxError {
+    if ((getAutoGeneratePhraseQueries() || fieldAutoGenPhraseQueries) && splitOnWhitespace == false) {
+      throw new QueryParserConfigurationException
+          ("Field '" + field + "': autoGeneratePhraseQueries == true is disallowed when sow/splitOnWhitespace == false");
+    }
+    return super.newFieldQuery(analyzer, field, queryText, quoted, fieldAutoGenPhraseQueries);
   }
 
 // *   Query  ::= ( Clause )*
@@ -96,13 +135,38 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
 
   final public Query Query(String field) throws ParseException, SyntaxError {
   List<BooleanClause> clauses = new ArrayList<BooleanClause>();
-  Query q, firstQuery=null;
+  Query q;
   int conj, mods;
-    mods = Modifiers();
-    q = Clause(field);
-    addClause(clauses, CONJ_NONE, mods, q);
-    if (mods == MOD_NONE)
-        firstQuery=q;
+    if (jj_2_1(2)) {
+      MultiTerm(field, clauses);
+    } else {
+      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+      case NOT:
+      case PLUS:
+      case MINUS:
+      case BAREOPER:
+      case LPAREN:
+      case STAR:
+      case QUOTED:
+      case TERM:
+      case PREFIXTERM:
+      case WILDTERM:
+      case REGEXPTERM:
+      case RANGEIN_START:
+      case RANGEEX_START:
+      case LPARAMS:
+      case FILTER:
+      case NUMBER:
+        mods = Modifiers();
+        q = Clause(field);
+        addClause(clauses, CONJ_NONE, mods, q);
+        break;
+      default:
+        jj_la1[4] = jj_gen;
+        jj_consume_token(-1);
+        throw new ParseException();
+      }
+    }
     label_1:
     while (true) {
       switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
@@ -127,19 +191,50 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
         ;
         break;
       default:
-        jj_la1[4] = jj_gen;
+        jj_la1[5] = jj_gen;
         break label_1;
       }
-      conj = Conjunction();
-      mods = Modifiers();
-      q = Clause(field);
-      addClause(clauses, conj, mods, q);
+      if (jj_2_2(2)) {
+        MultiTerm(field, clauses);
+      } else {
+        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+        case AND:
+        case OR:
+        case NOT:
+        case PLUS:
+        case MINUS:
+        case BAREOPER:
+        case LPAREN:
+        case STAR:
+        case QUOTED:
+        case TERM:
+        case PREFIXTERM:
+        case WILDTERM:
+        case REGEXPTERM:
+        case RANGEIN_START:
+        case RANGEEX_START:
+        case LPARAMS:
+        case FILTER:
+        case NUMBER:
+          conj = Conjunction();
+          mods = Modifiers();
+          q = Clause(field);
+        addClause(clauses, conj, mods, q);
+          break;
+        default:
+          jj_la1[6] = jj_gen;
+          jj_consume_token(-1);
+          throw new ParseException();
+        }
+      }
     }
-      if (clauses.size() == 1 && firstQuery != null)
+    if (clauses.size() == 1 && clauses.get(0).getOccur() == BooleanClause.Occur.SHOULD) {
+      Query firstQuery = clauses.get(0).getQuery();
+      if ( ! (firstQuery instanceof RawQuery) || ((RawQuery)firstQuery).getTermCount() == 1) {
         {if (true) return rawToNormal(firstQuery);}
-      else {
-        {if (true) return getBooleanQuery(clauses);}
       }
+    }
+    {if (true) return getBooleanQuery(clauses);}
     throw new Error("Missing return statement in function");
   }
 
@@ -148,20 +243,20 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
   Token fieldToken=null, boost=null;
   Token localParams=null;
   int flags = 0;
-    if (jj_2_1(2)) {
+    if (jj_2_3(2)) {
       switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
       case TERM:
         fieldToken = jj_consume_token(TERM);
         jj_consume_token(COLON);
-                               field=discardEscapeChar(fieldToken.image);
+                                  field = discardEscapeChar(fieldToken.image);
         break;
       case STAR:
         jj_consume_token(STAR);
         jj_consume_token(COLON);
-                      field="*";
+                         field = "*";
         break;
       default:
-        jj_la1[5] = jj_gen;
+        jj_la1[7] = jj_gen;
         jj_consume_token(-1);
         throw new ParseException();
       }
@@ -191,7 +286,7 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
         boost = jj_consume_token(NUMBER);
         break;
       default:
-        jj_la1[6] = jj_gen;
+        jj_la1[8] = jj_gen;
         ;
       }
       break;
@@ -206,10 +301,10 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
         boost = jj_consume_token(NUMBER);
         break;
       default:
-        jj_la1[7] = jj_gen;
+        jj_la1[9] = jj_gen;
         ;
       }
-                                                                                            q=getFilter(q); restoreFlags(flags);
+                                                                                             q=getFilter(q); restoreFlags(flags);
       break;
     case LPARAMS:
       localParams = jj_consume_token(LPARAMS);
@@ -219,17 +314,17 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
         boost = jj_consume_token(NUMBER);
         break;
       default:
-        jj_la1[8] = jj_gen;
+        jj_la1[10] = jj_gen;
         ;
       }
-                                                          q=getLocalParams(field, localParams.image);
+                                                           q=getLocalParams(field, localParams.image);
       break;
     default:
-      jj_la1[9] = jj_gen;
+      jj_la1[11] = jj_gen;
       jj_consume_token(-1);
       throw new ParseException();
     }
-       {if (true) return handleBoost(q, boost);}
+    {if (true) return handleBoost(q, boost);}
     throw new Error("Missing return statement in function");
   }
 
@@ -278,35 +373,48 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
                           term.image = term.image.substring(0,1);
         break;
       default:
-        jj_la1[10] = jj_gen;
+        jj_la1[12] = jj_gen;
         jj_consume_token(-1);
         throw new ParseException();
       }
       switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
-      case FUZZY_SLOP:
-        fuzzySlop = jj_consume_token(FUZZY_SLOP);
-                               fuzzy=true;
-        break;
-      default:
-        jj_la1[11] = jj_gen;
-        ;
-      }
-      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
       case CARAT:
-        jj_consume_token(CARAT);
-        boost = jj_consume_token(NUMBER);
+      case FUZZY_SLOP:
         switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+        case CARAT:
+          jj_consume_token(CARAT);
+          boost = jj_consume_token(NUMBER);
+          switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+          case FUZZY_SLOP:
+            fuzzySlop = jj_consume_token(FUZZY_SLOP);
+                                                        fuzzy=true;
+            break;
+          default:
+            jj_la1[13] = jj_gen;
+            ;
+          }
+          break;
         case FUZZY_SLOP:
           fuzzySlop = jj_consume_token(FUZZY_SLOP);
-                                                        fuzzy=true;
+                                 fuzzy=true;
+          switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+          case CARAT:
+            jj_consume_token(CARAT);
+            boost = jj_consume_token(NUMBER);
+            break;
+          default:
+            jj_la1[14] = jj_gen;
+            ;
+          }
           break;
         default:
-          jj_la1[12] = jj_gen;
-          ;
+          jj_la1[15] = jj_gen;
+          jj_consume_token(-1);
+          throw new ParseException();
         }
         break;
       default:
-        jj_la1[13] = jj_gen;
+        jj_la1[16] = jj_gen;
         ;
       }
       q = handleBareTokenQuery(getField(field), term, fuzzySlop, prefix, wildcard, fuzzy, regexp);
@@ -316,13 +424,13 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
       switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
       case RANGEIN_START:
         jj_consume_token(RANGEIN_START);
-                           startInc=true;
+                        startInc = true;
         break;
       case RANGEEX_START:
         jj_consume_token(RANGEEX_START);
         break;
       default:
-        jj_la1[14] = jj_gen;
+        jj_la1[17] = jj_gen;
         jj_consume_token(-1);
         throw new ParseException();
       }
@@ -334,7 +442,7 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
         goop1 = jj_consume_token(RANGE_QUOTED);
         break;
       default:
-        jj_la1[15] = jj_gen;
+        jj_la1[18] = jj_gen;
         jj_consume_token(-1);
         throw new ParseException();
       }
@@ -343,7 +451,7 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
         jj_consume_token(RANGE_TO);
         break;
       default:
-        jj_la1[16] = jj_gen;
+        jj_la1[19] = jj_gen;
         ;
       }
       switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
@@ -354,20 +462,20 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
         goop2 = jj_consume_token(RANGE_QUOTED);
         break;
       default:
-        jj_la1[17] = jj_gen;
+        jj_la1[20] = jj_gen;
         jj_consume_token(-1);
         throw new ParseException();
       }
       switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
       case RANGEIN_END:
         jj_consume_token(RANGEIN_END);
-                         endInc=true;
+                      endInc = true;
         break;
       case RANGEEX_END:
         jj_consume_token(RANGEEX_END);
         break;
       default:
-        jj_la1[18] = jj_gen;
+        jj_la1[21] = jj_gen;
         jj_consume_token(-1);
         throw new ParseException();
       }
@@ -377,46 +485,71 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
         boost = jj_consume_token(NUMBER);
         break;
       default:
-        jj_la1[19] = jj_gen;
+        jj_la1[22] = jj_gen;
         ;
       }
-         boolean startOpen=false;
-         boolean endOpen=false;
-         if (goop1.kind == RANGE_QUOTED) {
-           goop1.image = goop1.image.substring(1, goop1.image.length()-1);
-         } else if ("*".equals(goop1.image)) {
-           startOpen=true;
-         }
-         if (goop2.kind == RANGE_QUOTED) {
-           goop2.image = goop2.image.substring(1, goop2.image.length()-1);
-         } else if ("*".equals(goop2.image)) {
-           endOpen=true;
-         }
-         q = getRangeQuery(getField(field), startOpen ? null : discardEscapeChar(goop1.image), endOpen ? null : discardEscapeChar(goop2.image), startInc, endInc);
+      boolean startOpen=false;
+      boolean endOpen=false;
+      if (goop1.kind == RANGE_QUOTED) {
+        goop1.image = goop1.image.substring(1, goop1.image.length()-1);
+      } else if ("*".equals(goop1.image)) {
+        startOpen=true;
+      }
+      if (goop2.kind == RANGE_QUOTED) {
+        goop2.image = goop2.image.substring(1, goop2.image.length()-1);
+      } else if ("*".equals(goop2.image)) {
+        endOpen=true;
+      }
+      q = getRangeQuery(getField(field),
+                        startOpen ? null : discardEscapeChar(goop1.image),
+                        endOpen ? null : discardEscapeChar(goop2.image), startInc, endInc);
       break;
     case QUOTED:
       term = jj_consume_token(QUOTED);
       switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
-      case FUZZY_SLOP:
-        fuzzySlop = jj_consume_token(FUZZY_SLOP);
-        break;
-      default:
-        jj_la1[20] = jj_gen;
-        ;
-      }
-      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
       case CARAT:
-        jj_consume_token(CARAT);
-        boost = jj_consume_token(NUMBER);
+      case FUZZY_SLOP:
+        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+        case CARAT:
+          jj_consume_token(CARAT);
+          boost = jj_consume_token(NUMBER);
+          switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+          case FUZZY_SLOP:
+            fuzzySlop = jj_consume_token(FUZZY_SLOP);
+                                                        fuzzy=true;
+            break;
+          default:
+            jj_la1[23] = jj_gen;
+            ;
+          }
+          break;
+        case FUZZY_SLOP:
+          fuzzySlop = jj_consume_token(FUZZY_SLOP);
+                                 fuzzy=true;
+          switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+          case CARAT:
+            jj_consume_token(CARAT);
+            boost = jj_consume_token(NUMBER);
+            break;
+          default:
+            jj_la1[24] = jj_gen;
+            ;
+          }
+          break;
+        default:
+          jj_la1[25] = jj_gen;
+          jj_consume_token(-1);
+          throw new ParseException();
+        }
         break;
       default:
-        jj_la1[21] = jj_gen;
+        jj_la1[26] = jj_gen;
         ;
       }
-        q = handleQuotedTerm(getField(field), term, fuzzySlop);
+      q = handleQuotedTerm(getField(field), term, fuzzySlop);
       break;
     default:
-      jj_la1[22] = jj_gen;
+      jj_la1[27] = jj_gen;
       jj_consume_token(-1);
       throw new ParseException();
     }
@@ -424,6 +557,44 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
     throw new Error("Missing return statement in function");
   }
 
+  final public void MultiTerm(String field, List<BooleanClause> clauses) throws ParseException, SyntaxError {
+  Token text;
+  List<String> terms = null;
+    text = jj_consume_token(TERM);
+    if (splitOnWhitespace) {
+      Query q = getFieldQuery(getField(field), discardEscapeChar(text.image), false, true);
+      addClause(clauses, CONJ_NONE, MOD_NONE, q);
+    } else {
+      terms = new ArrayList<String>();
+      terms.add(discardEscapeChar(text.image));
+    }
+    if (getToken(1).kind == TERM && allowedPostMultiTerm(getToken(2).kind)) {
+
+    } else {
+      jj_consume_token(-1);
+      throw new ParseException();
+    }
+    label_2:
+    while (true) {
+      text = jj_consume_token(TERM);
+      if (splitOnWhitespace) {
+        Query q = getFieldQuery(getField(field), discardEscapeChar(text.image), false, true);
+        addClause(clauses, CONJ_NONE, MOD_NONE, q);
+      } else {
+        terms.add(discardEscapeChar(text.image));
+      }
+      if (getToken(1).kind == TERM && allowedPostMultiTerm(getToken(2).kind)) {
+        ;
+      } else {
+        break label_2;
+      }
+    }
+    if (splitOnWhitespace == false) {
+      Query q = getFieldQuery(getField(field), terms, true);
+      addMultiTermClause(clauses, q);
+    }
+  }
+
   private boolean jj_2_1(int xla) {
     jj_la = xla; jj_lastpos = jj_scanpos = token;
     try { return !jj_3_1(); }
@@ -431,28 +602,76 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
     finally { jj_save(0, xla); }
   }
 
-  private boolean jj_3R_3() {
-    if (jj_scan_token(STAR)) return true;
-    if (jj_scan_token(COLON)) return true;
+  private boolean jj_2_2(int xla) {
+    jj_la = xla; jj_lastpos = jj_scanpos = token;
+    try { return !jj_3_2(); }
+    catch(LookaheadSuccess ls) { return true; }
+    finally { jj_save(1, xla); }
+  }
+
+  private boolean jj_2_3(int xla) {
+    jj_la = xla; jj_lastpos = jj_scanpos = token;
+    try { return !jj_3_3(); }
+    catch(LookaheadSuccess ls) { return true; }
+    finally { jj_save(2, xla); }
+  }
+
+  private boolean jj_3R_7() {
+    if (jj_scan_token(TERM)) return true;
     return false;
   }
 
-  private boolean jj_3R_2() {
+  private boolean jj_3R_4() {
     if (jj_scan_token(TERM)) return true;
     if (jj_scan_token(COLON)) return true;
     return false;
   }
 
   private boolean jj_3_1() {
+    if (jj_3R_3()) return true;
+    return false;
+  }
+
+  private boolean jj_3R_6() {
+    return false;
+  }
+
+  private boolean jj_3R_3() {
+    if (jj_scan_token(TERM)) return true;
+    jj_lookingAhead = true;
+    jj_semLA = getToken(1).kind == TERM && allowedPostMultiTerm(getToken(2).kind);
+    jj_lookingAhead = false;
+    if (!jj_semLA || jj_3R_6()) return true;
+    Token xsp;
+    if (jj_3R_7()) return true;
+    while (true) {
+      xsp = jj_scanpos;
+      if (jj_3R_7()) { jj_scanpos = xsp; break; }
+    }
+    return false;
+  }
+
+  private boolean jj_3_3() {
     Token xsp;
     xsp = jj_scanpos;
-    if (jj_3R_2()) {
+    if (jj_3R_4()) {
     jj_scanpos = xsp;
-    if (jj_3R_3()) return true;
+    if (jj_3R_5()) return true;
     }
     return false;
   }
 
+  private boolean jj_3_2() {
+    if (jj_3R_3()) return true;
+    return false;
+  }
+
+  private boolean jj_3R_5() {
+    if (jj_scan_token(STAR)) return true;
+    if (jj_scan_token(COLON)) return true;
+    return false;
+  }
+
   /** Generated Token Manager. */
   public QueryParserTokenManager token_source;
   /** Current token. */
@@ -462,8 +681,11 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
   private int jj_ntk;
   private Token jj_scanpos, jj_lastpos;
   private int jj_la;
+  /** Whether we are looking ahead. */
+  private boolean jj_lookingAhead = false;
+  private boolean jj_semLA;
   private int jj_gen;
-  final private int[] jj_la1 = new int[23];
+  final private int[] jj_la1 = new int[28];
   static private int[] jj_la1_0;
   static private int[] jj_la1_1;
   static {
@@ -471,12 +693,12 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
       jj_la1_init_1();
    }
    private static void jj_la1_init_0() {
-      jj_la1_0 = new int[] {0x6000,0x6000,0x38000,0x38000,0xfb4fe000,0x2400000,0x800000,0x800000,0x800000,0xfb4c0000,0x3a440000,0x4000000,0x4000000,0x800000,0xc0000000,0x0,0x0,0x0,0x0,0x800000,0x4000000,0x800000,0xfb440000,};
+      jj_la1_0 = new int[] {0x6000,0x6000,0x38000,0x38000,0xfb4f8000,0xfb4fe000,0xfb4fe000,0x2400000,0x800000,0x800000,0x800000,0xfb4c0000,0x3a440000,0x4000000,0x800000,0x4800000,0x4800000,0xc0000000,0x0,0x0,0x0,0x0,0x800000,0x4000000,0x800000,0x4800000,0x4800000,0xfb440000,};
    }
    private static void jj_la1_init_1() {
-      jj_la1_1 = new int[] {0x0,0x0,0x0,0x0,0x7,0x0,0x0,0x0,0x0,0x7,0x4,0x0,0x0,0x0,0x0,0xc0,0x8,0xc0,0x30,0x0,0x0,0x0,0x4,};
+      jj_la1_1 = new int[] {0x0,0x0,0x0,0x0,0x7,0x7,0x7,0x0,0x0,0x0,0x0,0x7,0x4,0x0,0x0,0x0,0x0,0x0,0xc0,0x8,0xc0,0x30,0x0,0x0,0x0,0x0,0x0,0x4,};
    }
-  final private JJCalls[] jj_2_rtns = new JJCalls[1];
+  final private JJCalls[] jj_2_rtns = new JJCalls[3];
   private boolean jj_rescan = false;
   private int jj_gc = 0;
 
@@ -486,7 +708,7 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
     token = new Token();
     jj_ntk = -1;
     jj_gen = 0;
-    for (int i = 0; i < 23; i++) jj_la1[i] = -1;
+    for (int i = 0; i < 28; i++) jj_la1[i] = -1;
     for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls();
   }
 
@@ -495,8 +717,9 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
     token_source.ReInit(stream);
     token = new Token();
     jj_ntk = -1;
+    jj_lookingAhead = false;
     jj_gen = 0;
-    for (int i = 0; i < 23; i++) jj_la1[i] = -1;
+    for (int i = 0; i < 28; i++) jj_la1[i] = -1;
     for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls();
   }
 
@@ -506,7 +729,7 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
     token = new Token();
     jj_ntk = -1;
     jj_gen = 0;
-    for (int i = 0; i < 23; i++) jj_la1[i] = -1;
+    for (int i = 0; i < 28; i++) jj_la1[i] = -1;
     for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls();
   }
 
@@ -516,7 +739,7 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
     token = new Token();
     jj_ntk = -1;
     jj_gen = 0;
-    for (int i = 0; i < 23; i++) jj_la1[i] = -1;
+    for (int i = 0; i < 28; i++) jj_la1[i] = -1;
     for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls();
   }
 
@@ -579,7 +802,7 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
 
 /** Get the specific Token. */
   final public Token getToken(int index) {
-    Token t = token;
+    Token t = jj_lookingAhead ? jj_scanpos : token;
     for (int i = 0; i < index; i++) {
       if (t.next != null) t = t.next;
       else t = t.next = token_source.getNextToken();
@@ -633,7 +856,7 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
       la1tokens[jj_kind] = true;
       jj_kind = -1;
     }
-    for (int i = 0; i < 23; i++) {
+    for (int i = 0; i < 28; i++) {
       if (jj_la1[i] == jj_gen) {
         for (int j = 0; j < 32; j++) {
           if ((jj_la1_0[i] & (1<<j)) != 0) {
@@ -672,7 +895,7 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
 
   private void jj_rescan_token() {
     jj_rescan = true;
-    for (int i = 0; i < 1; i++) {
+    for (int i = 0; i < 3; i++) {
     try {
       JJCalls p = jj_2_rtns[i];
       do {
@@ -680,6 +903,8 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
           jj_la = p.arg; jj_lastpos = jj_scanpos = p.first;
           switch (i) {
             case 0: jj_3_1(); break;
+            case 1: jj_3_2(); break;
+            case 2: jj_3_3(); break;
           }
         }
         p = p.next;

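Illustrative sketch (not part of the patch): the new MultiTerm() production above gathers a run of whitespace-separated TERM tokens. Roughly, with splitOnWhitespace=true each term still becomes its own clause, while with splitOnWhitespace=false the whole run is collected and handed to getFieldQuery(field, terms, true) so the analyzer sees it as one piece and multi-word synonyms can match across terms. A minimal, self-contained sketch of that branch, where plain strings stand in for Solr's Query objects and all names are illustrative, not the Solr API:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class MultiTermSketch {
  // splitOnWhitespace=true: one clause per term (pre-existing behavior).
  // splitOnWhitespace=false: keep the run together, mirroring the single
  // getFieldQuery(field, terms, true) call made at the end of MultiTerm().
  static List<String> clauses(List<String> run, boolean splitOnWhitespace) {
    List<String> clauses = new ArrayList<>();
    if (splitOnWhitespace) {
      for (String term : run) {
        clauses.add("fieldQuery(text:" + term + ")");
      }
    } else {
      clauses.add("fieldQuery(text:\"" + String.join(" ", run) + "\" analyzed as one run)");
    }
    return clauses;
  }

  public static void main(String[] args) {
    List<String> run = Arrays.asList("united", "states", "of", "america");
    System.out.println(clauses(run, true));   // four independent clauses
    System.out.println(clauses(run, false));  // one clause covering the whole run
  }
}
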
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d1b2fb33/solr/core/src/java/org/apache/solr/parser/QueryParser.jj
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/parser/QueryParser.jj b/solr/core/src/java/org/apache/solr/parser/QueryParser.jj
index a6b93ca..5eb4ec7 100644
--- a/solr/core/src/java/org/apache/solr/parser/QueryParser.jj
+++ b/solr/core/src/java/org/apache/solr/parser/QueryParser.jj
@@ -27,18 +27,17 @@ package org.apache.solr.parser;
 
 import java.io.StringReader;
 import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
 import java.util.List;
-import java.util.Locale;
+import java.util.Set;
 
 import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.document.DateTools;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.TermRangeQuery;
-import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.util.Version;
 import org.apache.solr.search.SyntaxError;
 import org.apache.solr.search.QParser;
+import org.apache.solr.search.QueryParserConfigurationException;
 
 
 public class QueryParser extends SolrQueryParserBase {
@@ -46,9 +45,44 @@ public class QueryParser extends SolrQueryParserBase {
    */
   static public enum Operator { OR, AND }
 
-   public QueryParser(Version matchVersion, String defaultField, QParser parser) {
+  /** default split on whitespace behavior */
+  public static final boolean DEFAULT_SPLIT_ON_WHITESPACE = true;
+
+   public QueryParser(String defaultField, QParser parser) {
     this(new FastCharStream(new StringReader("")));
-    init(matchVersion, defaultField, parser);
+    init(defaultField, parser);
+  }
+
+  /**
+   * @see #setSplitOnWhitespace(boolean)
+   */
+  public boolean getSplitOnWhitespace() {
+    return splitOnWhitespace;
+  }
+
+  /**
+   * Whether query text should be split on whitespace prior to analysis.
+   * Default is <code>{@value #DEFAULT_SPLIT_ON_WHITESPACE}</code>.
+   */
+  public void setSplitOnWhitespace(boolean splitOnWhitespace) {
+    this.splitOnWhitespace = splitOnWhitespace;
+  }
+
+  private boolean splitOnWhitespace = DEFAULT_SPLIT_ON_WHITESPACE;
+  private static Set<Integer> disallowedPostMultiTerm
+    = new HashSet<Integer>(Arrays.asList(COLON, STAR, FUZZY_SLOP, CARAT, AND, OR));
+  private static boolean allowedPostMultiTerm(int tokenKind) {
+    return disallowedPostMultiTerm.contains(tokenKind) == false;
+  }
+
+  @Override
+  protected Query newFieldQuery(Analyzer analyzer, String field, String queryText,
+                                boolean quoted, boolean fieldAutoGenPhraseQueries) throws SyntaxError {
+    if ((getAutoGeneratePhraseQueries() || fieldAutoGenPhraseQueries) && splitOnWhitespace == false) {
+      throw new QueryParserConfigurationException
+          ("Field '" + field + "': autoGeneratePhraseQueries == true is disallowed when sow/splitOnWhitespace == false");
+    }
+    return super.newFieldQuery(analyzer, field, queryText, quoted, fieldAutoGenPhraseQueries);
   }
 }
 
@@ -63,17 +97,15 @@ TOKEN_MGR_DECLS : {
 /* ***************** */
 
 <*> TOKEN : {
-  <#_NUM_CHAR:   ["0"-"9"] >
- // every character that follows a backslash is considered as an escaped character
- | <#_ESCAPED_CHAR: "\\" ~[] >
- | <#_TERM_START_CHAR: ( ~[ " ", "\t", "\n", "\r", "\u3000", "+", "-", "!", "(", ")", ":", "^",
-                      "[", "]", "\"", "{", "}", "~", "*", "?", "\\", "/" ]
-                      | <_ESCAPED_CHAR> ) >
- | <#_TERM_CHAR: ( <_TERM_START_CHAR>
-                       | <_ESCAPED_CHAR> | "-" | "+" | "/" | "!") >
- | <#_WHITESPACE:  ( " " | "\t" | "\n" | "\r" | "\u3000") >
- | <#_QUOTED_CHAR:  ( ~[ "\"", "\\" ] | <_ESCAPED_CHAR> ) >
- | <#_SQUOTED_CHAR:  ( ~[ "'", "\\" ] | <_ESCAPED_CHAR> ) >
+  <#_NUM_CHAR:        ["0"-"9"] >
+| <#_ESCAPED_CHAR:    "\\" ~[] >  // every character that follows a backslash is considered as an escaped character
+| <#_TERM_START_CHAR: ( ~[ " ", "\t", "\n", "\r", "\u3000", "+", "-", "!", "(", ")", ":", "^",
+                           "[", "]", "\"", "{", "}", "~", "*", "?", "\\", "/" ]
+                        | <_ESCAPED_CHAR> ) >
+| <#_TERM_CHAR:       ( <_TERM_START_CHAR> | <_ESCAPED_CHAR> | "-" | "+" | "/" | "!") >
+| <#_WHITESPACE:      ( " " | "\t" | "\n" | "\r" | "\u3000") >
+| <#_QUOTED_CHAR:     ( ~[ "\"", "\\" ] | <_ESCAPED_CHAR> ) >
+| <#_SQUOTED_CHAR:    ( ~[ "'", "\\" ] | <_ESCAPED_CHAR> ) >
 }
 
 <DEFAULT, COMMENT> SKIP : {
@@ -93,44 +125,43 @@ TOKEN_MGR_DECLS : {
   < <_WHITESPACE>>
 }
 
-
 <DEFAULT> TOKEN : {
-  <AND:       ("AND" | "&&") >
- | <OR:        ("OR" | "||") >
- | <NOT:       ("NOT" | "!") >
- | <PLUS:      "+" >
- | <MINUS:     "-" >
- | <BAREOPER:    ("+"|"-"|"!") <_WHITESPACE> >
- | <LPAREN:    "(" >
- | <RPAREN:    ")" >
- | <COLON:     ":" >
- | <STAR:      "*" >
- | <CARAT:     "^" > : Boost
- | <QUOTED:     "\"" (<_QUOTED_CHAR>)* "\"">
- | <TERM:      <_TERM_START_CHAR> (<_TERM_CHAR>)*  >
- | <FUZZY_SLOP:     "~" ( (<_NUM_CHAR>)+ ( "." (<_NUM_CHAR>)+ )? )? >
- | <PREFIXTERM:  ("*") | ( <_TERM_START_CHAR> (<_TERM_CHAR>)* "*" ) >
- | <WILDTERM:  (<_TERM_START_CHAR> | [ "*", "?" ]) (<_TERM_CHAR> | ( [ "*", "?" ] ))* >
- | <REGEXPTERM: "/" (~["*","/"] | "\\/" ) (~[ "/" ] | "\\/" )* "/" >
- | <RANGEIN_START: "[" > : Range
- | <RANGEEX_START: "{" > : Range
+  <AND:           ("AND" | "&&") >
+| <OR:            ("OR" | "||") >
+| <NOT:           ("NOT" | "!") >
+| <PLUS:          "+" >
+| <MINUS:         "-" >
+| <BAREOPER:      ("+"|"-"|"!") <_WHITESPACE> >
+| <LPAREN:        "(" >
+| <RPAREN:        ")" >
+| <COLON:         ":" >
+| <STAR:          "*" >
+| <CARAT:         "^" > : Boost
+| <QUOTED:        "\"" (<_QUOTED_CHAR>)* "\"">
+| <TERM:          <_TERM_START_CHAR> (<_TERM_CHAR>)*  >
+| <FUZZY_SLOP:    "~" ( (<_NUM_CHAR>)+ ( "." (<_NUM_CHAR>)+ )? )? >
+| <PREFIXTERM:    ("*") | ( <_TERM_START_CHAR> (<_TERM_CHAR>)* "*" ) >
+| <WILDTERM:      (<_TERM_START_CHAR> | [ "*", "?" ]) (<_TERM_CHAR> | ( [ "*", "?" ] ))* >
+| <REGEXPTERM:    "/" (~["*","/"] | "\\/" ) (~[ "/" ] | "\\/" )* "/" >
+| <RANGEIN_START: "[" > : Range
+| <RANGEEX_START: "{" > : Range
 // TODO: consider using token states instead of inlining SQUOTED
-//  | <SQUOTED:     "'" (<_SQUOTED_CHAR>)* "'">
-//  | <LPARAMS:     ("{!" ( (<_WHITESPACE>)* (~["=","}"])+ ( "=" (<QUOTED> | <SQUOTED> | (~[" ","}"])+ )? )? )* "}")+  (~[")"," ","\t","\n","{","^"])*  >
-  | <LPARAMS:     ("{!" ( (<_WHITESPACE>)* (~["=","}"])+ ( "=" (<QUOTED> | ("'" (<_SQUOTED_CHAR>)* "'") | (~[" ","}"])+ )? )? )* "}")+  (~[")"," ","\t","\n","{","^"])*  >
-  | <FILTER:      "filter(" >
+// | <SQUOTED:       "'" (<_SQUOTED_CHAR>)* "'">
+// | <LPARAMS:       ("{!" ( (<_WHITESPACE>)* (~["=","}"])+ ( "=" (<QUOTED> | <SQUOTED> | (~[" ","}"])+ )? )? )* "}")+  (~[")"," ","\t","\n","{","^"])*  >
+| <LPARAMS:       ("{!" ( (<_WHITESPACE>)* (~["=","}"])+ ( "=" (<QUOTED> | ("'" (<_SQUOTED_CHAR>)* "'") | (~[" ","}"])+ )? )? )* "}")+  (~[")"," ","\t","\n","{","^"])*  >
+| <FILTER:        "filter(" >
 }
 
 <Boost> TOKEN : {
- <NUMBER:    ("=")?("-")? (<_NUM_CHAR>)+ ( "." (<_NUM_CHAR>)+ )? > : DEFAULT
+  <NUMBER: ("=")?("-")? (<_NUM_CHAR>)+ ( "." (<_NUM_CHAR>)+ )? > : DEFAULT
 }
 
 <Range> TOKEN : {
- <RANGE_TO: "TO">
- | <RANGEIN_END: "]"> : DEFAULT
- | <RANGEEX_END: "}"> : DEFAULT
- | <RANGE_QUOTED: "\"" (~["\""] | "\\\"")+ "\"">
- | <RANGE_GOOP: (~[ " ", "]", "}" ])+ >
+  <RANGE_TO:     "TO">
+| <RANGEIN_END:  "]"> : DEFAULT
+| <RANGEEX_END:  "}"> : DEFAULT
+| <RANGE_QUOTED: "\"" (~["\""] | "\\\"")+ "\"">
+| <RANGE_GOOP:   (~[ " ", "]", "}" ])+ >
 }
 
 // *   Query  ::= ( Clause )*
@@ -160,8 +191,7 @@ int Modifiers() : {
 }
 
 // This makes sure that there is no garbage after the query string
-Query TopLevelQuery(String field) throws SyntaxError :
-{
+Query TopLevelQuery(String field) throws SyntaxError : {
   Query q;
 }
 {
@@ -174,27 +204,31 @@ Query TopLevelQuery(String field) throws SyntaxError :
 Query Query(String field) throws SyntaxError :
 {
   List<BooleanClause> clauses = new ArrayList<BooleanClause>();
-  Query q, firstQuery=null;
+  Query q;
   int conj, mods;
 }
 {
-  mods=Modifiers() q=Clause(field)
-  {
-    addClause(clauses, CONJ_NONE, mods, q);
-    if (mods == MOD_NONE)
-        firstQuery=q;
-  }
   (
-    conj=Conjunction() mods=Modifiers() q=Clause(field)
-    { addClause(clauses, conj, mods, q); }
+    LOOKAHEAD(2)
+    MultiTerm(field, clauses)
+    | mods=Modifiers() q=Clause(field)
+      { addClause(clauses, CONJ_NONE, mods, q); }
+  )
+  (
+    LOOKAHEAD(2)
+    MultiTerm(field, clauses)
+    | conj=Conjunction() mods=Modifiers() q=Clause(field)
+      { addClause(clauses, conj, mods, q); }
   )*
-    {
-      if (clauses.size() == 1 && firstQuery != null)
+  {
+    if (clauses.size() == 1 && clauses.get(0).getOccur() == BooleanClause.Occur.SHOULD) {
+      Query firstQuery = clauses.get(0).getQuery();
+      if ( ! (firstQuery instanceof RawQuery) || ((RawQuery)firstQuery).getTermCount() == 1) {
         return rawToNormal(firstQuery);
-      else {
-        return getBooleanQuery(clauses);
       }
     }
+    return getBooleanQuery(clauses);
+  }
 }
 
 Query Clause(String field) throws SyntaxError : {
@@ -204,26 +238,22 @@ Query Clause(String field) throws SyntaxError : {
   int flags = 0;
 }
 {
- 
   [
     LOOKAHEAD(2)
     (
-    fieldToken=<TERM> <COLON> {field=discardEscapeChar(fieldToken.image);}
-    | <STAR> <COLON> {field="*";}
+      fieldToken=<TERM> <COLON> { field = discardEscapeChar(fieldToken.image); }
+      | <STAR> <COLON> { field = "*"; }
     )
   ]
-
-
   (
    q=Term(field)
-   | <LPAREN> q=Query(field) <RPAREN> (<CARAT> boost=<NUMBER>)?
-   | (<FILTER> { flags=startFilter(); } q=Query(field) <RPAREN> (<CARAT> boost=<NUMBER>)? { q=getFilter(q); restoreFlags(flags); } )
-   | (localParams = <LPARAMS> (<CARAT> boost=<NUMBER>)? { q=getLocalParams(field, localParams.image); }  )
+   | <LPAREN> q=Query(field) <RPAREN> [ <CARAT> boost=<NUMBER> ]
+   | (<FILTER> { flags=startFilter(); } q=Query(field) <RPAREN> [ <CARAT> boost=<NUMBER> ] { q=getFilter(q); restoreFlags(flags); } )
+   | (localParams = <LPARAMS> [ <CARAT> boost=<NUMBER> ] { q=getLocalParams(field, localParams.image); }  )
   )
-    {  return handleBoost(q, boost); }
+  { return handleBoost(q, boost); }
 }
 
-
 Query Term(String field) throws SyntaxError : {
   Token term, boost=null, fuzzySlop=null, goop1, goop2;
   boolean prefix = false;
@@ -245,40 +275,78 @@ Query Term(String field) throws SyntaxError : {
       | term=<NUMBER>
       | term=<BAREOPER> { term.image = term.image.substring(0,1); }
     )
-    [ fuzzySlop=<FUZZY_SLOP> { fuzzy=true; } ]
-    [ <CARAT> boost=<NUMBER> [ fuzzySlop=<FUZZY_SLOP> { fuzzy=true; } ] ]
+    [
+      <CARAT> boost=<NUMBER> [ fuzzySlop=<FUZZY_SLOP> { fuzzy=true; } ]
+      | fuzzySlop=<FUZZY_SLOP> { fuzzy=true; } [ <CARAT> boost=<NUMBER> ]
+    ]
+    { q = handleBareTokenQuery(getField(field), term, fuzzySlop, prefix, wildcard, fuzzy, regexp); }
+
+  | ( <RANGEIN_START> { startInc = true; } | <RANGEEX_START> )
+    ( goop1=<RANGE_GOOP> | goop1=<RANGE_QUOTED> )
+    [ <RANGE_TO> ]
+    ( goop2=<RANGE_GOOP> | goop2=<RANGE_QUOTED> )
+    ( <RANGEIN_END> { endInc = true; } | <RANGEEX_END> )
+    [ <CARAT> boost=<NUMBER> ]
     {
-      q = handleBareTokenQuery(getField(field), term, fuzzySlop, prefix, wildcard, fuzzy, regexp);
-    }
-    | ( ( <RANGEIN_START> {startInc=true;} | <RANGEEX_START> )
-        ( goop1=<RANGE_GOOP>|goop1=<RANGE_QUOTED> )
-        [ <RANGE_TO> ]
-        ( goop2=<RANGE_GOOP>|goop2=<RANGE_QUOTED> )
-        ( <RANGEIN_END> {endInc=true;} | <RANGEEX_END>))
-      [ <CARAT> boost=<NUMBER> ]
-       {
-         boolean startOpen=false;
-         boolean endOpen=false;
-         if (goop1.kind == RANGE_QUOTED) {
-           goop1.image = goop1.image.substring(1, goop1.image.length()-1);
-         } else if ("*".equals(goop1.image)) {
-           startOpen=true;
-         }
-         if (goop2.kind == RANGE_QUOTED) {
-           goop2.image = goop2.image.substring(1, goop2.image.length()-1);
-         } else if ("*".equals(goop2.image)) {
-           endOpen=true;
-         }
-         q = getRangeQuery(getField(field), startOpen ? null : discardEscapeChar(goop1.image), endOpen ? null : discardEscapeChar(goop2.image), startInc, endInc);
-       }
-    | term=<QUOTED>
-      [ fuzzySlop=<FUZZY_SLOP> ]
-      [ <CARAT> boost=<NUMBER> ]
-      {
-        q = handleQuotedTerm(getField(field), term, fuzzySlop);
+      boolean startOpen=false;
+      boolean endOpen=false;
+      if (goop1.kind == RANGE_QUOTED) {
+        goop1.image = goop1.image.substring(1, goop1.image.length()-1);
+      } else if ("*".equals(goop1.image)) {
+        startOpen=true;
+      }
+      if (goop2.kind == RANGE_QUOTED) {
+        goop2.image = goop2.image.substring(1, goop2.image.length()-1);
+      } else if ("*".equals(goop2.image)) {
+        endOpen=true;
       }
+      q = getRangeQuery(getField(field),
+                        startOpen ? null : discardEscapeChar(goop1.image),
+                        endOpen ? null : discardEscapeChar(goop2.image), startInc, endInc);
+    }
+  | term=<QUOTED>
+    [
+      <CARAT> boost=<NUMBER> [ fuzzySlop=<FUZZY_SLOP> { fuzzy=true; } ]
+      | fuzzySlop=<FUZZY_SLOP> { fuzzy=true; } [ <CARAT> boost=<NUMBER> ]
+    ]
+    { q = handleQuotedTerm(getField(field), term, fuzzySlop); }
   )
+  { return handleBoost(q, boost); }
+}
+
+void MultiTerm(String field, List<BooleanClause> clauses) throws SyntaxError : {
+  Token text;
+  List<String> terms = null;
+}
+{
+  text=<TERM>
+  {
+    if (splitOnWhitespace) {
+      Query q = getFieldQuery(getField(field), discardEscapeChar(text.image), false, true);
+      addClause(clauses, CONJ_NONE, MOD_NONE, q);
+    } else {
+      terms = new ArrayList<String>();
+      terms.add(discardEscapeChar(text.image));
+    }
+  }
+  // Both lookaheads are required; the first lookahead vets the first following term and the second lookahead vets the rest
+  LOOKAHEAD({ getToken(1).kind == TERM && allowedPostMultiTerm(getToken(2).kind) })
+  (
+    LOOKAHEAD({ getToken(1).kind == TERM && allowedPostMultiTerm(getToken(2).kind) })
+    text=<TERM>
+    {
+      if (splitOnWhitespace) {
+        Query q = getFieldQuery(getField(field), discardEscapeChar(text.image), false, true);
+        addClause(clauses, CONJ_NONE, MOD_NONE, q);
+      } else {
+        terms.add(discardEscapeChar(text.image));
+      }
+    }
+  )+
   {
-    return handleBoost(q, boost);
+    if (splitOnWhitespace == false) {
+      Query q = getFieldQuery(getField(field), terms, true);
+      addMultiTermClause(clauses, q);
+    }
   }
 }

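Illustrative sketch (not part of the patch): both semantic LOOKAHEADs in the MultiTerm() grammar rule test getToken(1).kind == TERM && allowedPostMultiTerm(getToken(2).kind); the token after the next one has to be inspected so that input like "foo bar:baz" or "foo bar^2" does not pull "bar" into the multi-term run. A small standalone sketch of that gate, assuming stand-in token-kind constants rather than the generated QueryParserConstants values:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class PostMultiTermGate {
  // Stand-in token kinds for illustration; the real values live in QueryParserConstants.
  static final int TERM = 1, COLON = 2, STAR = 3, FUZZY_SLOP = 4, CARAT = 5, AND = 6, OR = 7;

  static final Set<Integer> DISALLOWED_AFTER_MULTITERM =
      new HashSet<>(Arrays.asList(COLON, STAR, FUZZY_SLOP, CARAT, AND, OR));

  // Keep consuming TERMs only while the next token is a TERM and the token after it
  // does not start field, boost, fuzzy, wildcard, or boolean-operator syntax.
  static boolean continueMultiTerm(int nextKind, int afterNextKind) {
    return nextKind == TERM && !DISALLOWED_AFTER_MULTITERM.contains(afterNextKind);
  }

  public static void main(String[] args) {
    System.out.println(continueMultiTerm(TERM, TERM));   // true:  "foo bar baz"
    System.out.println(continueMultiTerm(TERM, COLON));  // false: "foo bar:baz"
  }
}
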
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d1b2fb33/solr/core/src/java/org/apache/solr/parser/SolrQueryParserBase.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/parser/SolrQueryParserBase.java b/solr/core/src/java/org/apache/solr/parser/SolrQueryParserBase.java
index cb3b1ee..08ccdd1 100644
--- a/solr/core/src/java/org/apache/solr/parser/SolrQueryParserBase.java
+++ b/solr/core/src/java/org/apache/solr/parser/SolrQueryParserBase.java
@@ -18,10 +18,12 @@ package org.apache.solr.parser;
 
 import java.io.StringReader;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.reverse.ReverseStringFilter;
@@ -41,7 +43,6 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.RegexpQuery;
 import org.apache.lucene.search.WildcardQuery;
 import org.apache.lucene.util.QueryBuilder;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.util.automaton.Automata;
 import org.apache.lucene.util.automaton.Automaton;
 import org.apache.lucene.util.automaton.Operations;
@@ -59,7 +60,7 @@ import org.apache.solr.search.SolrConstantScoreQuery;
 import org.apache.solr.search.SyntaxError;
 
 /** This class is overridden by QueryParser in QueryParser.jj
- * and acts to separate the majority of the Java code from the .jj grammar file. 
+ * and acts to separate the majority of the Java code from the .jj grammar file.
  */
 public abstract class SolrQueryParserBase extends QueryBuilder {
 
@@ -83,7 +84,7 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
   public static final Operator OR_OPERATOR = Operator.OR;
 
   /** The default operator that parser uses to combine query terms */
-  Operator operator = OR_OPERATOR;
+  protected Operator operator = OR_OPERATOR;
 
   MultiTermQuery.RewriteMethod multiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_REWRITE;
   boolean allowLeadingWildcard = true;
@@ -133,16 +134,32 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
   // internal: A simple raw fielded query
   public static class RawQuery extends Query {
     final SchemaField sfield;
-    final String externalVal;
+    private final List<String> externalVals;
 
     public RawQuery(SchemaField sfield, String externalVal) {
+      this(sfield, Collections.singletonList(externalVal));
+    }
+
+    public RawQuery(SchemaField sfield, List<String> externalVals) {
       this.sfield = sfield;
-      this.externalVal = externalVal;
+      this.externalVals = externalVals;
+    }
+
+    public int getTermCount() {
+      return externalVals.size();
+    }
+
+    public List<String> getExternalVals() {
+      return externalVals;
+    }
+
+    public String getJoinedExternalVal() {
+      return externalVals.size() == 1 ? externalVals.get(0) : String.join(" ", externalVals);
     }
 
     @Override
     public String toString(String field) {
-      return "RAW(" + field + "," + externalVal + ")";
+      return "RAW(" + field + "," + getJoinedExternalVal() + ")";
     }
 
     @Override
@@ -165,7 +182,7 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
   public abstract Query TopLevelQuery(String field) throws ParseException, SyntaxError;
 
 
-  public void init(Version matchVersion, String defaultField, QParser parser) {
+  public void init(String defaultField, QParser parser) {
     this.schema = parser.getReq().getSchema();
     this.parser = parser;
     this.flags = parser.getFlags();
@@ -406,17 +423,30 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
       throw new RuntimeException("Clause cannot be both required and prohibited");
   }
 
+  /**
+   * Called from QueryParser's MultiTerm rule.
+   * Assumption: no conjunction or modifiers (conj == CONJ_NONE and mods == MOD_NONE)
+   */
+  protected void addMultiTermClause(List<BooleanClause> clauses, Query q) {
+    // We might have been passed a null query; the term might have been
+    // filtered away by the analyzer.
+    if (q == null) {
+      return;
+    }
+    clauses.add(newBooleanClause(q, operator == AND_OPERATOR ? BooleanClause.Occur.MUST : BooleanClause.Occur.SHOULD));
+  }
 
-
-  protected Query newFieldQuery(Analyzer analyzer, String field, String queryText, boolean quoted)  throws SyntaxError {
+  protected Query newFieldQuery(Analyzer analyzer, String field, String queryText,
+                                boolean quoted, boolean fieldAutoGenPhraseQueries)  throws SyntaxError {
     BooleanClause.Occur occur = operator == Operator.AND ? BooleanClause.Occur.MUST : BooleanClause.Occur.SHOULD;
-    return createFieldQuery(analyzer, occur, field, queryText, quoted || autoGeneratePhraseQueries, phraseSlop);
+    return createFieldQuery(analyzer, occur, field, queryText,
+        quoted || fieldAutoGenPhraseQueries || autoGeneratePhraseQueries, phraseSlop);
   }
 
 
 
   /**
-   * Base implementation delegates to {@link #getFieldQuery(String,String,boolean)}.
+   * Base implementation delegates to {@link #getFieldQuery(String,String,boolean,boolean)}.
    * This method may be overridden, for example, to return
    * a SpanNearQuery instead of a PhraseQuery.
    *
@@ -440,7 +470,7 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
         query = builder.build();
       } else if (query instanceof MultiPhraseQuery) {
         MultiPhraseQuery mpq = (MultiPhraseQuery)query;
-      
+
         if (slop != mpq.getSlop()) {
           query = new MultiPhraseQuery.Builder(mpq).setSlop(slop).build();
         }
@@ -492,7 +522,7 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
   protected Query newFuzzyQuery(Term term, float minimumSimilarity, int prefixLength) {
     // FuzzyQuery doesn't yet allow constant score rewrite
     String text = term.text();
-    int numEdits = FuzzyQuery.floatToEdits(minimumSimilarity, 
+    int numEdits = FuzzyQuery.floatToEdits(minimumSimilarity,
         text.codePointCount(0, text.length()));
     return new FuzzyQuery(term,numEdits,prefixLength);
   }
@@ -536,14 +566,21 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
     }
 
     SchemaField sfield = null;
-    List<String> fieldValues = null;
-
+    List<RawQuery> fieldValues = null;
 
-    boolean useTermsQuery = (flags & QParser.FLAG_FILTER)!=0 && clauses.size() > TERMS_QUERY_THRESHOLD;
-    int clausesAdded = 0;
+    boolean onlyRawQueries = true;
+    int allRawQueriesTermCount = 0;
+    for (BooleanClause clause : clauses) {
+      if (clause.getQuery() instanceof RawQuery) {
+        allRawQueriesTermCount += ((RawQuery)clause.getQuery()).getTermCount();
+      } else {
+        onlyRawQueries = false;
+      }
+    }
+    boolean useTermsQuery = (flags & QParser.FLAG_FILTER)!=0 && allRawQueriesTermCount > TERMS_QUERY_THRESHOLD;
 
     BooleanQuery.Builder booleanBuilder = newBooleanQuery();
-    Map<SchemaField, List<String>> fmap = new HashMap<>();
+    Map<SchemaField, List<RawQuery>> fmap = new HashMap<>();
 
     for (BooleanClause clause : clauses) {
       Query subq = clause.getQuery();
@@ -563,14 +600,14 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
             // If this field isn't indexed, or if it is indexed and we want to use TermsQuery, then collect this value.
             // We are currently relying on things like PointField not being marked as indexed in order to bypass
             // the "useTermQuery" check.
-            if (fieldValues == null && useTermsQuery || !sfield.indexed()) {
+            if ((fieldValues == null && useTermsQuery) || !sfield.indexed()) {
               fieldValues = new ArrayList<>(2);
               fmap.put(sfield, fieldValues);
             }
           }
 
           if (fieldValues != null) {
-            fieldValues.add(rawq.externalVal);
+            fieldValues.add(rawq);
             continue;
           }
 
@@ -578,33 +615,50 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
         }
       }
 
-      clausesAdded++;
       booleanBuilder.add(clause);
     }
 
 
-    for (Map.Entry<SchemaField,List<String>> entry : fmap.entrySet()) {
+    for (Map.Entry<SchemaField,List<RawQuery>> entry : fmap.entrySet()) {
       sfield = entry.getKey();
       fieldValues = entry.getValue();
       FieldType ft = sfield.getType();
 
       // TODO: pull more of this logic out to FieldType?  We would need to be able to add clauses to our existing booleanBuilder.
-      if (sfield.indexed() && fieldValues.size() < TERMS_QUERY_THRESHOLD || fieldValues.size() == 1) {
+      int termCount = fieldValues.stream().mapToInt(RawQuery::getTermCount).sum();
+      if ((sfield.indexed() && termCount < TERMS_QUERY_THRESHOLD) || termCount == 1) {
         // use boolean query instead
-        for (String externalVal : fieldValues) {
-          Query subq = ft.getFieldQuery(this.parser, sfield, externalVal);
-          clausesAdded++;
-          booleanBuilder.add(subq, BooleanClause.Occur.SHOULD);
+        for (RawQuery rawq : fieldValues) {
+          Query subq;
+          if (ft.isTokenized() && sfield.indexed()) {
+            boolean fieldAutoGenPhraseQueries = ft instanceof TextField && ((TextField)ft).getAutoGeneratePhraseQueries();
+            subq = newFieldQuery(getAnalyzer(), sfield.getName(), rawq.getJoinedExternalVal(),
+                false, fieldAutoGenPhraseQueries);
+            booleanBuilder.add(subq, BooleanClause.Occur.SHOULD);
+          } else {
+            for (String externalVal : rawq.getExternalVals()) {
+              subq = ft.getFieldQuery(this.parser, sfield, externalVal);
+              booleanBuilder.add(subq, BooleanClause.Occur.SHOULD);
+            }
+          }
         }
       } else {
-        Query subq = ft.getSetQuery(this.parser, sfield, fieldValues);
-        if (fieldValues.size() == clauses.size()) return subq; // if this is everything, don't wrap in a boolean query
-        clausesAdded++;
+        List<String> externalVals
+            = fieldValues.stream().flatMap(rawq -> rawq.getExternalVals().stream()).collect(Collectors.toList());
+        Query subq = ft.getSetQuery(this.parser, sfield, externalVals);
+        if (onlyRawQueries && termCount == allRawQueriesTermCount) return subq; // if this is everything, don't wrap in a boolean query
         booleanBuilder.add(subq, BooleanClause.Occur.SHOULD);
       }
     }
 
-    return booleanBuilder.build();
+    BooleanQuery bq = booleanBuilder.build();
+    if (bq.clauses().size() == 1) { // Unwrap single SHOULD query
+      BooleanClause clause = bq.clauses().iterator().next();
+      if (clause.getOccur() == BooleanClause.Occur.SHOULD) {
+        return clause.getQuery();
+      }
+    }
+    return bq;
   }
 
 
@@ -835,9 +889,26 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
 
   // Create a "normal" query from a RawQuery (or just return the current query if it's not raw)
   Query rawToNormal(Query q) {
-    if (!(q instanceof RawQuery)) return q;
-    RawQuery rq = (RawQuery)q;
-    return rq.sfield.getType().getFieldQuery(parser, rq.sfield, rq.externalVal);
+    Query normal = q;
+    if (q instanceof RawQuery) {
+      RawQuery rawq = (RawQuery)q;
+      if (rawq.sfield.getType().isTokenized()) {
+        normal = rawq.sfield.getType().getFieldQuery(parser, rawq.sfield, rawq.getJoinedExternalVal());
+      } else {
+        FieldType ft = rawq.sfield.getType();
+        if (rawq.getTermCount() == 1) {
+          normal = ft.getFieldQuery(this.parser, rawq.sfield, rawq.getExternalVals().get(0));
+        } else {
+          BooleanQuery.Builder booleanBuilder = newBooleanQuery();
+          for (String externalVal : rawq.getExternalVals()) {
+            Query subq = ft.getFieldQuery(this.parser, rawq.sfield, externalVal);
+            booleanBuilder.add(subq, BooleanClause.Occur.SHOULD);
+          }
+          normal = booleanBuilder.build();
+        }
+      }
+    }
+    return normal;
   }
 
   protected Query getFieldQuery(String field, String queryText, boolean quoted) throws SyntaxError {
@@ -877,21 +948,87 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
       FieldType ft = sf.getType();
       // delegate to type for everything except tokenized fields
       if (ft.isTokenized() && sf.indexed()) {
-        return newFieldQuery(getAnalyzer(), field, queryText, quoted || (ft instanceof TextField && ((TextField)ft).getAutoGeneratePhraseQueries()));
+        boolean fieldAutoGenPhraseQueries = ft instanceof TextField && ((TextField)ft).getAutoGeneratePhraseQueries();
+        return newFieldQuery(getAnalyzer(), field, queryText, quoted, fieldAutoGenPhraseQueries);
       } else {
         if (raw) {
           return new RawQuery(sf, queryText);
         } else {
-          return sf.getType().getFieldQuery(parser, sf, queryText);
+          return ft.getFieldQuery(parser, sf, queryText);
+        }
+      }
+    }
+
+    // default to a normal field query
+    return newFieldQuery(getAnalyzer(), field, queryText, quoted, false);
+  }
+
+  // Assumption: quoted is always false
+  protected Query getFieldQuery(String field, List<String> queryTerms, boolean raw) throws SyntaxError {
+    checkNullField(field);
+
+    SchemaField sf;
+    if (field.equals(lastFieldName)) {
+      // only look up the SchemaField on a field change... this helps with memory allocation of dynamic fields
+      // and large queries like foo_i:(1 2 3 4 5 6 7 8 9 10) when we are passed "foo_i" each time.
+      sf = lastField;
+    } else {
+      // intercept magic field name of "_" to use as a hook for our
+      // own functions.
+      if (field.charAt(0) == '_' && parser != null) {
+        MagicFieldName magic = MagicFieldName.get(field);
+        if (null != magic) {
+          subQParser = parser.subQuery(String.join(" ", queryTerms), magic.subParser);
+          return subQParser.getQuery();
+        }
+      }
+
+      lastFieldName = field;
+      sf = lastField = schema.getFieldOrNull(field);
+    }
+
+    if (sf != null) {
+      FieldType ft = sf.getType();
+      // delegate to type for everything except tokenized fields
+      if (ft.isTokenized() && sf.indexed()) {
+        String queryText = queryTerms.size() == 1 ? queryTerms.get(0) : String.join(" ", queryTerms);
+        boolean fieldAutoGenPhraseQueries = ft instanceof TextField && ((TextField)ft).getAutoGeneratePhraseQueries();
+        return newFieldQuery(getAnalyzer(), field, queryText, false, fieldAutoGenPhraseQueries);
+      } else {
+        if (raw) {
+          return new RawQuery(sf, queryTerms);
+        } else {
+          if (queryTerms.size() == 1) {
+            return ft.getFieldQuery(parser, sf, queryTerms.get(0));
+          } else {
+            List<Query> subqs = new ArrayList<>();
+            for (String queryTerm : queryTerms) {
+              try {
+                subqs.add(ft.getFieldQuery(parser, sf, queryTerm));
+              } catch (Exception e) { // assumption: raw = false only when called from ExtendedDismaxQueryParser.getQuery()
+                // for edismax: ignore parsing failures
+              }
+            }
+            if (subqs.size() == 1) {
+              return subqs.get(0);
+            } else { // delay building boolean query until we must
+              final BooleanClause.Occur occur
+                  = operator == AND_OPERATOR ? BooleanClause.Occur.MUST : BooleanClause.Occur.SHOULD;
+              BooleanQuery.Builder booleanBuilder = newBooleanQuery();
+              subqs.forEach(subq -> booleanBuilder.add(subq, occur));
+              return booleanBuilder.build();
+            }
+          }
         }
       }
     }
 
     // default to a normal field query
-    return newFieldQuery(getAnalyzer(), field, queryText, quoted);
+    String queryText = queryTerms.size() == 1 ? queryTerms.get(0) : String.join(" ", queryTerms);
+    return newFieldQuery(getAnalyzer(), field, queryText, false, false);
   }
 
- protected boolean isRangeShouldBeProtectedFromReverse(String field, String part1){
+  protected boolean isRangeShouldBeProtectedFromReverse(String field, String part1){
    checkNullField(field);
    SchemaField sf = schema.getField(field);
 

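Illustrative sketch (not part of the patch): with RawQuery now carrying a list of external values, getBooleanQuery() sums getTermCount() per field before deciding between one SHOULD clause per value and a single set (terms) query. A rough standalone sketch of that per-field branch; the threshold value used here is an assumption for the demo, the real one is the TERMS_QUERY_THRESHOLD constant in SolrQueryParserBase:

public class TermsQueryDecisionSketch {
  static final int TERMS_QUERY_THRESHOLD = 16; // assumed value, for illustration only

  // Mirrors the per-field branch: small term counts on an indexed field (or a single
  // term) stay as individual SHOULD clauses; larger counts collapse into one set query.
  static String plan(boolean indexed, int termCount) {
    if ((indexed && termCount < TERMS_QUERY_THRESHOLD) || termCount == 1) {
      return "BooleanQuery with " + termCount + " SHOULD clause(s)";
    }
    return "single set/terms query over " + termCount + " values";
  }

  public static void main(String[] args) {
    System.out.println(plan(true, 3));    // a few values: boolean query
    System.out.println(plan(true, 500));  // many values: set query
    System.out.println(plan(false, 1));   // single value on an unindexed field: boolean query
  }
}
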
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d1b2fb33/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java b/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java
index ebb6188..c0aee88 100644
--- a/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java
+++ b/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java
@@ -17,6 +17,7 @@
 package org.apache.solr.search;
 
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
@@ -160,6 +161,8 @@ public class ExtendedDismaxQParser extends QParser {
       
       // but always for unstructured implicit bqs created by getFieldQuery
       up.minShouldMatch = config.minShouldMatch;
+
+      up.setSplitOnWhitespace(config.splitOnWhitespace);
       
       parsedUserQuery = parseOriginalQuery(up, mainUserQuery, clauses, config);
       
@@ -307,6 +310,8 @@ public class ExtendedDismaxQParser extends QParser {
         up.setRemoveStopFilter(true);
         query = up.parse(mainUserQuery);          
       }
+    } catch (QueryParserConfigurationException e) {
+      throw e; // Don't ignore configuration exceptions
     } catch (Exception e) {
       // ignore failure and reparse later after escaping reserved chars
       up.exceptions = false;
@@ -545,6 +550,7 @@ public class ExtendedDismaxQParser extends QParser {
     pp.addAlias(IMPOSSIBLE_FIELD_NAME, tiebreaker, getFieldBoosts(fields));
     pp.setPhraseSlop(slop);
     pp.setRemoveStopFilter(true);  // remove stop filter and keep stopwords
+    pp.setSplitOnWhitespace(config.splitOnWhitespace);
     
     /* :TODO: reevaluate using makeDismax=true vs false...
      * 
@@ -976,6 +982,7 @@ public class ExtendedDismaxQParser extends QParser {
     private String field;
     private String val;
     private String val2;
+    private List<String> vals;
     private boolean bool;
     private boolean bool2;
     private float flt;
@@ -1036,6 +1043,7 @@ public class ExtendedDismaxQParser extends QParser {
       this.type = quoted ? QType.PHRASE : QType.FIELD;
       this.field = field;
       this.val = val;
+      this.vals = null;
       this.slop = getPhraseSlop(); // unspecified
       return getAliasedQuery();
     }
@@ -1045,10 +1053,21 @@ public class ExtendedDismaxQParser extends QParser {
       this.type = QType.PHRASE;
       this.field = field;
       this.val = val;
+      this.vals = null;
       this.slop = slop;
       return getAliasedQuery();
     }
-    
+
+    @Override
+    protected Query getFieldQuery(String field, List<String> queryTerms, boolean raw) throws SyntaxError {
+      this.type = QType.FIELD;
+      this.field = field;
+      this.val = null;
+      this.vals = queryTerms;
+      this.slop = getPhraseSlop();
+      return getAliasedMultiTermQuery(queryTerms);
+    }
+
     @Override
     protected Query getPrefixQuery(String field, String val) throws SyntaxError {
       if (val.equals("") && field.equals("*")) {
@@ -1057,11 +1076,17 @@ public class ExtendedDismaxQParser extends QParser {
       this.type = QType.PREFIX;
       this.field = field;
       this.val = val;
+      this.vals = null;
       return getAliasedQuery();
     }
     
     @Override
-    protected Query newFieldQuery(Analyzer analyzer, String field, String queryText, boolean quoted) throws SyntaxError {
+    protected Query newFieldQuery(Analyzer analyzer, String field, String queryText, 
+                                  boolean quoted, boolean fieldAutoGenPhraseQueries) throws SyntaxError {
+      if ((getAutoGeneratePhraseQueries() || fieldAutoGenPhraseQueries) && getSplitOnWhitespace() == false) {
+        throw new QueryParserConfigurationException
+            ("Field '" + field + "': autoGeneratePhraseQueries == true is disallowed when sow/splitOnWhitespace == false");
+      }
       Analyzer actualAnalyzer;
       if (removeStopFilter) {
         if (nonStopFilterAnalyzerPerField == null) {
@@ -1074,7 +1099,7 @@ public class ExtendedDismaxQParser extends QParser {
       } else {
         actualAnalyzer = parser.getReq().getSchema().getFieldType(field).getQueryAnalyzer();
       }
-      return super.newFieldQuery(actualAnalyzer, field, queryText, quoted);
+      return super.newFieldQuery(actualAnalyzer, field, queryText, quoted, fieldAutoGenPhraseQueries);
     }
     
     @Override
@@ -1083,6 +1108,7 @@ public class ExtendedDismaxQParser extends QParser {
       this.field = field;
       this.val = a;
       this.val2 = b;
+      this.vals = null;
       this.bool = startInclusive;
       this.bool2 = endInclusive;
       return getAliasedQuery();
@@ -1100,6 +1126,7 @@ public class ExtendedDismaxQParser extends QParser {
       this.type = QType.WILDCARD;
       this.field = field;
       this.val = val;
+      this.vals = null;
       return getAliasedQuery();
     }
     
@@ -1108,6 +1135,7 @@ public class ExtendedDismaxQParser extends QParser {
       this.type = QType.FUZZY;
       this.field = field;
       this.val = val;
+      this.vals = null;
       this.flt = minSimilarity;
       return getAliasedQuery();
     }
@@ -1157,7 +1185,129 @@ public class ExtendedDismaxQParser extends QParser {
         return getQuery();
       }
     }
-    
+
+    /**
+     * Delegates to the super class unless the field has been specified
+     * as an alias -- in which case we recurse on each of
+     * the aliased fields, and the results are composed into a
+     * DisjunctionMaxQuery.  (so yes: aliases which point at other
+     * aliases should work)
+     */
+    protected Query getAliasedMultiTermQuery(List<String> queryTerms) throws SyntaxError {
+      Alias a = aliases.get(field);
+      this.validateCyclicAliasing(field);
+      if (a != null) {
+        List<Query> lst = getQueries(a);
+        if (lst == null || lst.size() == 0) {
+          return getQuery();
+        }
+        
+        // make a DisjunctionMaxQuery in this case too... it will stop
+        // the "mm" processing from making everything required in the case
+        // that the query expanded to multiple clauses.
+        // DisMaxQuery.rewrite() removes itself if there is just a single clause anyway.
+        // if (lst.size()==1) return lst.get(0);
+        if (makeDismax) {
+          if (lst.get(0) instanceof BooleanQuery && allSameQueryStructure(lst)) {
+            BooleanQuery.Builder q = new BooleanQuery.Builder();
+            List<Query> subs = new ArrayList<>(lst.size());
+            for (int c = 0 ; c < ((BooleanQuery)lst.get(0)).clauses().size() ; ++c) {
+              subs.clear();
+              // Make a dismax query for each clause position in the boolean per-field queries.
+              for (int n = 0 ; n < lst.size() ; ++n) {
+                subs.add(((BooleanQuery)lst.get(n)).clauses().get(c).getQuery());
+              }
+              q.add(newBooleanClause(new DisjunctionMaxQuery(subs, a.tie), BooleanClause.Occur.SHOULD));
+            }
+            return q.build();
+          } else {
+            return new DisjunctionMaxQuery(lst, a.tie); 
+          }
+        } else {
+          BooleanQuery.Builder q = new BooleanQuery.Builder();
+          for (Query sub : lst) {
+            q.add(sub, BooleanClause.Occur.SHOULD);
+          }
+          return q.build();
+        }
+      } else {
+        // verify that a fielded query is actually on a field that exists... if not,
+        // then throw an exception to get us out of here, and we'll treat it like a
+        // literal when we try the escape+re-parse.
+        if (exceptions) {
+          FieldType ft = schema.getFieldTypeNoEx(field);
+          if (ft == null && null == MagicFieldName.get(field)) {
+            throw unknownField;
+          }
+        }
+        return getQuery();
+      }
+    }
+
+    /** Recursively examines the given query list for identical structure in all queries. */
+    private boolean allSameQueryStructure(List<Query> lst) {
+      boolean allSame = true;
+      Query firstQuery = lst.get(0);
+      for (int n = 1 ; n < lst.size(); ++n) {
+        Query nthQuery = lst.get(n);
+        if (nthQuery.getClass() != firstQuery.getClass()) {
+          allSame = false;
+          break;
+        }
+        if (firstQuery instanceof BooleanQuery) {
+          List<BooleanClause> firstBooleanClauses = ((BooleanQuery)firstQuery).clauses();
+          List<BooleanClause> nthBooleanClauses = ((BooleanQuery)nthQuery).clauses();
+          if (firstBooleanClauses.size() != nthBooleanClauses.size()) {
+            allSame = false;
+            break;
+          }
+          for (int c = 0 ; c < firstBooleanClauses.size() ; ++c) {
+            if (nthBooleanClauses.get(c).getQuery().getClass() != firstBooleanClauses.get(c).getQuery().getClass()
+                || nthBooleanClauses.get(c).getOccur() != firstBooleanClauses.get(c).getOccur()) {
+              allSame = false;
+              break;
+            }
+            if (firstBooleanClauses.get(c).getQuery() instanceof BooleanQuery && ! allSameQueryStructure
+                (Arrays.asList(firstBooleanClauses.get(c).getQuery(), nthBooleanClauses.get(c).getQuery()))) {
+              allSame = false;
+              break;
+            }
+          }
+        }
+      }
+      return allSame;
+    }
+
+    @Override
+    protected void addMultiTermClause(List<BooleanClause> clauses, Query q) {
+      // We might have been passed a null query; the terms might have been filtered away by the analyzer.
+      if (q == null) {
+        return;
+      }
+      
+      boolean required = operator == AND_OPERATOR;
+      BooleanClause.Occur occur = required ? BooleanClause.Occur.MUST : BooleanClause.Occur.SHOULD;  
+      
+      if (q instanceof BooleanQuery) {
+        boolean allOptionalDisMaxQueries = true;
+        for (BooleanClause c : ((BooleanQuery)q).clauses()) {
+          if (c.getOccur() != BooleanClause.Occur.SHOULD || ! (c.getQuery() instanceof DisjunctionMaxQuery)) {
+            allOptionalDisMaxQueries = false;
+            break;
+          }
+        }
+        if (allOptionalDisMaxQueries) {
+          // getAliasedMultiTermQuery() constructed a BooleanQuery containing only SHOULD DisjunctionMaxQuery-s.
+          // Unwrap the query and add a clause for each contained DisMax query.
+          for (BooleanClause c : ((BooleanQuery)q).clauses()) {
+            clauses.add(newBooleanClause(c.getQuery(), occur));
+          }
+          return;
+        }
+      }
+      clauses.add(newBooleanClause(q, occur));
+    }
+
     /**
      * Validate there is no cyclic referencing in the aliasing
      */
@@ -1212,7 +1362,12 @@ public class ExtendedDismaxQParser extends QParser {
         switch (type) {
           case FIELD:  // fallthrough
           case PHRASE:
-            Query query = super.getFieldQuery(field, val, type == QType.PHRASE, false);
+            Query query;
+            if (val == null) {
+              query = super.getFieldQuery(field, vals, false);
+            } else {
+              query = super.getFieldQuery(field, val, type == QType.PHRASE, false);
+            }
             // Boolean query on a whitespace-separated string
             // If these were synonyms we would have a SynonymQuery
             if (query instanceof BooleanQuery) {
@@ -1248,6 +1403,8 @@ public class ExtendedDismaxQParser extends QParser {
         }
         return null;
         
+      } catch (QueryParserConfigurationException e) {
+        throw e;  // Don't ignore configuration exceptions
       } catch (Exception e) {
         // an exception here is due to the field query not being compatible with the input text
         // for example, passing a string to a numeric field.
@@ -1442,7 +1599,7 @@ public class ExtendedDismaxQParser extends QParser {
    */
   public class ExtendedDismaxConfiguration {
     
-    /** 
+    /**
      * The field names specified by 'qf' that (most) clauses will 
      * be queried against 
      */
@@ -1478,7 +1635,9 @@ public class ExtendedDismaxQParser extends QParser {
     protected boolean lowercaseOperators;
     
     protected  String[] boostFuncs;
-    
+
+    protected boolean splitOnWhitespace;
+
     public ExtendedDismaxConfiguration(SolrParams localParams,
         SolrParams params, SolrQueryRequest req) {
       solrParams = SolrParams.wrapDefaults(localParams, params);
@@ -1522,6 +1681,8 @@ public class ExtendedDismaxQParser extends QParser {
       boostFuncs = solrParams.getParams(DisMaxParams.BF);
       
       multBoosts = solrParams.getParams(DMP.MULT_BOOST);
+
+      splitOnWhitespace = solrParams.getBool(QueryParsing.SPLIT_ON_WHITESPACE, SolrQueryParser.DEFAULT_SPLIT_ON_WHITESPACE);
     }
     /**
      * 

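Illustrative sketch (not part of the patch): when allSameQueryStructure() holds, getAliasedMultiTermQuery() above recombines the per-field queries position by position, wrapping clause i of each aliased field's BooleanQuery in a DisjunctionMaxQuery and adding the per-position DisMax queries as SHOULD clauses. A standalone sketch of that recombination using plain Lucene queries; the field names, terms, and tie value are made up for the demo:

import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

public class PerPositionDisMaxSketch {
  public static void main(String[] args) {
    String[] fields = {"title", "body"};     // hypothetical qf aliases
    String[] terms = {"united", "states"};   // one clause position per term
    float tie = 0.1f;                        // hypothetical alias tiebreaker

    // Per-field boolean queries with identical structure: one SHOULD clause per term.
    List<BooleanQuery> perField = new ArrayList<>();
    for (String f : fields) {
      BooleanQuery.Builder b = new BooleanQuery.Builder();
      for (String t : terms) {
        b.add(new TermQuery(new Term(f, t)), BooleanClause.Occur.SHOULD);
      }
      perField.add(b.build());
    }

    // Combine clause position c across all fields into one DisjunctionMaxQuery.
    BooleanQuery.Builder combined = new BooleanQuery.Builder();
    for (int c = 0; c < terms.length; c++) {
      List<Query> subs = new ArrayList<>();
      for (BooleanQuery bq : perField) {
        subs.add(bq.clauses().get(c).getQuery());
      }
      combined.add(new DisjunctionMaxQuery(subs, tie), BooleanClause.Occur.SHOULD);
    }
    System.out.println(combined.build()); // prints the per-position DisMax combination
  }
}
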
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d1b2fb33/solr/core/src/java/org/apache/solr/search/LuceneQParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/LuceneQParser.java b/solr/core/src/java/org/apache/solr/search/LuceneQParser.java
index 9ac318b..9668d8f 100644
--- a/solr/core/src/java/org/apache/solr/search/LuceneQParser.java
+++ b/solr/core/src/java/org/apache/solr/search/LuceneQParser.java
@@ -19,6 +19,7 @@ package org.apache.solr.search;
 import org.apache.lucene.search.Query;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.request.SolrQueryRequest;
 
 /**
@@ -46,6 +47,8 @@ public class LuceneQParser extends QParser {
     lparser.setDefaultOperator
       (QueryParsing.getQueryParserDefaultOperator(getReq().getSchema(),
                                                   getParam(QueryParsing.OP)));
+    lparser.setSplitOnWhitespace(StrUtils.parseBool
+      (getParam(QueryParsing.SPLIT_ON_WHITESPACE), SolrQueryParser.DEFAULT_SPLIT_ON_WHITESPACE));
 
     return lparser.parse(qstr);
   }

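Illustrative sketch (not part of the patch): a minimal SolrJ usage example for the sow parameter that LuceneQParser now reads via QueryParsing.SPLIT_ON_WHITESPACE. It assumes a Solr instance at localhost:8983 with a collection1 whose text field has multi-word synonyms configured; none of those names are prescribed by the patch:

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;

public class SowUsageSketch {
  public static void main(String[] args) throws Exception {
    try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/collection1").build()) {
      SolrQuery q = new SolrQuery("text:(united states of america)");
      q.set("sow", "false");   // keep the run together so multi-word synonyms can match
      System.out.println(client.query(q).getResults().getNumFound());
    }
  }
}
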
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d1b2fb33/solr/core/src/java/org/apache/solr/search/LuceneQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/LuceneQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/LuceneQParserPlugin.java
index 07b35ad..4e2a4d6 100644
--- a/solr/core/src/java/org/apache/solr/search/LuceneQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/LuceneQParserPlugin.java
@@ -28,6 +28,8 @@ import java.util.List;
  * <br>Other parameters:<ul>
  * <li>q.op - the default operator "OR" or "AND"</li>
  * <li>df - the default field name</li>
+ * <li>sow - split on whitespace prior to analysis, boolean,
+ *           default=<code>{@value org.apache.solr.search.SolrQueryParser#DEFAULT_SPLIT_ON_WHITESPACE}</code></li>
  * </ul>
  * <br>Example: <code>{!lucene q.op=AND df=text sort='price asc'}myfield:foo +bar -baz</code>
  */

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d1b2fb33/solr/core/src/java/org/apache/solr/search/QueryParserConfigurationException.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/QueryParserConfigurationException.java b/solr/core/src/java/org/apache/solr/search/QueryParserConfigurationException.java
new file mode 100644
index 0000000..0dd2a33
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/search/QueryParserConfigurationException.java
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.search;
+
+public class QueryParserConfigurationException extends IllegalArgumentException {
+  public QueryParserConfigurationException(String message) {
+    super(message);
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d1b2fb33/solr/core/src/java/org/apache/solr/search/QueryParsing.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/QueryParsing.java b/solr/core/src/java/org/apache/solr/search/QueryParsing.java
index fb32c6e..692de1a 100644
--- a/solr/core/src/java/org/apache/solr/search/QueryParsing.java
+++ b/solr/core/src/java/org/apache/solr/search/QueryParsing.java
@@ -51,6 +51,7 @@ public class QueryParsing {
   public static final String F = "f";      // field that a query or command pertains to
   public static final String TYPE = "type";// parser for this query or command
   public static final String DEFTYPE = "defType"; // default parser for any direct subqueries
+  public static final String SPLIT_ON_WHITESPACE = "sow"; // Whether to split on whitespace prior to analysis
   public static final String LOCALPARAM_START = "{!";
   public static final char LOCALPARAM_END = '}';
   // true if the value was specified by the "v" param (i.e. v=myval, or v=$param)

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d1b2fb33/solr/core/src/java/org/apache/solr/search/SolrQueryParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/SolrQueryParser.java b/solr/core/src/java/org/apache/solr/search/SolrQueryParser.java
index 49a492b..60ef9fb 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrQueryParser.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrQueryParser.java
@@ -25,7 +25,7 @@ import org.apache.solr.parser.QueryParser;
 public class SolrQueryParser extends QueryParser {
 
   public SolrQueryParser(QParser parser, String defaultField) {
-    super(parser.getReq().getCore().getSolrConfig().luceneMatchVersion, defaultField, parser);
+    super(defaultField, parser);
   }
 
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d1b2fb33/solr/core/src/test-files/solr/collection1/conf/multiword-synonyms.txt
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/multiword-synonyms.txt b/solr/core/src/test-files/solr/collection1/conf/multiword-synonyms.txt
new file mode 100644
index 0000000..0ef4d78
--- /dev/null
+++ b/solr/core/src/test-files/solr/collection1/conf/multiword-synonyms.txt
@@ -0,0 +1,13 @@
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+US, U.S., U S, USA, U.S.A., U S A, United States, United States of America
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d1b2fb33/solr/core/src/test-files/solr/collection1/conf/schema-multiword-synonyms.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema-multiword-synonyms.xml b/solr/core/src/test-files/solr/collection1/conf/schema-multiword-synonyms.xml
new file mode 100644
index 0000000..5544e22
--- /dev/null
+++ b/solr/core/src/test-files/solr/collection1/conf/schema-multiword-synonyms.xml
@@ -0,0 +1,50 @@
+<?xml version="1.0" ?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<schema name="test-multi-word-synonyms" version="1.6">
+
+  <fieldType name="string" class="solr.StrField" sortMissingLast="true"/>
+  <fieldType name="long" class="solr.TrieLongField" precisionStep="0" positionIncrementGap="0"/>
+  <field name="signatureField" type="string" indexed="true" stored="false"/>
+
+  <fieldType name="text" class="solr.TextField" positionIncrementGap="100">
+    <analyzer type="index">
+      <tokenizer class="solr.StandardTokenizerFactory"/>
+      <filter class="solr.LowerCaseFilterFactory"/>
+    </analyzer>
+    <analyzer type="query">
+      <tokenizer class="solr.StandardTokenizerFactory"/>
+      <filter class="solr.LowerCaseFilterFactory"/>
+      <filter class="solr.SynonymGraphFilterFactory" 
+              synonyms="multiword-synonyms.txt"
+              tokenizerFactory="solr.StandardTokenizerFactory"
+              ignoreCase="true" 
+              expand="true"/>
+    </analyzer>
+  </fieldType>
+
+  <field name="id" type="string" indexed="true" stored="true" multiValued="false" required="true"/>
+  <uniqueKey>id</uniqueKey>
+
+  <field name="_version_" type="long" indexed="false" stored="false" docValues="true"/>
+
+  <field name="text" type="text" indexed="true" stored="true"/>
+
+  <dynamicField name="*_sS" type="string" indexed="false" stored="true"/>
+
+</schema>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d1b2fb33/solr/core/src/test-files/solr/collection1/conf/synonyms.txt
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/synonyms.txt b/solr/core/src/test-files/solr/collection1/conf/synonyms.txt
index b0e31cb..340abd7 100644
--- a/solr/core/src/test-files/solr/collection1/conf/synonyms.txt
+++ b/solr/core/src/test-files/solr/collection1/conf/synonyms.txt
@@ -29,3 +29,5 @@ Television, Televisions, TV, TVs
 # Synonym mappings can be used for spelling correction too
 pixima => pixma
 
+# multiword synonyms
+wi fi => wifi


[28/46] lucene-solr:jira/solr-9959: LUCENE-7748: buildAndPushRelease.py should fail if the project DOAP files are missing releases that are less than the release being produced

Posted by ab...@apache.org.
LUCENE-7748: buildAndPushRelease.py should fail if the project DOAP files are missing releases that are less than the release being produced


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d60849f3
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d60849f3
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d60849f3

Branch: refs/heads/jira/solr-9959
Commit: d60849f37782e0740e4e9964d70002a7d3078e22
Parents: e7d9db9
Author: Steve Rowe <sa...@apache.org>
Authored: Sat Mar 18 15:09:43 2017 -0400
Committer: Steve Rowe <sa...@apache.org>
Committed: Sat Mar 18 15:09:43 2017 -0400

----------------------------------------------------------------------
 dev-tools/scripts/buildAndPushRelease.py | 64 +++++++++++++++++++++++++++
 1 file changed, 64 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d60849f3/dev-tools/scripts/buildAndPushRelease.py
----------------------------------------------------------------------
diff --git a/dev-tools/scripts/buildAndPushRelease.py b/dev-tools/scripts/buildAndPushRelease.py
index d742214..57f093b 100644
--- a/dev-tools/scripts/buildAndPushRelease.py
+++ b/dev-tools/scripts/buildAndPushRelease.py
@@ -22,6 +22,8 @@ import os
 import sys
 import subprocess
 import textwrap
+import urllib.request, urllib.error, urllib.parse
+import xml.etree.ElementTree as ET
 
 LOG = '/tmp/release.log'
 
@@ -57,6 +59,14 @@ def runAndSendGPGPassword(command, password):
     print(msg)
     raise RuntimeError(msg)
 
+def load(urlString):
+  try:
+    content = urllib.request.urlopen(urlString).read().decode('utf-8')
+  except Exception as e:
+    print('Retrying download of url %s after exception: %s' % (urlString, e))
+    content = urllib.request.urlopen(urlString).read().decode('utf-8')
+  return content
+
 def getGitRev():
   status = os.popen('git status').read().strip()
   if 'nothing to commit, working directory clean' not in status and 'nothing to commit, working tree clean' not in status:
@@ -84,6 +94,9 @@ def prepare(root, version, gpgKeyID, gpgPassword):
   print('  git rev: %s' % rev)
   log('\nGIT rev: %s\n' % rev)
 
+  print('  Check DOAP files')
+  checkDOAPfiles(version)
+
   print('  ant clean test')
   run('ant clean test')
 
@@ -119,6 +132,57 @@ def prepare(root, version, gpgKeyID, gpgPassword):
   print()
   return rev
 
+reVersion1 = re.compile(r'\>(\d+)\.(\d+)\.(\d+)(-alpha|-beta)?/\<', re.IGNORECASE)
+reVersion2 = re.compile(r'-(\d+)\.(\d+)\.(\d+)(-alpha|-beta)?\.zip<', re.IGNORECASE)
+reDoapRevision = re.compile(r'(\d+)\.(\d+)(?:\.(\d+))?(-alpha|-beta)?', re.IGNORECASE)
+def checkDOAPfiles(version):
+  # In Lucene and Solr DOAP files, verify presence of all releases less than the one being produced.
+  errorMessages = []
+  for product in 'lucene', 'solr':
+    url = 'https://archive.apache.org/dist/lucene/%s' % ('java' if product == 'lucene' else product)
+    distpage = load(url)
+    releases = set()
+    for regex in reVersion1, reVersion2:
+      for tup in regex.findall(distpage):
+        if tup[0] in ('1', '2'):                    # Ignore 1.X and 2.X releases
+          continue
+        releases.add(normalizeVersion(tup))
+    doapNS = '{http://usefulinc.com/ns/doap#}'
+    xpathRevision = '{0}Project/{0}release/{0}Version/{0}revision'.format(doapNS)
+    doapFile = "dev-tools/doap/%s.rdf" % product
+    treeRoot = ET.parse(doapFile).getroot()
+    doapRevisions = set()
+    for revision in treeRoot.findall(xpathRevision):
+      match = reDoapRevision.match(revision.text)
+      if (match is not None):
+        if (match.group(1) not in ('0', '1', '2')): # Ignore 0.X, 1.X and 2.X revisions
+          doapRevisions.add(normalizeVersion(match.groups()))
+      else:
+        errorMessages.append('ERROR: Failed to parse revision: %s in %s' % (revision.text, doapFile))
+    missingDoapRevisions = set()
+    for release in releases:
+      if release not in doapRevisions and release < version: # Ignore releases greater than the one being produced
+        missingDoapRevisions.add(release)
+    if len(missingDoapRevisions) > 0:
+      errorMessages.append('ERROR: Missing revision(s) in %s: %s' % (doapFile, ', '.join(sorted(missingDoapRevisions))))
+  if (len(errorMessages) > 0):
+    raise RuntimeError('\n%s\n(Hint: copy/paste from the stable branch version of the file(s).)'
+                       % '\n'.join(errorMessages))
+
+def normalizeVersion(tup):
+  suffix = ''
+  if tup[-1] is not None and tup[-1].lower() == '-alpha':
+    tup = tup[:(len(tup) - 1)]
+    suffix = '-ALPHA'
+  elif tup[-1] is not None and tup[-1].lower() == '-beta':
+    tup = tup[:(len(tup) - 1)]
+    suffix = '-BETA'
+  while tup[-1] in ('', None):
+    tup = tup[:(len(tup) - 1)]
+  while len(tup) < 3:
+    tup = tup + ('0',)
+  return '.'.join(tup) + suffix
+
 def pushLocal(version, root, rev, rcNum, localDir):
   print('Push local [%s]...' % localDir)
   os.makedirs(localDir)
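
To make the comparison concrete: normalizeVersion() above maps an archive match like ('6', '4', '1', '') and a DOAP revision match like ('6', '4', '1', None) to the same string '6.4.1', pads a two-part revision such as ('3', '1', None, None) out to '3.1.0', and preserves pre-release suffixes, e.g. ('4', '0', '0', '-alpha') becomes '4.0.0-ALPHA'. checkDOAPfiles() then flags any normalized archive release below the version being built that is absent from the corresponding .rdf file, and prepare() aborts before running the build.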


[07/46] lucene-solr:jira/solr-9959: SOLR-10271: SQL aggregations in map_reduce mode should use javabin transport

Posted by ab...@apache.org.
SOLR-10271: SQL aggregations in map_reduce mode should use javabin transport


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/be9fea1b
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/be9fea1b
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/be9fea1b

Branch: refs/heads/jira/solr-9959
Commit: be9fea1bc509eff9296c89e7270c5e8669b39d0f
Parents: 22f91ba
Author: Joel Bernstein <jb...@apache.org>
Authored: Mon Mar 13 17:10:21 2017 -0400
Committer: Joel Bernstein <jb...@apache.org>
Committed: Wed Mar 15 18:31:45 2017 -0400

----------------------------------------------------------------------
 solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java | 2 ++
 1 file changed, 2 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/be9fea1b/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java b/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
index 37ed7d9..644ed97 100644
--- a/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
+++ b/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
@@ -453,6 +453,7 @@ class SolrTable extends AbstractQueryableTable implements TranslatableTable {
 
     params.set(CommonParams.FL, fl);
     params.set(CommonParams.Q, query);
+    params.set(CommonParams.WT, CommonParams.JAVABIN);
     //Always use the /export handler for Group By Queries because it requires exporting full result sets.
     params.set(CommonParams.QT, "/export");
 
@@ -691,6 +692,7 @@ class SolrTable extends AbstractQueryableTable implements TranslatableTable {
 
     params.set(CommonParams.FL, fl);
     params.set(CommonParams.Q, query);
+    params.set(CommonParams.WT, CommonParams.JAVABIN);
     //Always use the /export handler for Distinct Queries because it requires exporting full result sets.
     params.set(CommonParams.QT, "/export");
 

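For background: javabin is Solr's compact binary response format, so asking the /export handler for wt=javabin here should cut serialization and parsing overhead when map_reduce aggregations move full result sets between nodes.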

[14/46] lucene-solr:jira/solr-9959: SOLR-10286: large fields. And refactored FieldType.checkSchemaField to call a new checkSupportsDocValues()

Posted by ab...@apache.org.
SOLR-10286: large fields.
And refactored FieldType.checkSchemaField to call a new checkSupportsDocValues()


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/2502af9f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/2502af9f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/2502af9f

Branch: refs/heads/jira/solr-9959
Commit: 2502af9f3fa25a1b724400af61bf74102f2475dd
Parents: f8831ce
Author: David Smiley <ds...@apache.org>
Authored: Thu Mar 16 14:58:59 2017 -0400
Committer: David Smiley <ds...@apache.org>
Committed: Thu Mar 16 14:58:59 2017 -0400

----------------------------------------------------------------------
 solr/CHANGES.txt                                |   6 +
 .../apache/solr/schema/ICUCollationField.java   |   5 +-
 .../handler/component/RealTimeGetComponent.java |  36 ++-
 .../transform/BaseEditorialTransformer.java     |  36 +--
 .../org/apache/solr/schema/BinaryField.java     |   9 +
 .../java/org/apache/solr/schema/BoolField.java  |   3 -
 .../org/apache/solr/schema/CollationField.java  |   5 +-
 .../java/org/apache/solr/schema/EnumField.java  |   7 -
 .../org/apache/solr/schema/FieldProperties.java |  43 +--
 .../java/org/apache/solr/schema/FieldType.java  |  20 +-
 .../solr/schema/LatLonPointSpatialField.java    |   3 +-
 .../java/org/apache/solr/schema/PointField.java |   4 -
 .../apache/solr/schema/PrimitiveFieldType.java  |   4 +
 .../org/apache/solr/schema/SchemaField.java     |   7 +-
 .../java/org/apache/solr/schema/StrField.java   |   4 -
 .../java/org/apache/solr/schema/TrieField.java  |   3 -
 .../apache/solr/search/SolrIndexSearcher.java   | 268 ++++++++++++-------
 .../org/apache/solr/update/DocumentBuilder.java |   7 +-
 .../conf/schema-unifiedhighlight.xml            |   4 +-
 .../conf/solrconfig-managed-schema.xml          |   2 +-
 .../apache/solr/cloud/TestRandomFlRTGCloud.java |   2 +-
 .../highlight/TestUnifiedSolrHighlighter.java   |  12 +-
 .../apache/solr/schema/SortableBinaryField.java |   3 +-
 .../org/apache/solr/search/LargeFieldTest.java  | 141 ++++++++++
 .../java/org/apache/solr/SolrTestCaseJ4.java    |   2 +-
 25 files changed, 448 insertions(+), 188 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 7ec0c52..af151ed 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -197,6 +197,12 @@ New Features
 
 * SOLR-10254: significantTerms Streaming Expression should work in non-SolrCloud mode (Joel Bernstein)
 
+* SOLR-10286: string/text fields may now declare themselves as large="true" in the schema.  Large fields are always
+  lazy loaded and will only take up space in the document cache if the actual value is < 512KB.  This option
+  requires "stored" and must not be multiValued.  It's intended for fields that might have very large values so that
+  they don't get cached in memory. (David Smiley)
+
+
 Bug Fixes
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/contrib/analysis-extras/src/java/org/apache/solr/schema/ICUCollationField.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analysis-extras/src/java/org/apache/solr/schema/ICUCollationField.java b/solr/contrib/analysis-extras/src/java/org/apache/solr/schema/ICUCollationField.java
index 7d9e1c7..79cb6b3 100644
--- a/solr/contrib/analysis-extras/src/java/org/apache/solr/schema/ICUCollationField.java
+++ b/solr/contrib/analysis-extras/src/java/org/apache/solr/schema/ICUCollationField.java
@@ -277,10 +277,9 @@ public class ICUCollationField extends FieldType {
       return new TermRangeQuery(field.getName(), low, high, minInclusive, maxInclusive);
     }
   }
-  
+
   @Override
-  public void checkSchemaField(SchemaField field) {
-    // no-op
+  protected void checkSupportsDocValues() { // we support DocValues
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
index 123abea..900c787 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
@@ -690,24 +690,52 @@ public class RealTimeGetComponent extends SearchComponent
           List<Object> vals = new ArrayList<>();
           if (f.fieldType().docValuesType() == DocValuesType.SORTED_NUMERIC) {
             // SORTED_NUMERICS store sortable bits version of the value, need to retrieve the original
-            vals.add(sf.getType().toObject(f));
+            vals.add(sf.getType().toObject(f)); // (will materialize by side-effect)
           } else {
-            vals.add( f );
+            vals.add( materialize(f) );
           }
           out.setField( f.name(), vals );
         }
         else{
-          out.setField( f.name(), f );
+          out.setField( f.name(), materialize(f) );
         }
       }
       else {
-        out.addField( f.name(), f );
+        out.addField( f.name(), materialize(f) );
       }
     }
     return out;
   }
 
   /**
+   * Ensure we don't have {@link org.apache.lucene.document.LazyDocument.LazyField} or equivalent.
+   * It can pose problems if the searcher is about to be closed and we haven't fetched a value yet.
+   */
+  private static IndexableField materialize(IndexableField in) {
+    if (in instanceof Field) { // already materialized
+      return in;
+    }
+    return new ClonedField(in);
+  }
+
+  private static class ClonedField extends Field { // TODO Lucene Field has no copy constructor; maybe it should?
+    ClonedField(IndexableField in) {
+      super(in.name(), in.fieldType());
+      this.fieldsData = in.numericValue();
+      if (this.fieldsData == null) {
+        this.fieldsData = in.binaryValue();
+        if (this.fieldsData == null) {
+          this.fieldsData = in.stringValue();
+          if (this.fieldsData == null) {
+            // fallback:
+            assert false : in; // unexpected
+          }
+        }
+      }
+    }
+  }
+
+  /**
    * Converts a SolrInputDocument to SolrDocument, using an IndexSchema instance. 
    * @lucene.experimental
    */

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/core/src/java/org/apache/solr/response/transform/BaseEditorialTransformer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/response/transform/BaseEditorialTransformer.java b/solr/core/src/java/org/apache/solr/response/transform/BaseEditorialTransformer.java
index 80e0b61..bc5fb65 100644
--- a/solr/core/src/java/org/apache/solr/response/transform/BaseEditorialTransformer.java
+++ b/solr/core/src/java/org/apache/solr/response/transform/BaseEditorialTransformer.java
@@ -17,12 +17,12 @@
 package org.apache.solr.response.transform;
 
 
-import org.apache.lucene.document.Field;
+import java.util.Set;
+
+import org.apache.lucene.index.IndexableField;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.schema.FieldType;
 
-import java.util.Set;
-
 /**
  *
  *
@@ -40,8 +40,7 @@ public abstract class BaseEditorialTransformer extends DocTransformer {
   }
 
   @Override
-  public String getName()
-  {
+  public String getName() {
     return name;
   }
 
@@ -61,22 +60,15 @@ public abstract class BaseEditorialTransformer extends DocTransformer {
   protected abstract Set<String> getIdSet();
 
   protected String getKey(SolrDocument doc) {
-    String key;
-    Object field = doc.get(idFieldName);
-    final Number n;
-    if (field instanceof Field) {
-      n = ((Field) field).numericValue();
-    } else {
-      n = null;
-    }
-    if (n != null) {
-      key = n.toString();
-      key = ft.readableToIndexed(key);
-    } else if (field instanceof Field){
-      key = ((Field)field).stringValue();
-    } else {
-      key = field.toString();
+    Object obj = doc.get(idFieldName);
+    if (obj instanceof IndexableField) {
+      IndexableField f = (IndexableField) obj;
+      Number n = f.numericValue();
+      if (n != null) {
+        return ft.readableToIndexed(n.toString());
+      }
+      return ft.readableToIndexed(f.stringValue());
     }
-    return key;
+    throw new AssertionError("Expected an IndexableField but got: " + obj.getClass());
   }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/core/src/java/org/apache/solr/schema/BinaryField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/BinaryField.java b/solr/core/src/java/org/apache/solr/schema/BinaryField.java
index d1882b1..a0e7d05 100644
--- a/solr/core/src/java/org/apache/solr/schema/BinaryField.java
+++ b/solr/core/src/java/org/apache/solr/schema/BinaryField.java
@@ -23,6 +23,7 @@ import java.nio.ByteBuffer;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.util.BytesRef;
+import org.apache.solr.common.SolrException;
 import org.apache.solr.common.util.Base64;
 import org.apache.solr.response.TextResponseWriter;
 import org.apache.solr.uninverting.UninvertingReader.Type;
@@ -34,6 +35,14 @@ public class BinaryField extends FieldType  {
 
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
+  @Override
+  public void checkSchemaField(SchemaField field) {
+    super.checkSchemaField(field);
+    if (field.isLarge()) {
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Field type " + this + " is 'large'; not supported (yet)");
+    }
+  }
+
   private String toBase64String(ByteBuffer buf) {
     return Base64.byteArrayToBase64(buf.array(), buf.position(), buf.limit()-buf.position());
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/core/src/java/org/apache/solr/schema/BoolField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/BoolField.java b/solr/core/src/java/org/apache/solr/schema/BoolField.java
index 7d5c0d7..607e384 100644
--- a/solr/core/src/java/org/apache/solr/schema/BoolField.java
+++ b/solr/core/src/java/org/apache/solr/schema/BoolField.java
@@ -207,9 +207,6 @@ public class BoolField extends PrimitiveFieldType {
     return Collections.singletonList(fval);
   }
 
-  @Override
-  public void checkSchemaField(final SchemaField field) {
-  }
 }
 
 // TODO - this can be much more efficient - use FixedBitSet or Bits

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/core/src/java/org/apache/solr/schema/CollationField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/CollationField.java b/solr/core/src/java/org/apache/solr/schema/CollationField.java
index a498c01..c6a4f81 100644
--- a/solr/core/src/java/org/apache/solr/schema/CollationField.java
+++ b/solr/core/src/java/org/apache/solr/schema/CollationField.java
@@ -247,10 +247,9 @@ public class CollationField extends FieldType {
       return new TermRangeQuery(field.getName(), low, high, minInclusive, maxInclusive);
     }
   }
-  
+
   @Override
-  public void checkSchemaField(SchemaField field) {
-    // no-op
+  protected void checkSupportsDocValues() { // we support DocValues
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/core/src/java/org/apache/solr/schema/EnumField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/EnumField.java b/solr/core/src/java/org/apache/solr/schema/EnumField.java
index 2e73f74..3127262 100644
--- a/solr/core/src/java/org/apache/solr/schema/EnumField.java
+++ b/solr/core/src/java/org/apache/solr/schema/EnumField.java
@@ -281,13 +281,6 @@ public class EnumField extends PrimitiveFieldType {
    * {@inheritDoc}
    */
   @Override
-  public void checkSchemaField(SchemaField field) {
-  }
-
-  /**
-   * {@inheritDoc}
-   */
-  @Override
   public String readableToIndexed(String val) {
     if (val == null)
       return null;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/core/src/java/org/apache/solr/schema/FieldProperties.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/FieldProperties.java b/solr/core/src/java/org/apache/solr/schema/FieldProperties.java
index 2b1a8bb..6762345 100644
--- a/solr/core/src/java/org/apache/solr/schema/FieldProperties.java
+++ b/solr/core/src/java/org/apache/solr/schema/FieldProperties.java
@@ -30,29 +30,30 @@ public abstract class FieldProperties {
   // many of the variables are independent or semi-independent.
 
   // bit values for boolean field properties.
-  protected final static int INDEXED             = 0x00000001;
-  protected final static int TOKENIZED           = 0x00000002;
-  protected final static int STORED              = 0x00000004;
-  protected final static int BINARY              = 0x00000008;
-  protected final static int OMIT_NORMS          = 0x00000010;
-  protected final static int OMIT_TF_POSITIONS   = 0x00000020;
-  protected final static int STORE_TERMVECTORS   = 0x00000040;
-  protected final static int STORE_TERMPOSITIONS = 0x00000080;
-  protected final static int STORE_TERMOFFSETS   = 0x00000100;
-
-
-  protected final static int MULTIVALUED         = 0x00000200;
-  protected final static int SORT_MISSING_FIRST  = 0x00000400;
-  protected final static int SORT_MISSING_LAST   = 0x00000800;
+  protected final static int INDEXED             = 0b1;
+  protected final static int TOKENIZED           = 0b10;
+  protected final static int STORED              = 0b100;
+  protected final static int BINARY              = 0b1000;
+  protected final static int OMIT_NORMS          = 0b10000;
+  protected final static int OMIT_TF_POSITIONS   = 0b100000;
+  protected final static int STORE_TERMVECTORS   = 0b1000000;
+  protected final static int STORE_TERMPOSITIONS = 0b10000000;
+  protected final static int STORE_TERMOFFSETS   = 0b100000000;
+
+
+  protected final static int MULTIVALUED         = 0b1000000000;
+  protected final static int SORT_MISSING_FIRST  = 0b10000000000;
+  protected final static int SORT_MISSING_LAST   = 0b100000000000;
   
-  protected final static int REQUIRED            = 0x00001000;
-  protected final static int OMIT_POSITIONS      = 0x00002000;
+  protected final static int REQUIRED            = 0b1000000000000;
+  protected final static int OMIT_POSITIONS      = 0b10000000000000;
 
-  protected final static int STORE_OFFSETS       = 0x00004000;
-  protected final static int DOC_VALUES          = 0x00008000;
+  protected final static int STORE_OFFSETS       = 0b100000000000000;
+  protected final static int DOC_VALUES          = 0b1000000000000000;
 
-  protected final static int STORE_TERMPAYLOADS  = 0x00010000;
-  protected final static int USE_DOCVALUES_AS_STORED  = 0x00020000;
+  protected final static int STORE_TERMPAYLOADS  = 0b10000000000000000;
+  protected final static int USE_DOCVALUES_AS_STORED  = 0b100000000000000000;
+  protected final static int LARGE_FIELD         = 0b1000000000000000000;
 
   static final String[] propertyNames = {
           "indexed", "tokenized", "stored",
@@ -60,7 +61,7 @@ public abstract class FieldProperties {
           "termVectors", "termPositions", "termOffsets",
           "multiValued",
           "sortMissingFirst","sortMissingLast","required", "omitPositions",
-          "storeOffsetsWithPositions", "docValues", "termPayloads", "useDocValuesAsStored"
+          "storeOffsetsWithPositions", "docValues", "termPayloads", "useDocValuesAsStored", "large"
   };
 
   static final Map<String,Integer> propertyMap = new HashMap<>();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/core/src/java/org/apache/solr/schema/FieldType.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/FieldType.java b/solr/core/src/java/org/apache/solr/schema/FieldType.java
index 67b7be7..016e166 100644
--- a/solr/core/src/java/org/apache/solr/schema/FieldType.java
+++ b/solr/core/src/java/org/apache/solr/schema/FieldType.java
@@ -791,17 +791,27 @@ public abstract class FieldType extends FieldProperties {
    *
    * <p>
    * This method is called by the <code>SchemaField</code> constructor to 
-   * check that its initialization does not violate any fundemental 
-   * requirements of the <code>FieldType</code>.  The default implementation 
-   * does nothing, but subclasses may chose to throw a {@link SolrException}  
+   * check that its initialization does not violate any fundamental
+   * requirements of the <code>FieldType</code>.
+   * Subclasses may choose to throw a {@link SolrException}
    * if invariants are violated by the <code>SchemaField.</code>
    * </p>
    */
   public void checkSchemaField(final SchemaField field) {
-    // override if your field type supports doc values
     if (field.hasDocValues()) {
-      throw new SolrException(ErrorCode.SERVER_ERROR, "Field type " + this + " does not support doc values");
+      checkSupportsDocValues();
     }
+    if (field.isLarge() && field.multiValued()) {
+      throw new SolrException(ErrorCode.SERVER_ERROR, "Field type " + this + " is 'large'; can't support multiValued");
+    }
+    if (field.isLarge() && getNumberType() != null) {
+      throw new SolrException(ErrorCode.SERVER_ERROR, "Field type " + this + " is 'large'; can't support numerics");
+    }
+  }
+
+  /** Called by {@link #checkSchemaField(SchemaField)} if the field has docValues. By default none do. */
+  protected void checkSupportsDocValues() {
+    throw new SolrException(ErrorCode.SERVER_ERROR, "Field type " + this + " does not support doc values");
   }
 
   public static final String TYPE = "type";
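
The pattern for field types that do support docValues is now to override the new checkSupportsDocValues() hook with an empty body rather than no-op'ing all of checkSchemaField(); that is exactly what the ICUCollationField, CollationField, LatLonPointSpatialField and PrimitiveFieldType hunks in this commit do, and it keeps the new "large" validation in the base class running for every type.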

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/core/src/java/org/apache/solr/schema/LatLonPointSpatialField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/LatLonPointSpatialField.java b/solr/core/src/java/org/apache/solr/schema/LatLonPointSpatialField.java
index c09856a..8ed5484 100644
--- a/solr/core/src/java/org/apache/solr/schema/LatLonPointSpatialField.java
+++ b/solr/core/src/java/org/apache/solr/schema/LatLonPointSpatialField.java
@@ -57,8 +57,7 @@ public class LatLonPointSpatialField extends AbstractSpatialFieldType implements
   // TODO handle polygons
 
   @Override
-  public void checkSchemaField(SchemaField field) {
-    // override because if we didn't, FieldType will complain about docValues not being supported (we do support it)
+  protected void checkSupportsDocValues() { // we support DocValues
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/core/src/java/org/apache/solr/schema/PointField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/PointField.java b/solr/core/src/java/org/apache/solr/schema/PointField.java
index 6c75105..91d3eff 100644
--- a/solr/core/src/java/org/apache/solr/schema/PointField.java
+++ b/solr/core/src/java/org/apache/solr/schema/PointField.java
@@ -244,8 +244,4 @@ public abstract class PointField extends NumericFieldType {
 
   protected abstract StoredField getStoredField(SchemaField sf, Object value);
 
-  @Override
-  public void checkSchemaField(final SchemaField field) {
-    // PointFields support DocValues
-  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/core/src/java/org/apache/solr/schema/PrimitiveFieldType.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/PrimitiveFieldType.java b/solr/core/src/java/org/apache/solr/schema/PrimitiveFieldType.java
index a920f35..9d9da47 100644
--- a/solr/core/src/java/org/apache/solr/schema/PrimitiveFieldType.java
+++ b/solr/core/src/java/org/apache/solr/schema/PrimitiveFieldType.java
@@ -33,4 +33,8 @@ public abstract class PrimitiveFieldType extends FieldType {
       properties |= OMIT_NORMS;
     }
   }
+
+  @Override
+  protected void checkSupportsDocValues() { // primitive types support DocValues
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/core/src/java/org/apache/solr/schema/SchemaField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/SchemaField.java b/solr/core/src/java/org/apache/solr/schema/SchemaField.java
index e690a13..1e18ee0 100644
--- a/solr/core/src/java/org/apache/solr/schema/SchemaField.java
+++ b/solr/core/src/java/org/apache/solr/schema/SchemaField.java
@@ -66,7 +66,7 @@ public final class SchemaField extends FieldProperties implements IndexableField
     args = prototype.args;
   }
 
- /** Create a new SchemaField with the given name and type,
+  /** Create a new SchemaField with the given name and type,
    * and with the specified properties.  Properties are *not*
    * inherited from the type in this case, so users of this
    * constructor should derive the properties from type.getSolrProperties()
@@ -106,7 +106,8 @@ public final class SchemaField extends FieldProperties implements IndexableField
   public boolean multiValued() { return (properties & MULTIVALUED)!=0; }
   public boolean sortMissingFirst() { return (properties & SORT_MISSING_FIRST)!=0; }
   public boolean sortMissingLast() { return (properties & SORT_MISSING_LAST)!=0; }
-  public boolean isRequired() { return required; } 
+  public boolean isRequired() { return required; }
+  public boolean isLarge() { return (properties & LARGE_FIELD)!=0;}
   public Map<String,?> getArgs() { return Collections.unmodifiableMap(args); }
 
   // things that should be determined by field type, not set as options
@@ -240,7 +241,7 @@ public final class SchemaField extends FieldProperties implements IndexableField
     // that depend on that.
     //
     if (on(falseProps,STORED)) {
-      int pp = STORED | BINARY;
+      int pp = STORED | BINARY | LARGE_FIELD;
       if (on(pp,trueProps)) {
         throw new RuntimeException("SchemaField: " + name + " conflicting stored field options:" + props);
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/core/src/java/org/apache/solr/schema/StrField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/StrField.java b/solr/core/src/java/org/apache/solr/schema/StrField.java
index 0b1576b..3294b04 100644
--- a/solr/core/src/java/org/apache/solr/schema/StrField.java
+++ b/solr/core/src/java/org/apache/solr/schema/StrField.java
@@ -96,10 +96,6 @@ public class StrField extends PrimitiveFieldType {
   }
 
   @Override
-  public void checkSchemaField(SchemaField field) {
-  }
-
-  @Override
   public Object marshalSortValue(Object value) {
     return marshalStringSortValue(value);
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/core/src/java/org/apache/solr/schema/TrieField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/TrieField.java b/solr/core/src/java/org/apache/solr/schema/TrieField.java
index 46240c9..e7a33bd 100644
--- a/solr/core/src/java/org/apache/solr/schema/TrieField.java
+++ b/solr/core/src/java/org/apache/solr/schema/TrieField.java
@@ -690,9 +690,6 @@ public class TrieField extends NumericFieldType {
     return null;
   }
 
-  @Override
-  public void checkSchemaField(final SchemaField field) {
-  }
 }
 
 class TrieDateFieldSource extends LongFieldSource {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index 19e47d0..83df60f 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@ -18,6 +18,7 @@ package org.apache.solr.search;
 
 import java.io.Closeable;
 import java.io.IOException;
+import java.io.Reader;
 import java.lang.invoke.MethodHandles;
 import java.net.URL;
 import java.nio.charset.StandardCharsets;
@@ -38,63 +39,16 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
 
+import com.google.common.collect.Iterables;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.DocumentStoredFieldVisitor;
 import org.apache.lucene.document.LazyDocument;
-import org.apache.lucene.index.BinaryDocValues;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.DocValuesType;
-import org.apache.lucene.index.ExitableDirectoryReader;
-import org.apache.lucene.index.FieldInfo;
-import org.apache.lucene.index.FieldInfos;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexableField;
-import org.apache.lucene.index.LeafReader;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.MultiPostingsEnum;
-import org.apache.lucene.index.NumericDocValues;
-import org.apache.lucene.index.PostingsEnum;
-import org.apache.lucene.index.ReaderUtil;
-import org.apache.lucene.index.SortedDocValues;
-import org.apache.lucene.index.SortedNumericDocValues;
-import org.apache.lucene.index.SortedSetDocValues;
-import org.apache.lucene.index.StoredFieldVisitor;
+import org.apache.lucene.index.*;
 import org.apache.lucene.index.StoredFieldVisitor.Status;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.index.TermContext;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.*;
 import org.apache.lucene.search.BooleanClause.Occur;
-import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.CollectionStatistics;
-import org.apache.lucene.search.Collector;
-import org.apache.lucene.search.ConstantScoreQuery;
-import org.apache.lucene.search.DocIdSet;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.EarlyTerminatingSortingCollector;
-import org.apache.lucene.search.Explanation;
-import org.apache.lucene.search.FieldDoc;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.LeafCollector;
-import org.apache.lucene.search.MatchAllDocsQuery;
-import org.apache.lucene.search.MultiCollector;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.ScoreDoc;
-import org.apache.lucene.search.Scorer;
-import org.apache.lucene.search.SimpleCollector;
-import org.apache.lucene.search.Sort;
-import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.TermStatistics;
-import org.apache.lucene.search.TimeLimitingCollector;
-import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.search.TopDocsCollector;
-import org.apache.lucene.search.TopFieldCollector;
-import org.apache.lucene.search.TopFieldDocs;
-import org.apache.lucene.search.TopScoreDocCollector;
-import org.apache.lucene.search.TotalHitCountCollector;
-import org.apache.lucene.search.Weight;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
@@ -134,8 +88,6 @@ import org.apache.solr.update.SolrIndexConfig;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.Iterables;
-
 /**
  * SolrIndexSearcher adds schema awareness and caching functionality over {@link IndexSearcher}.
  *
@@ -192,7 +144,12 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
   /** Contains the names/patterns of all docValues=true,stored=false fields, excluding those that are copyField targets in the schema. */
   private final Set<String> nonStoredDVsWithoutCopyTargets;
 
-  private Collection<String> storedHighlightFieldNames;
+  private static int largeValueLengthCacheThreshold = Integer.getInteger("solr.largeField.cacheThreshold", 512 * 1024); // internal setting
+
+  private final Set<String> largeFields;
+
+  private Collection<String> storedHighlightFieldNames; // lazy populated; use getter
+
   private DirectoryFactory directoryFactory;
 
   private final LeafReader leafReader;
@@ -204,6 +161,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
 
   private final NamedList<Object> readerStats;
 
+
   private static DirectoryReader getReader(SolrCore core, SolrIndexConfig config, DirectoryFactory directoryFactory,
       String path) throws IOException {
     final Directory dir = directoryFactory.get(path, DirContext.DEFAULT, config.lockType);
@@ -367,11 +325,15 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
     final Set<String> nonStoredDVsUsedAsStored = new HashSet<>();
     final Set<String> allNonStoredDVs = new HashSet<>();
     final Set<String> nonStoredDVsWithoutCopyTargets = new HashSet<>();
+    final Set<String> storedLargeFields = new HashSet<>();
 
     this.fieldInfos = leafReader.getFieldInfos();
-    for (FieldInfo fieldInfo : fieldInfos) {
+    for (FieldInfo fieldInfo : fieldInfos) { // can find materialized dynamic fields, unlike using the Solr IndexSchema.
       final SchemaField schemaField = schema.getFieldOrNull(fieldInfo.name);
-      if (schemaField != null && !schemaField.stored() && schemaField.hasDocValues()) {
+      if (schemaField == null) {
+        continue;
+      }
+      if (!schemaField.stored() && schemaField.hasDocValues()) {
         if (schemaField.useDocValuesAsStored()) {
           nonStoredDVsUsedAsStored.add(fieldInfo.name);
         }
@@ -380,11 +342,15 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
           nonStoredDVsWithoutCopyTargets.add(fieldInfo.name);
         }
       }
+      if (schemaField.stored() && schemaField.isLarge()) {
+        storedLargeFields.add(schemaField.getName());
+      }
     }
 
     this.nonStoredDVsUsedAsStored = Collections.unmodifiableSet(nonStoredDVsUsedAsStored);
     this.allNonStoredDVs = Collections.unmodifiableSet(allNonStoredDVs);
     this.nonStoredDVsWithoutCopyTargets = Collections.unmodifiableSet(nonStoredDVsWithoutCopyTargets);
+    this.largeFields = Collections.unmodifiableSet(storedLargeFields);
 
     // We already have our own filter cache
     setQueryCache(null);
@@ -677,26 +643,41 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
   /*
    * Future optimizations (yonik)
    *
-   * If no cache is present: - use NO_LOAD instead of LAZY_LOAD - use LOAD_AND_BREAK if a single field is begin
+   * If no cache is present: - use NO_LOAD instead of LAZY_LOAD - use LOAD_AND_BREAK if a single field is being
    * retrieved
    */
 
-  /** FieldSelector which loads the specified fields, and loads all other field lazily. */
-  private static class SetNonLazyFieldSelector extends DocumentStoredFieldVisitor {
+  /** {@link StoredFieldVisitor} which loads the specified fields eagerly (or all if null).
+   * If {@link #enableLazyFieldLoading} then the rest get special lazy field entries.  Designated "large"
+   * fields will always get a special field entry. */
+  private class SolrDocumentStoredFieldVisitor extends DocumentStoredFieldVisitor {
     private final Document doc;
-    private final LazyDocument lazyDoc;
+    private final LazyDocument lazyFieldProducer; // arguably a better name than LazyDocument; at least how we use it here
+    private final int docId;
+    private final boolean addLargeFieldsLazily;
 
-    SetNonLazyFieldSelector(Set<String> toLoad, IndexReader reader, int docID) {
+    SolrDocumentStoredFieldVisitor(Set<String> toLoad, IndexReader reader, int docId) {
       super(toLoad);
-      lazyDoc = new LazyDocument(reader, docID);
-      doc = getDocument();
+      this.docId = docId;
+      this.doc = getDocument();
+      this.lazyFieldProducer = toLoad != null && enableLazyFieldLoading ? new LazyDocument(reader, docId) : null;
+      this.addLargeFieldsLazily = (documentCache != null && !largeFields.isEmpty());
+      //TODO can we return Status.STOP after a val is loaded and we know there are no other fields of interest?
+      //    When: toLoad is one single-valued field, no lazyFieldProducer
     }
 
     @Override
     public Status needsField(FieldInfo fieldInfo) throws IOException {
       Status status = super.needsField(fieldInfo);
-      if (status == Status.NO) {
-        doc.add(lazyDoc.getField(fieldInfo));
+      assert status != Status.STOP : "Status.STOP not supported or expected";
+      if (addLargeFieldsLazily && largeFields.contains(fieldInfo.name)) { // load "large" fields using this lazy mechanism
+        if (lazyFieldProducer != null || status == Status.YES) {
+          doc.add(new LargeLazyField(fieldInfo.name, docId));
+        }
+        return Status.NO;
+      }
+      if (status == Status.NO && lazyFieldProducer != null) { // lazy
+        doc.add(lazyFieldProducer.getField(fieldInfo));
       }
       return status;
     }
@@ -717,15 +698,15 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
    * @see IndexReader#document(int, StoredFieldVisitor)
    */
   @Override
-  public void doc(int n, StoredFieldVisitor visitor) throws IOException {
+  public void doc(int docId, StoredFieldVisitor visitor) throws IOException {
     if (documentCache != null) {
-      Document cached = documentCache.get(n);
+      Document cached = documentCache.get(docId);
       if (cached != null) {
         visitFromCached(cached, visitor);
         return;
       }
     }
-    getIndexReader().document(n, visitor);
+    getIndexReader().document(docId, visitor);
   }
 
   /** Executes a stored field visitor against a hit from the document cache */
@@ -735,13 +716,13 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
       final Status needsField = visitor.needsField(info);
       if (needsField == Status.STOP) return;
       if (needsField == Status.NO) continue;
-      if (f.binaryValue() != null) {
-        final BytesRef binaryValue = f.binaryValue();
-        final byte copy[] = new byte[binaryValue.length];
-        System.arraycopy(binaryValue.bytes, binaryValue.offset, copy, 0, copy.length);
-        visitor.binaryField(info, copy);
-      } else if (f.numericValue() != null) {
-        final Number numericValue = f.numericValue();
+      BytesRef binaryValue = f.binaryValue();
+      if (binaryValue != null) {
+        visitor.binaryField(info, toByteArrayUnwrapIfPossible(binaryValue));
+        continue;
+      }
+      Number numericValue = f.numericValue();
+      if (numericValue != null) {
         if (numericValue instanceof Double) {
           visitor.doubleField(info, numericValue.doubleValue());
         } else if (numericValue instanceof Integer) {
@@ -753,12 +734,25 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
         } else {
           throw new AssertionError();
         }
+        continue;
+      }
+      // must be String
+      if (f instanceof LargeLazyField) { // optimization to avoid premature string conversion
+        visitor.stringField(info, toByteArrayUnwrapIfPossible(((LargeLazyField) f).readBytes()));
       } else {
         visitor.stringField(info, f.stringValue().getBytes(StandardCharsets.UTF_8));
       }
     }
   }
 
+  private byte[] toByteArrayUnwrapIfPossible(BytesRef bytesRef) {
+    if (bytesRef.offset == 0 && bytesRef.bytes.length == bytesRef.length) {
+      return bytesRef.bytes;
+    } else {
+      return Arrays.copyOfRange(bytesRef.bytes, bytesRef.offset, bytesRef.offset + bytesRef.length);
+    }
+  }
+
   /**
    * Retrieve the {@link Document} instance corresponding to the document id.
    * <p>
@@ -775,23 +769,16 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
     }
 
     final DirectoryReader reader = getIndexReader();
-    if (fields != null) {
-      if (enableLazyFieldLoading) {
-        final SetNonLazyFieldSelector visitor = new SetNonLazyFieldSelector(fields, reader, i);
-        reader.document(i, visitor);
-        d = visitor.doc;
-      } else if (documentCache == null) {
-        d = reader.document(i, fields);
-      } else {
-        // we do not pass the fields in this case because that would return an incomplete document which would
-        // be eventually cached. The alternative would be to read the stored fields twice; once with the fields
-        // and then without for caching leading to a performance hit
-        // see SOLR-8858 for related discussion
-        d = reader.document(i);
-      }
-    } else {
-      d = reader.document(i);
+    if (documentCache != null && !enableLazyFieldLoading) {
+      // we do not filter the fields in this case because that would return an incomplete document which would
+      // be eventually cached. The alternative would be to read the stored fields twice; once with the fields
+      // and then without for caching leading to a performance hit
+      // see SOLR-8858 for related discussion
+      fields = null;
     }
+    final SolrDocumentStoredFieldVisitor visitor = new SolrDocumentStoredFieldVisitor(fields, reader, i);
+    reader.document(i, visitor);
+    d = visitor.getDocument();
 
     if (documentCache != null) {
       documentCache.put(i, d);
@@ -800,6 +787,103 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
     return d;
   }
 
+  /** Unlike LazyDocument.LazyField, we (a) don't cache large values, and (b) provide access to the byte[]. */
+  class LargeLazyField implements IndexableField {
+
+    final String name;
+    final int docId;
+    // synchronize on 'this' to access:
+    BytesRef cachedBytes; // we only conditionally populate this if it's big enough
+
+    private LargeLazyField(String name, int docId) {
+      this.name = name;
+      this.docId = docId;
+    }
+
+    @Override
+    public String toString() {
+      return fieldType().toString() + "<" + name() + ">"; // mimic Field.java
+    }
+
+    @Override
+    public String name() {
+      return name;
+    }
+
+    @Override
+    public IndexableFieldType fieldType() {
+      return schema.getField(name());
+    }
+
+    @Override
+    public TokenStream tokenStream(Analyzer analyzer, TokenStream reuse) {
+      return analyzer.tokenStream(name(), stringValue()); // or we could throw unsupported exception?
+    }
+    /** (for tests) */
+    synchronized boolean hasBeenLoaded() {
+      return cachedBytes != null;
+    }
+
+    @Override
+    public synchronized String stringValue() {
+      try {
+        return readBytes().utf8ToString();
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
+    }
+
+    synchronized BytesRef readBytes() throws IOException {
+      if (cachedBytes != null) {
+        return cachedBytes;
+      } else {
+        BytesRef bytesRef = new BytesRef();
+        getIndexReader().document(docId, new StoredFieldVisitor() {
+          boolean done = false;
+          @Override
+          public Status needsField(FieldInfo fieldInfo) throws IOException {
+            if (done) {
+              return Status.STOP;
+            }
+            return fieldInfo.name.equals(name()) ? Status.YES : Status.NO;
+          }
+
+          @Override
+          public void stringField(FieldInfo fieldInfo, byte[] value) throws IOException {
+            bytesRef.bytes = value;
+            bytesRef.length = value.length;
+            done = true;
+          }
+
+          @Override
+          public void binaryField(FieldInfo fieldInfo, byte[] value) throws IOException {
+            throw new UnsupportedOperationException("'large' binary fields are not (yet) supported");
+          }
+        });
+        if (bytesRef.length < largeValueLengthCacheThreshold) {
+          return cachedBytes = bytesRef;
+        } else {
+          return bytesRef;
+        }
+      }
+    }
+
+    @Override
+    public BytesRef binaryValue() {
+      return null;
+    }
+
+    @Override
+    public Reader readerValue() {
+      return null;
+    }
+
+    @Override
+    public Number numericValue() {
+      return null;
+    }
+  }
+
   /**
    * This will fetch and add the docValues fields to a given SolrDocument/SolrInputDocument
    *

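One knob worth noting from the hunk above: the 512KB cutoff below which a large field's bytes are retained on the LargeLazyField after the first read comes from the solr.largeField.cacheThreshold system property (an internal setting per the code comment), so a deployment wanting, say, a 1MB cutoff could presumably start the JVM with -Dsolr.largeField.cacheThreshold=1048576.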
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/core/src/java/org/apache/solr/update/DocumentBuilder.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/DocumentBuilder.java b/solr/core/src/java/org/apache/solr/update/DocumentBuilder.java
index abf4a1f..e3d2011 100644
--- a/solr/core/src/java/org/apache/solr/update/DocumentBuilder.java
+++ b/solr/core/src/java/org/apache/solr/update/DocumentBuilder.java
@@ -20,7 +20,6 @@ import java.util.List;
 import java.util.Set;
 
 import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
 import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.index.IndexableField;
 import org.apache.solr.common.SolrException;
@@ -55,7 +54,7 @@ public class DocumentBuilder {
         assert val instanceof NumericDocValuesField: "Expected in-place update to be done on"
             + " NDV fields only.";
       }
-      doc.add((Field)val);
+      doc.add((IndexableField)val);
       return;
     }
     for (IndexableField f : field.getType().createFields(field, val)) {
@@ -69,10 +68,10 @@ public class DocumentBuilder {
         //    assert f instanceof NumericDocValuesField
         if (forInPlaceUpdate) {
           if (f instanceof NumericDocValuesField) {
-            doc.add((Field) f);
+            doc.add(f);
           }
         } else {
-          doc.add((Field) f);
+          doc.add(f);
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/core/src/test-files/solr/collection1/conf/schema-unifiedhighlight.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema-unifiedhighlight.xml b/solr/core/src/test-files/solr/collection1/conf/schema-unifiedhighlight.xml
index ab18354..8d71088 100644
--- a/solr/core/src/test-files/solr/collection1/conf/schema-unifiedhighlight.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/schema-unifiedhighlight.xml
@@ -17,7 +17,7 @@
 
 <!-- Test schema file for UnifiedHighlighter -->
 
-<schema name="unifiedhighlight" version="1.0">
+<schema name="unifiedhighlight" version="1.6" >
   <fieldType name="int" class="solr.TrieIntField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
 
   <!-- basic text field: no offsets! -->
@@ -39,7 +39,7 @@
   <field name="id" type="int" indexed="true" stored="true" multiValued="false" required="false"/>
   <field name="text" type="text_offsets" indexed="true" stored="true"/>
   <field name="text2" type="text" indexed="true" stored="true"/>
-  <field name="text3" type="text_offsets" indexed="true" stored="true"/>
+  <field name="text3" type="text_offsets" indexed="true" stored="true"         large="true"/>
 
   <defaultSearchField>text</defaultSearchField>
   <uniqueKey>id</uniqueKey>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/core/src/test-files/solr/collection1/conf/solrconfig-managed-schema.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig-managed-schema.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig-managed-schema.xml
index abd4fbe..26224ad 100644
--- a/solr/core/src/test-files/solr/collection1/conf/solrconfig-managed-schema.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/solrconfig-managed-schema.xml
@@ -24,7 +24,7 @@
 
   <schemaFactory class="ManagedIndexSchemaFactory">
     <bool name="mutable">${managed.schema.mutable}</bool>
-    <str name="managedSchemaResourceName">managed-schema</str>
+    <str name="managedSchemaResourceName">${managed.schema.resourceName:managed-schema}</str>
   </schemaFactory>
 
   <codecFactory class="solr.SchemaCodecFactory"/>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java
index 966d8ef..140fd7e 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java
@@ -160,7 +160,7 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
   }
 
   /** 
-   * Tests thta all TransformerFactories that are implicitly provided by Solr are tested in this class
+   * Tests that all TransformerFactories that are implicitly provided by Solr are tested in this class
    *
    * @see FlValidator#getDefaultTransformerFactoryName
    * @see #FL_VALIDATORS

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/core/src/test/org/apache/solr/highlight/TestUnifiedSolrHighlighter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/highlight/TestUnifiedSolrHighlighter.java b/solr/core/src/test/org/apache/solr/highlight/TestUnifiedSolrHighlighter.java
index 9835518..2f7a003 100644
--- a/solr/core/src/test/org/apache/solr/highlight/TestUnifiedSolrHighlighter.java
+++ b/solr/core/src/test/org/apache/solr/highlight/TestUnifiedSolrHighlighter.java
@@ -19,6 +19,7 @@ package org.apache.solr.highlight;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.schema.IndexSchema;
+import org.junit.AfterClass;
 import org.junit.BeforeClass;
 
 /** Tests for the UnifiedHighlighter Solr plugin **/
@@ -26,7 +27,10 @@ public class TestUnifiedSolrHighlighter extends SolrTestCaseJ4 {
   
   @BeforeClass
   public static void beforeClass() throws Exception {
-    initCore("solrconfig-basic.xml", "schema-unifiedhighlight.xml");
+    System.setProperty("filterCache.enabled", "false");
+    System.setProperty("queryResultCache.enabled", "false");
+    System.setProperty("documentCache.enabled", "true"); // this is why we use this particular solrconfig
+    initCore("solrconfig-cache-enable-disable.xml", "schema-unifiedhighlight.xml");
     
     // test our config is sane, just to be sure:
 
@@ -36,6 +40,12 @@ public class TestUnifiedSolrHighlighter extends SolrTestCaseJ4 {
     assertTrue(schema.getField("text3").storeOffsetsWithPositions());
     assertFalse(schema.getField("text2").storeOffsetsWithPositions());
   }
+  @AfterClass
+  public static void afterClass() {
+    System.clearProperty("filterCache.enabled");
+    System.clearProperty("queryResultCache.enabled");
+    System.clearProperty("documentCache.enabled");
+  }
   
   @Override
   public void setUp() throws Exception {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/core/src/test/org/apache/solr/schema/SortableBinaryField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/SortableBinaryField.java b/solr/core/src/test/org/apache/solr/schema/SortableBinaryField.java
index b8ed296..90c92d0 100644
--- a/solr/core/src/test/org/apache/solr/schema/SortableBinaryField.java
+++ b/solr/core/src/test/org/apache/solr/schema/SortableBinaryField.java
@@ -35,8 +35,7 @@ import org.apache.lucene.util.BytesRef;
 public class SortableBinaryField extends BinaryField {
 
   @Override
-  public void checkSchemaField(final SchemaField field) {
-    // NOOP, It's Aaaaaall Good.
+  protected void checkSupportsDocValues() { // we support DocValues
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/core/src/test/org/apache/solr/search/LargeFieldTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/LargeFieldTest.java b/solr/core/src/test/org/apache/solr/search/LargeFieldTest.java
new file mode 100644
index 0000000..d05c69c
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/search/LargeFieldTest.java
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.search;
+
+import java.util.Arrays;
+import java.util.Collections;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.LazyDocument;
+import org.apache.lucene.index.IndexableField;
+import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.common.util.ContentStreamBase;
+import org.apache.solr.request.SolrQueryRequestBase;
+import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.schema.IndexSchema;
+import org.apache.solr.util.RefCounted;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class LargeFieldTest extends SolrTestCaseJ4 {
+
+  private static final String ID_FLD = "str"; // TODO alter underlying schema to be 'id'
+  private static final String LAZY_FIELD = "lazyField";
+  private static final String BIG_FIELD = "bigField";
+
+  @BeforeClass
+  public static void initManagedSchemaCore() throws Exception {
+    // TODO propose convenience API for this?
+    // This testing approach means no new solrconfig or schema file or per-test temp solr-home!
+    System.setProperty("managed.schema.mutable", "true");
+    System.setProperty("managed.schema.resourceName", "schema-one-field-no-dynamic-field-unique-key.xml");
+    System.setProperty("enable.update.log", "false");
+    initCore("solrconfig-managed-schema.xml", "ignoredSchemaName?");
+
+    // modify solr config  TODO propose more convenient API for this; maybe with JSON-ification of a map
+    try (SolrQueryRequestBase req = (SolrQueryRequestBase) req()) {
+      req.getContext().put("httpMethod", "POST");
+      req.setContentStreams(Collections.singleton(new ContentStreamBase.StringStream(
+          "{ 'set-property':{" +
+              "'query.enableLazyFieldLoading':true, " +
+              "'query.documentCache.class':'solr.LRUCache'" +
+              "}}"
+      )));
+      SolrQueryResponse rsp = new SolrQueryResponse();
+      h.getCore().execute(h.getCore().getRequestHandler("/config"), req, rsp);
+      assertNull(rsp.getException());
+    }
+
+    boolean PERSIST_FALSE = false; // don't write to test resource dir
+
+    IndexSchema schema = h.getCore().getLatestSchema();
+    schema = schema.addFieldTypes(Collections.singletonList(
+        schema.newFieldType("textType", "solr.TextField", // redundant; TODO improve api
+            map("name", "textType",   "class", "solr.TextField",
+                "analyzer", map("class", "org.apache.lucene.analysis.standard.StandardAnalyzer")))),
+        PERSIST_FALSE);
+    schema = schema.addFields(Arrays.asList(
+        schema.newField(LAZY_FIELD, "textType", map()),
+        schema.newField(BIG_FIELD, "textType", map("large", true))),
+        Collections.emptyMap(),
+        PERSIST_FALSE);
+
+    h.getCore().setLatestSchema(schema);
+  }
+
+  @Test
+  public void test() throws Exception {
+    // add just one document (docid 0)
+    assertU(adoc(ID_FLD, "101", LAZY_FIELD, "lzy", BIG_FIELD, "big document field one"));
+    assertU(commit());
+
+    // trigger the ID_FLD to get into the doc cache; don't reference other fields
+    assertQ(req("q", "101", "df", ID_FLD, "fl", ID_FLD)); // eager load ID_FLD; rest are lazy
+
+    // fetch the document; we know it will be from the documentCache, docId 0
+    final Document d;
+    RefCounted<SolrIndexSearcher> searcherRef = h.getCore().getSearcher();
+    try {
+      d = searcherRef.get().doc(0);
+    } finally {
+      searcherRef.decref();
+    }
+
+    assertEager(d, ID_FLD);
+    assertLazyNotLoaded(d, LAZY_FIELD);
+    assertLazyNotLoaded(d, BIG_FIELD);
+
+    assertQ(req("q", "101", "df", ID_FLD, "fl", LAZY_FIELD)); // trigger load of LAZY_FIELD
+
+    assertEager(d, ID_FLD);
+    assertLazyLoaded(d, LAZY_FIELD); // loaded now
+    assertLazyNotLoaded(d, BIG_FIELD); // because big fields are handled separately
+
+    assertQ(req("q", "101", "df", ID_FLD, "fl", BIG_FIELD)); // trigger load of BIG_FIELD
+
+    assertEager(d, ID_FLD);
+    assertLazyLoaded(d, LAZY_FIELD);
+    assertLazyLoaded(d, BIG_FIELD); // loaded now
+  }
+
+  private void assertEager(Document d, String fieldName) {
+    assertFalse( d.getField(fieldName) instanceof LazyDocument.LazyField);
+  }
+
+  private void assertLazyNotLoaded(Document d, String fieldName) {
+    IndexableField field = d.getField(fieldName);
+    if (fieldName == BIG_FIELD) {
+      assertTrue(field instanceof SolrIndexSearcher.LargeLazyField);
+      assertFalse(((SolrIndexSearcher.LargeLazyField)field).hasBeenLoaded());
+    } else {
+      assertTrue(field instanceof LazyDocument.LazyField);
+      assertFalse(((LazyDocument.LazyField)field).hasBeenLoaded());
+    }
+  }
+
+  private void assertLazyLoaded(Document d, String fieldName) {
+    IndexableField field = d.getField(fieldName);
+    if (fieldName == BIG_FIELD) {
+      assertTrue(field instanceof SolrIndexSearcher.LargeLazyField);
+      assertTrue(((SolrIndexSearcher.LargeLazyField)field).hasBeenLoaded());
+    } else {
+      assertTrue(field instanceof LazyDocument.LazyField);
+      assertTrue(((LazyDocument.LazyField)field).hasBeenLoaded());
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2502af9f/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
index 825e7c7..04fc3ff 100644
--- a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
+++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
@@ -852,7 +852,7 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
   /** Validates a query matches some XPath test expressions and closes the query */
   public static void assertQ(String message, SolrQueryRequest req, String... tests) {
     try {
-      String m = (null == message) ? "" : message + " ";
+      String m = (null == message) ? "" : message + " "; // TODO log 'm' !!!
       String response = h.query(req);
 
       if (req.getParams().getBool("facet", false)) {


[04/46] lucene-solr:jira/solr-9959: SOLR-10046: Add UninvertDocValuesMergePolicyFactory class. (Keith Laban, Christine Poerschke)

Posted by ab...@apache.org.
SOLR-10046: Add UninvertDocValuesMergePolicyFactory class. (Keith Laban, Christine Poerschke)


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/9d56f136
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/9d56f136
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/9d56f136

Branch: refs/heads/jira/solr-9959
Commit: 9d56f136505098ea5538c5d6eaaf60536848feb9
Parents: 65c695b
Author: Christine Poerschke <cp...@apache.org>
Authored: Wed Mar 15 10:31:10 2017 +0000
Committer: Christine Poerschke <cp...@apache.org>
Committed: Wed Mar 15 12:15:17 2017 +0000

----------------------------------------------------------------------
 solr/CHANGES.txt                                |   2 +
 .../UninvertDocValuesMergePolicyFactory.java    | 218 +++++++++++++++++
 .../solr/collection1/conf/schema-docValues.xml  |   1 +
 ...nfig-uninvertdocvaluesmergepolicyfactory.xml |  38 +++
 .../index/UninvertDocValuesMergePolicyTest.java | 243 +++++++++++++++++++
 5 files changed, 502 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9d56f136/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 0f1f488..6829cd1 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -185,6 +185,8 @@ New Features
 
 * SOLR-10224: Add disk total and disk free metrics. (ab)
 
+* SOLR-10046: Add UninvertDocValuesMergePolicyFactory class. (Keith Laban, Christine Poerschke)
+
 Bug Fixes
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9d56f136/solr/core/src/java/org/apache/solr/index/UninvertDocValuesMergePolicyFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/index/UninvertDocValuesMergePolicyFactory.java b/solr/core/src/java/org/apache/solr/index/UninvertDocValuesMergePolicyFactory.java
new file mode 100644
index 0000000..b6bfbed
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/index/UninvertDocValuesMergePolicyFactory.java
@@ -0,0 +1,218 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.index;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lucene.codecs.DocValuesProducer;
+import org.apache.lucene.index.BinaryDocValues;
+import org.apache.lucene.index.CodecReader;
+import org.apache.lucene.index.DocValuesType;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.FieldInfos;
+import org.apache.lucene.index.FilterCodecReader;
+import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.index.MergePolicy;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.index.OneMergeWrappingMergePolicy;
+import org.apache.lucene.index.SegmentCommitInfo;
+import org.apache.lucene.index.SortedDocValues;
+import org.apache.lucene.index.SortedNumericDocValues;
+import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.schema.IndexSchema;
+import org.apache.solr.schema.SchemaField;
+import org.apache.solr.uninverting.UninvertingReader;
+
+/**
+ * A merge policy that can detect schema changes and write docvalues into merging segments when a field has docvalues enabled,
+ * using UninvertingReader.
+ * 
+ * This merge policy will delegate to the wrapped merge policy for selecting merge segments.
+ * 
+ */
+public class UninvertDocValuesMergePolicyFactory extends WrapperMergePolicyFactory {
+  
+  final private boolean skipIntegrityCheck;
+
+  /**
+   * Whether or not the wrapped docValues producer should check consistency 
+   */
+  public boolean getSkipIntegrityCheck() {
+    return skipIntegrityCheck;
+  }
+
+  public UninvertDocValuesMergePolicyFactory(SolrResourceLoader resourceLoader, MergePolicyFactoryArgs args, IndexSchema schema) {
+    super(resourceLoader, args, schema);
+    final Boolean sic = (Boolean)args.remove("skipIntegrityCheck");
+    if (sic != null) {
+      this.skipIntegrityCheck = sic.booleanValue();
+    } else {
+      this.skipIntegrityCheck = false;
+    }
+    if (!args.keys().isEmpty()) {
+      throw new IllegalArgumentException("Arguments were "+args+" but "+getClass().getSimpleName()+" takes no arguments.");
+    }
+  }
+
+  @Override
+  protected MergePolicy getMergePolicyInstance(MergePolicy wrappedMP) {
+    return new OneMergeWrappingMergePolicy(wrappedMP, (merge) -> new UninvertDocValuesOneMerge(merge.segments));
+  }
+  
+  private UninvertingReader.Type getUninversionType(FieldInfo fi) {
+    SchemaField sf = schema.getFieldOrNull(fi.name);
+    
+    if (null != sf &&
+        sf.hasDocValues() &&
+        fi.getDocValuesType() == DocValuesType.NONE &&
+        fi.getIndexOptions() != IndexOptions.NONE) {
+      return sf.getType().getUninversionType(sf);
+    } else {
+      return null;
+    }
+  }
+    
+  private class UninvertDocValuesOneMerge extends MergePolicy.OneMerge {
+
+    public UninvertDocValuesOneMerge(List<SegmentCommitInfo> segments) {
+      super(segments);
+    }
+    
+    @Override
+    public CodecReader wrapForMerge(CodecReader reader) throws IOException {
+      // Wrap the reader with an uninverting reader if any of the fields have no docvalues but the
+      // schema says they should have them
+      
+      
+      Map<String,UninvertingReader.Type> uninversionMap = null;
+      
+      for(FieldInfo fi: reader.getFieldInfos()) {
+        final UninvertingReader.Type type = getUninversionType(fi);
+        if (type != null) {
+          if (uninversionMap == null) {
+            uninversionMap = new HashMap<>();
+          }
+          uninversionMap.put(fi.name, type);
+        }
+        
+      }
+      
+      if(uninversionMap == null) {
+        return reader; // Default to normal reader if nothing to uninvert
+      } else {
+        return new UninvertingFilterCodecReader(reader, uninversionMap);
+      }
+      
+    }
+    
+  }
+  
+  
+  /**
+   * Delegates to an UninvertingReader for fields with docvalues.
+   * 
+   * This is going to blow up the FieldCache; look into an alternative implementation that uninverts without
+   * the FieldCache.
+   */
+  private class UninvertingFilterCodecReader extends FilterCodecReader {
+
+    private final UninvertingReader uninvertingReader;
+    private final DocValuesProducer docValuesProducer;
+
+    public UninvertingFilterCodecReader(CodecReader in, Map<String,UninvertingReader.Type> uninversionMap) {
+      super(in);
+
+      this.uninvertingReader = new UninvertingReader(in, uninversionMap);
+      this.docValuesProducer = new DocValuesProducer() {
+
+        @Override
+        public NumericDocValues getNumeric(FieldInfo field) throws IOException {
+          return uninvertingReader.getNumericDocValues(field.name);
+        }
+
+        @Override
+        public BinaryDocValues getBinary(FieldInfo field) throws IOException {
+          return uninvertingReader.getBinaryDocValues(field.name);
+        }
+
+        @Override
+        public SortedDocValues getSorted(FieldInfo field) throws IOException {
+          return uninvertingReader.getSortedDocValues(field.name);
+        }
+
+        @Override
+        public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException {
+          return uninvertingReader.getSortedNumericDocValues(field.name);
+        }
+
+        @Override
+        public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException {
+          return uninvertingReader.getSortedSetDocValues(field.name);
+        }
+
+        @Override
+        public void checkIntegrity() throws IOException {
+          if (!skipIntegrityCheck) {
+            uninvertingReader.checkIntegrity();
+          }
+        }
+
+        @Override
+        public void close() throws IOException {
+        }
+
+        @Override
+        public long ramBytesUsed() {
+          return 0;
+        }
+      };
+    }
+    
+    @Override
+    protected void doClose() throws IOException {
+      docValuesProducer.close();
+      uninvertingReader.close();
+      super.doClose();
+    }
+
+    @Override
+    public DocValuesProducer getDocValuesReader() {
+      return docValuesProducer;
+    }
+    
+    @Override
+    public FieldInfos getFieldInfos() {
+      return uninvertingReader.getFieldInfos();
+    }
+
+    @Override
+    public CacheHelper getCoreCacheHelper() {
+      return in.getCoreCacheHelper();
+    }
+
+    @Override
+    public CacheHelper getReaderCacheHelper() {
+      return in.getReaderCacheHelper();
+    }
+    
+  }
+
+}

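Setting the uninversion details aside, the factory above composes two Lucene hooks: OneMergeWrappingMergePolicy leaves segment selection to the wrapped policy, and each OneMerge gets a chance to decorate the readers it is about to merge via wrapForMerge(). The minimal sketch below shows just that wrapping pattern; the ReaderWrappingExample class is illustrative only and simply passes readers through where UninvertDocValuesOneMerge would substitute its UninvertingFilterCodecReader.

import java.io.IOException;

import org.apache.lucene.index.CodecReader;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.OneMergeWrappingMergePolicy;

/** Illustrative sketch: delegate merge selection, but intercept the readers of every merge. */
class ReaderWrappingExample {

  static MergePolicy wrap(MergePolicy delegate) {
    // segment selection stays with 'delegate'; only the per-merge readers are intercepted
    return new OneMergeWrappingMergePolicy(delegate, (merge) -> new MergePolicy.OneMerge(merge.segments) {
      @Override
      public CodecReader wrapForMerge(CodecReader reader) throws IOException {
        // a real policy (see UninvertDocValuesOneMerge above) would return a FilterCodecReader
        // here whenever the segment needs extra work, e.g. uninverting fields into docvalues
        return reader;
      }
    });
  }
}

The factory's getMergePolicyInstance() in the patch is exactly this shape, with the pass-through replaced by the per-field docvalues uninversion check.
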
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9d56f136/solr/core/src/test-files/solr/collection1/conf/schema-docValues.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema-docValues.xml b/solr/core/src/test-files/solr/collection1/conf/schema-docValues.xml
index c7b7de8..9e4286d 100644
--- a/solr/core/src/test-files/solr/collection1/conf/schema-docValues.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/schema-docValues.xml
@@ -62,6 +62,7 @@
   <field name="datedv" type="date" indexed="false" stored="false" docValues="true" default="1995-12-31T23:59:59.999Z"/>
 
   <field name="stringdv" type="string" indexed="false" stored="false" docValues="true" default="solr" />
+  <field name="string_add_dv_later" type="string" indexed="true" stored="true" docValues="false"/>
   <field name="booldv" type="boolean" indexed="false" stored="false" docValues="true" default="true" />
 
   <field name="floatdvs" type="float" indexed="false" stored="false" docValues="true" default="1"/>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9d56f136/solr/core/src/test-files/solr/collection1/conf/solrconfig-uninvertdocvaluesmergepolicyfactory.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig-uninvertdocvaluesmergepolicyfactory.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig-uninvertdocvaluesmergepolicyfactory.xml
new file mode 100644
index 0000000..613357b
--- /dev/null
+++ b/solr/core/src/test-files/solr/collection1/conf/solrconfig-uninvertdocvaluesmergepolicyfactory.xml
@@ -0,0 +1,38 @@
+<?xml version="1.0" ?>
+
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<config>
+  <luceneMatchVersion>${tests.luceneMatchVersion:LATEST}</luceneMatchVersion>
+  <directoryFactory name="DirectoryFactory" class="${solr.directoryFactory:solr.RAMDirectoryFactory}"/>
+  <schemaFactory class="ClassicIndexSchemaFactory"/>
+
+  <indexConfig>
+    <useCompoundFile>${useCompoundFile:false}</useCompoundFile>
+    <mergePolicyFactory class="org.apache.solr.index.UninvertDocValuesMergePolicyFactory">
+      <str name="wrapped.prefix">inner</str>
+      <str name="inner.class">org.apache.solr.index.DefaultMergePolicyFactory</str>
+      <bool name="skipIntegrityCheck">${solr.tests.skipIntegrityCheck:false}</bool>
+    </mergePolicyFactory>
+     
+    <mergeScheduler class="org.apache.lucene.index.ConcurrentMergeScheduler"/>
+  </indexConfig>
+
+  <requestHandler name="standard" class="solr.StandardRequestHandler"></requestHandler>
+
+</config>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9d56f136/solr/core/src/test/org/apache/solr/index/UninvertDocValuesMergePolicyTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/index/UninvertDocValuesMergePolicyTest.java b/solr/core/src/test/org/apache/solr/index/UninvertDocValuesMergePolicyTest.java
new file mode 100644
index 0000000..17e6b3e
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/index/UninvertDocValuesMergePolicyTest.java
@@ -0,0 +1,243 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.index;
+
+import java.util.Random;
+import java.util.function.IntUnaryOperator;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.DocValuesType;
+import org.apache.lucene.index.FieldInfos;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.MultiFields;
+import org.apache.lucene.index.SortedDocValues;
+import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.core.SolrCore;
+import org.apache.solr.schema.IndexSchema;
+import org.apache.solr.schema.SchemaField;
+import org.apache.solr.search.SolrIndexSearcher;
+import org.apache.solr.util.RefCounted;
+import org.apache.solr.util.TestHarness;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+
+public class UninvertDocValuesMergePolicyTest extends SolrTestCaseJ4 {
+
+  private static String SOLR_TESTS_SKIP_INTEGRITY_CHECK = "solr.tests.skipIntegrityCheck";
+  private static String ID_FIELD = "id";
+  private static String TEST_FIELD = "string_add_dv_later";
+
+  @BeforeClass
+  public static void beforeTests() throws Exception {
+    System.setProperty(SOLR_TESTS_SKIP_INTEGRITY_CHECK, (random().nextBoolean() ? "true" : "false"));
+  }
+
+  @AfterClass
+  public static void afterTests() {
+    System.clearProperty(SOLR_TESTS_SKIP_INTEGRITY_CHECK);
+  }
+
+  @After
+  public void after() throws Exception {
+    deleteCore();
+  }
+  
+  @Before
+  public void before() throws Exception {
+    initCore("solrconfig-uninvertdocvaluesmergepolicyfactory.xml", "schema-docValues.xml");
+  }
+
+  public void testIndexAndAddDocValues() throws Exception {
+    Random rand = random();
+    
+    for(int i=0; i < 100; i++) {
+      assertU(adoc(ID_FIELD, String.valueOf(i), TEST_FIELD, String.valueOf(i)));
+      
+      if(rand.nextBoolean()) {
+        assertU(commit());
+      }
+    }
+    
+    assertU(commit());
+    
+    // Assert everything has been indexed and there are no docvalues
+    withNewRawReader(h, topReader -> {
+      assertEquals(100, topReader.numDocs());
+
+      final FieldInfos infos = MultiFields.getMergedFieldInfos(topReader);
+
+      // The global field type should not have docValues yet
+      assertEquals(DocValuesType.NONE, infos.fieldInfo(TEST_FIELD).getDocValuesType());
+    });
+    
+    
+    addDocValuesTo(h, TEST_FIELD);
+    
+    
+    // Add some more documents with doc values turned on including updating some
+    for(int i=90; i < 110; i++) {
+      assertU(adoc(ID_FIELD, String.valueOf(i), TEST_FIELD, String.valueOf(i)));
+      
+      if(rand.nextBoolean()) {
+        assertU(commit());
+      }
+    }
+    
+    assertU(commit());
+    
+    withNewRawReader(h, topReader -> {
+      assertEquals(110, topReader.numDocs());
+
+      final FieldInfos infos = MultiFields.getMergedFieldInfos(topReader);
+      // The global field type should have docValues because a document with dvs was added
+      assertEquals(DocValuesType.SORTED, infos.fieldInfo(TEST_FIELD).getDocValuesType());
+    });
+    
+    int optimizeSegments = 1;
+    assertU(optimize("maxSegments", String.valueOf(optimizeSegments)));
+    
+    
+    // Assert all docs have the right docvalues
+    withNewRawReader(h, topReader -> {
+      // Assert merged into one segment 
+      assertEquals(110, topReader.numDocs());
+      assertEquals(optimizeSegments, topReader.leaves().size());
+      
+
+      final FieldInfos infos = MultiFields.getMergedFieldInfos(topReader);
+      // The global field type should have docValues because a document with dvs was added
+      assertEquals(DocValuesType.SORTED, infos.fieldInfo(TEST_FIELD).getDocValuesType());
+      
+      
+      // Check that all segments have the right docvalues type with the correct value
+      // Also check that other fields (e.g. the id field) didn't mistakenly get docvalues added
+      for (LeafReaderContext ctx : topReader.leaves()) {
+        LeafReader r = ctx.reader();
+        SortedDocValues docvalues = r.getSortedDocValues(TEST_FIELD);
+        for(int i = 0; i < r.numDocs(); ++i) {
+          Document doc = r.document(i);
+          String v = doc.getField(TEST_FIELD).stringValue();
+          String id = doc.getField(ID_FIELD).stringValue();
+          assertEquals(DocValuesType.SORTED, r.getFieldInfos().fieldInfo(TEST_FIELD).getDocValuesType());
+          assertEquals(DocValuesType.NONE, r.getFieldInfos().fieldInfo(ID_FIELD).getDocValuesType());
+          assertEquals(v, id);
+          
+          docvalues.nextDoc();
+          assertEquals(v, docvalues.binaryValue().utf8ToString());
+        }
+      }
+    });
+  }
+  
+  
+  // When a non-indexed field gets merged, it exhibits the old behavior:
+  // the field will be merged, docvalues headers updated, but no docvalues for this field
+  public void testNonIndexedFieldDoesNonFail() throws Exception {
+    // Remove Indexed from fieldType
+    removeIndexFrom(h, TEST_FIELD);
+    
+    assertU(adoc(ID_FIELD, String.valueOf(1), TEST_FIELD, String.valueOf(1)));
+    assertU(commit());
+    
+    addDocValuesTo(h, TEST_FIELD);
+    
+    assertU(adoc(ID_FIELD, String.valueOf(2), TEST_FIELD, String.valueOf(2)));
+    assertU(commit());
+    
+    assertU(optimize("maxSegments", "1"));
+    
+    withNewRawReader(h, topReader -> {
+      // Assert merged into one segment 
+      assertEquals(2, topReader.numDocs());
+      assertEquals(1, topReader.leaves().size());
+      
+
+      final FieldInfos infos = MultiFields.getMergedFieldInfos(topReader);
+      // The global field type should have docValues because a document with dvs was added
+      assertEquals(DocValuesType.SORTED, infos.fieldInfo(TEST_FIELD).getDocValuesType());
+      
+      for (LeafReaderContext ctx : topReader.leaves()) {
+        LeafReader r = ctx.reader();
+        SortedDocValues docvalues = r.getSortedDocValues(TEST_FIELD);
+        for(int i = 0; i < r.numDocs(); ++i) {
+          Document doc = r.document(i);
+          String v = doc.getField(TEST_FIELD).stringValue();
+          String id = doc.getField(ID_FIELD).stringValue();
+          assertEquals(DocValuesType.SORTED, r.getFieldInfos().fieldInfo(TEST_FIELD).getDocValuesType());
+          assertEquals(DocValuesType.NONE, r.getFieldInfos().fieldInfo(ID_FIELD).getDocValuesType());
+          
+         
+          if(id.equals("2")) {
+            assertTrue(docvalues.advanceExact(i));
+            assertEquals(v, docvalues.binaryValue().utf8ToString());
+          } else {
+            assertFalse(docvalues.advanceExact(i));
+          }
+          
+        }
+      }  
+    });
+  }
+
+  
+  private static void addDocValuesTo(TestHarness h, String fieldName) {
+    implUpdateSchemaField(h, fieldName, (p) -> (p | 0x00008000)); // FieldProperties.DOC_VALUES
+  }
+
+  private static void removeIndexFrom(TestHarness h, String fieldName) {
+    implUpdateSchemaField(h, fieldName, (p) -> (p ^ 0x00000001)); // FieldProperties.INDEXED
+  }
+
+  private static void implUpdateSchemaField(TestHarness h, String fieldName, IntUnaryOperator propertiesModifier) {
+    try (SolrCore core = h.getCoreInc()) {
+
+      // Add docvalues to the field type
+      IndexSchema schema = core.getLatestSchema();
+      SchemaField oldSchemaField = schema.getField(fieldName);
+      SchemaField newSchemaField = new SchemaField(
+          fieldName,
+          oldSchemaField.getType(),
+          propertiesModifier.applyAsInt(oldSchemaField.getProperties()),
+          oldSchemaField.getDefaultValue());
+      schema.getFields().put(fieldName, newSchemaField);
+    }
+  }
+  
+  private interface DirectoryReaderConsumer {
+    public void accept(DirectoryReader consumer) throws Exception;
+  }
+
+  private static void withNewRawReader(TestHarness h, DirectoryReaderConsumer consumer) {
+    try (SolrCore core = h.getCoreInc()) {
+      final RefCounted<SolrIndexSearcher> searcherRef = core.openNewSearcher(true, true);
+      final SolrIndexSearcher searcher = searcherRef.get();
+      try {
+        try {
+          consumer.accept(searcher.getRawReader());
+        } catch (Exception e) {
+          fail(e.toString());
+        }
+      } finally {
+        searcherRef.decref();
+      }
+    }
+  }
+}


[36/46] lucene-solr:jira/solr-9959: SOLR-10333: Fixes use of HashedMap in StreamEvaluator tests

Posted by ab...@apache.org.
SOLR-10333: Fixes use of HashedMap in StreamEvaluator tests


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/4171ef79
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/4171ef79
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/4171ef79

Branch: refs/heads/jira/solr-9959
Commit: 4171ef79b404829f5bb7c03de3855313d3ea9c86
Parents: 92297b5
Author: Dennis Gove <dp...@gmail.com>
Authored: Tue Mar 21 08:40:40 2017 -0400
Committer: Dennis Gove <dp...@gmail.com>
Committed: Tue Mar 21 08:40:40 2017 -0400

----------------------------------------------------------------------
 .../solrj/io/stream/eval/AbsoluteValueEvaluatorTest.java  |  8 ++++----
 .../client/solrj/io/stream/eval/AddEvaluatorTest.java     |  8 ++++----
 .../client/solrj/io/stream/eval/AndEvaluatorTest.java     |  8 ++++----
 .../solrj/io/stream/eval/CompoundEvaluatorTest.java       |  8 ++++----
 .../client/solrj/io/stream/eval/DivideEvaluatorTest.java  | 10 +++++-----
 .../client/solrj/io/stream/eval/EqualsEvaluatorTest.java  |  8 ++++----
 .../solrj/io/stream/eval/ExclusiveOrEvaluatorTest.java    |  8 ++++----
 .../io/stream/eval/GreaterThanEqualToEvaluatorTest.java   |  8 ++++----
 .../solrj/io/stream/eval/GreaterThanEvaluatorTest.java    |  8 ++++----
 .../io/stream/eval/LessThanEqualToEvaluatorTest.java      |  8 ++++----
 .../solrj/io/stream/eval/LessThanEvaluatorTest.java       |  8 ++++----
 .../solrj/io/stream/eval/MultiplyEvaluatorTest.java       |  8 ++++----
 .../solrj/io/stream/eval/NaturalLogEvaluatorTest.java     |  8 ++++----
 .../client/solrj/io/stream/eval/NotEvaluatorTest.java     | 10 +++++-----
 .../solr/client/solrj/io/stream/eval/OrEvaluatorTest.java |  8 ++++----
 .../solrj/io/stream/eval/RawValueEvaluatorTest.java       |  8 ++++----
 .../solrj/io/stream/eval/SubtractEvaluatorTest.java       |  8 ++++----
 17 files changed, 70 insertions(+), 70 deletions(-)
----------------------------------------------------------------------

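The change repeated across all seventeen test files is mechanical: the raw commons-collections HashedMap backing each test's tuple values is replaced with a typed java.util.HashMap, removing the raw-type usage and the commons-collections import from these tests. Below is a small illustrative sketch of the resulting setUp() pattern; the EvaluatorTestFixture class and tupleWith helper are not part of the patch.

import java.util.HashMap;
import java.util.Map;

import org.apache.solr.client.solrj.io.Tuple;

/** Illustrative sketch of the map handling these evaluator tests share after the change. */
class EvaluatorTestFixture {

  Map<String, Object> values;

  void setUp() {
    // was: values = new HashedMap();   (raw org.apache.commons.collections.map.HashedMap)
    values = new HashMap<String, Object>();
  }

  Tuple tupleWith(String field, Object value) {
    values.clear();
    values.put(field, value);
    return new Tuple(values); // the evaluator under test is then run against this Tuple
  }
}
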

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4171ef79/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AbsoluteValueEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AbsoluteValueEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AbsoluteValueEvaluatorTest.java
index 88d3447..ff2384c 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AbsoluteValueEvaluatorTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AbsoluteValueEvaluatorTest.java
@@ -17,11 +17,9 @@
 package org.apache.solr.client.solrj.io.stream.eval;
 
 import java.io.IOException;
+import java.util.HashMap;
 import java.util.Map;
 
-import junit.framework.Assert;
-
-import org.apache.commons.collections.map.HashedMap;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.eval.AbsoluteValueEvaluator;
@@ -29,6 +27,8 @@ import org.apache.solr.client.solrj.io.eval.StreamEvaluator;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 import org.junit.Test;
 
+import junit.framework.Assert;
+
 public class AbsoluteValueEvaluatorTest extends LuceneTestCase {
 
   StreamFactory factory;
@@ -39,7 +39,7 @@ public class AbsoluteValueEvaluatorTest extends LuceneTestCase {
     
     factory = new StreamFactory()
       .withFunctionName("abs", AbsoluteValueEvaluator.class);
-    values = new HashedMap();
+    values = new HashMap<String,Object>();
   }
     
   @Test

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4171ef79/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AddEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AddEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AddEvaluatorTest.java
index 7115452..ac31acc 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AddEvaluatorTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AddEvaluatorTest.java
@@ -17,11 +17,9 @@
 package org.apache.solr.client.solrj.io.stream.eval;
 
 import java.io.IOException;
+import java.util.HashMap;
 import java.util.Map;
 
-import junit.framework.Assert;
-
-import org.apache.commons.collections.map.HashedMap;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.eval.AddEvaluator;
@@ -29,6 +27,8 @@ import org.apache.solr.client.solrj.io.eval.StreamEvaluator;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 import org.junit.Test;
 
+import junit.framework.Assert;
+
 public class AddEvaluatorTest extends LuceneTestCase {
 
   StreamFactory factory;
@@ -39,7 +39,7 @@ public class AddEvaluatorTest extends LuceneTestCase {
     
     factory = new StreamFactory()
       .withFunctionName("add", AddEvaluator.class);
-    values = new HashedMap();
+    values = new HashMap<String,Object>();
   }
     
   @Test

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4171ef79/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AndEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AndEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AndEvaluatorTest.java
index 9daa928..4e94758 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AndEvaluatorTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/AndEvaluatorTest.java
@@ -16,11 +16,9 @@
  */
 package org.apache.solr.client.solrj.io.stream.eval;
 
+import java.util.HashMap;
 import java.util.Map;
 
-import junit.framework.Assert;
-
-import org.apache.commons.collections.map.HashedMap;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.eval.AndEvaluator;
@@ -28,6 +26,8 @@ import org.apache.solr.client.solrj.io.eval.StreamEvaluator;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 import org.junit.Test;
 
+import junit.framework.Assert;
+
 public class AndEvaluatorTest extends LuceneTestCase {
 
   StreamFactory factory;
@@ -38,7 +38,7 @@ public class AndEvaluatorTest extends LuceneTestCase {
     
     factory = new StreamFactory()
       .withFunctionName("and", AndEvaluator.class);
-    values = new HashedMap();
+    values = new HashMap<String,Object>();
   }
     
   @Test

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4171ef79/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/CompoundEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/CompoundEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/CompoundEvaluatorTest.java
index 8ae5657..74b44b3 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/CompoundEvaluatorTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/CompoundEvaluatorTest.java
@@ -16,11 +16,9 @@
  */
 package org.apache.solr.client.solrj.io.stream.eval;
 
+import java.util.HashMap;
 import java.util.Map;
 
-import junit.framework.Assert;
-
-import org.apache.commons.collections.map.HashedMap;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.eval.AddEvaluator;
@@ -34,6 +32,8 @@ import org.apache.solr.client.solrj.io.eval.SubtractEvaluator;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 import org.junit.Test;
 
+import junit.framework.Assert;
+
 public class CompoundEvaluatorTest extends LuceneTestCase {
 
   StreamFactory factory;
@@ -50,7 +50,7 @@ public class CompoundEvaluatorTest extends LuceneTestCase {
       .withFunctionName("sub", SubtractEvaluator.class)
       .withFunctionName("mult", MultiplyEvaluator.class)
       .withFunctionName("if", IfThenElseEvaluator.class);
-    values = new HashedMap();
+    values = new HashMap<String,Object>();
   }
     
   @Test

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4171ef79/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/DivideEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/DivideEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/DivideEvaluatorTest.java
index 680be63..b33c896 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/DivideEvaluatorTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/DivideEvaluatorTest.java
@@ -17,11 +17,9 @@
 package org.apache.solr.client.solrj.io.stream.eval;
 
 import java.io.IOException;
+import java.util.HashMap;
 import java.util.Map;
 
-import junit.framework.Assert;
-
-import org.apache.commons.collections.map.HashedMap;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.eval.DivideEvaluator;
@@ -29,6 +27,8 @@ import org.apache.solr.client.solrj.io.eval.StreamEvaluator;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 import org.junit.Test;
 
+import junit.framework.Assert;
+
 public class DivideEvaluatorTest extends LuceneTestCase {
 
   StreamFactory factory;
@@ -39,7 +39,7 @@ public class DivideEvaluatorTest extends LuceneTestCase {
     
     factory = new StreamFactory()
       .withFunctionName("div", DivideEvaluator.class);
-    values = new HashedMap();
+    values = new HashMap<String,Object>();
   }
     
   @Test
@@ -122,7 +122,7 @@ public class DivideEvaluatorTest extends LuceneTestCase {
   
   @Test(expected = IOException.class)
   public void divManyFieldsWithValues() throws Exception{
-    StreamEvaluator evaluator = factory.constructEvaluator("div(a,b,c,d)");
+    factory.constructEvaluator("div(a,b,c,d)");
   }
   
   @Test

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4171ef79/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/EqualsEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/EqualsEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/EqualsEvaluatorTest.java
index 2f9dd9c..690e2fd 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/EqualsEvaluatorTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/EqualsEvaluatorTest.java
@@ -17,11 +17,9 @@
 package org.apache.solr.client.solrj.io.stream.eval;
 
 import java.io.IOException;
+import java.util.HashMap;
 import java.util.Map;
 
-import junit.framework.Assert;
-
-import org.apache.commons.collections.map.HashedMap;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.eval.EqualsEvaluator;
@@ -30,6 +28,8 @@ import org.apache.solr.client.solrj.io.eval.StreamEvaluator;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 import org.junit.Test;
 
+import junit.framework.Assert;
+
 public class EqualsEvaluatorTest extends LuceneTestCase {
 
   StreamFactory factory;
@@ -42,7 +42,7 @@ public class EqualsEvaluatorTest extends LuceneTestCase {
       .withFunctionName("eq", EqualsEvaluator.class)
       .withFunctionName("val", RawValueEvaluator.class)
       ;
-    values = new HashedMap();
+    values = new HashMap<String,Object>();
   }
     
   @Test

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4171ef79/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ExclusiveOrEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ExclusiveOrEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ExclusiveOrEvaluatorTest.java
index c1cc677..659bfdd 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ExclusiveOrEvaluatorTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ExclusiveOrEvaluatorTest.java
@@ -16,11 +16,9 @@
  */
 package org.apache.solr.client.solrj.io.stream.eval;
 
+import java.util.HashMap;
 import java.util.Map;
 
-import junit.framework.Assert;
-
-import org.apache.commons.collections.map.HashedMap;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.eval.ExclusiveOrEvaluator;
@@ -28,6 +26,8 @@ import org.apache.solr.client.solrj.io.eval.StreamEvaluator;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 import org.junit.Test;
 
+import junit.framework.Assert;
+
 public class ExclusiveOrEvaluatorTest extends LuceneTestCase {
 
   StreamFactory factory;
@@ -38,7 +38,7 @@ public class ExclusiveOrEvaluatorTest extends LuceneTestCase {
     
     factory = new StreamFactory()
       .withFunctionName("eor", ExclusiveOrEvaluator.class);
-    values = new HashedMap();
+    values = new HashMap<String,Object>();
   }
     
   @Test

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4171ef79/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/GreaterThanEqualToEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/GreaterThanEqualToEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/GreaterThanEqualToEvaluatorTest.java
index 5968a15..26e6ac5 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/GreaterThanEqualToEvaluatorTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/GreaterThanEqualToEvaluatorTest.java
@@ -17,11 +17,9 @@
 package org.apache.solr.client.solrj.io.stream.eval;
 
 import java.io.IOException;
+import java.util.HashMap;
 import java.util.Map;
 
-import junit.framework.Assert;
-
-import org.apache.commons.collections.map.HashedMap;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.eval.GreaterThanEqualToEvaluator;
@@ -29,6 +27,8 @@ import org.apache.solr.client.solrj.io.eval.StreamEvaluator;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 import org.junit.Test;
 
+import junit.framework.Assert;
+
 public class GreaterThanEqualToEvaluatorTest extends LuceneTestCase {
 
   StreamFactory factory;
@@ -39,7 +39,7 @@ public class GreaterThanEqualToEvaluatorTest extends LuceneTestCase {
     
     factory = new StreamFactory()
       .withFunctionName("gte", GreaterThanEqualToEvaluator.class);
-    values = new HashedMap();
+    values = new HashMap<String,Object>();
   }
     
   @Test

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4171ef79/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/GreaterThanEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/GreaterThanEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/GreaterThanEvaluatorTest.java
index d31a79c..41a9763 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/GreaterThanEvaluatorTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/GreaterThanEvaluatorTest.java
@@ -17,11 +17,9 @@
 package org.apache.solr.client.solrj.io.stream.eval;
 
 import java.io.IOException;
+import java.util.HashMap;
 import java.util.Map;
 
-import junit.framework.Assert;
-
-import org.apache.commons.collections.map.HashedMap;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.eval.GreaterThanEvaluator;
@@ -29,6 +27,8 @@ import org.apache.solr.client.solrj.io.eval.StreamEvaluator;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 import org.junit.Test;
 
+import junit.framework.Assert;
+
 public class GreaterThanEvaluatorTest extends LuceneTestCase {
 
   StreamFactory factory;
@@ -39,7 +39,7 @@ public class GreaterThanEvaluatorTest extends LuceneTestCase {
     
     factory = new StreamFactory()
       .withFunctionName("gt", GreaterThanEvaluator.class);
-    values = new HashedMap();
+    values = new HashMap<String,Object>();
   }
     
   @Test

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4171ef79/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/LessThanEqualToEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/LessThanEqualToEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/LessThanEqualToEvaluatorTest.java
index 114ea2d..e62a3e9 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/LessThanEqualToEvaluatorTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/LessThanEqualToEvaluatorTest.java
@@ -17,11 +17,9 @@
 package org.apache.solr.client.solrj.io.stream.eval;
 
 import java.io.IOException;
+import java.util.HashMap;
 import java.util.Map;
 
-import junit.framework.Assert;
-
-import org.apache.commons.collections.map.HashedMap;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.eval.LessThanEqualToEvaluator;
@@ -29,6 +27,8 @@ import org.apache.solr.client.solrj.io.eval.StreamEvaluator;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 import org.junit.Test;
 
+import junit.framework.Assert;
+
 public class LessThanEqualToEvaluatorTest extends LuceneTestCase {
 
   StreamFactory factory;
@@ -39,7 +39,7 @@ public class LessThanEqualToEvaluatorTest extends LuceneTestCase {
     
     factory = new StreamFactory()
       .withFunctionName("lte", LessThanEqualToEvaluator.class);
-    values = new HashedMap();
+    values = new HashMap<String,Object>();
   }
     
   @Test

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4171ef79/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/LessThanEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/LessThanEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/LessThanEvaluatorTest.java
index 5cc0274..da8c46a 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/LessThanEvaluatorTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/LessThanEvaluatorTest.java
@@ -17,11 +17,9 @@
 package org.apache.solr.client.solrj.io.stream.eval;
 
 import java.io.IOException;
+import java.util.HashMap;
 import java.util.Map;
 
-import junit.framework.Assert;
-
-import org.apache.commons.collections.map.HashedMap;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.eval.LessThanEvaluator;
@@ -29,6 +27,8 @@ import org.apache.solr.client.solrj.io.eval.StreamEvaluator;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 import org.junit.Test;
 
+import junit.framework.Assert;
+
 public class LessThanEvaluatorTest extends LuceneTestCase {
 
   StreamFactory factory;
@@ -39,7 +39,7 @@ public class LessThanEvaluatorTest extends LuceneTestCase {
     
     factory = new StreamFactory()
       .withFunctionName("lt", LessThanEvaluator.class);
-    values = new HashedMap();
+    values = new HashMap<String,Object>();
   }
     
   @Test

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4171ef79/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/MultiplyEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/MultiplyEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/MultiplyEvaluatorTest.java
index a2a6616..ef473fc 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/MultiplyEvaluatorTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/MultiplyEvaluatorTest.java
@@ -17,11 +17,9 @@
 package org.apache.solr.client.solrj.io.stream.eval;
 
 import java.io.IOException;
+import java.util.HashMap;
 import java.util.Map;
 
-import junit.framework.Assert;
-
-import org.apache.commons.collections.map.HashedMap;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.eval.MultiplyEvaluator;
@@ -29,6 +27,8 @@ import org.apache.solr.client.solrj.io.eval.StreamEvaluator;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 import org.junit.Test;
 
+import junit.framework.Assert;
+
 public class MultiplyEvaluatorTest extends LuceneTestCase {
 
   StreamFactory factory;
@@ -39,7 +39,7 @@ public class MultiplyEvaluatorTest extends LuceneTestCase {
     
     factory = new StreamFactory()
       .withFunctionName("mult", MultiplyEvaluator.class);
-    values = new HashedMap();
+    values = new HashMap<String,Object>();
   }
     
   @Test

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4171ef79/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/NaturalLogEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/NaturalLogEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/NaturalLogEvaluatorTest.java
index c4ae127..97867fe 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/NaturalLogEvaluatorTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/NaturalLogEvaluatorTest.java
@@ -17,11 +17,9 @@
 package org.apache.solr.client.solrj.io.stream.eval;
 
 import java.io.IOException;
+import java.util.HashMap;
 import java.util.Map;
 
-import junit.framework.Assert;
-
-import org.apache.commons.collections.map.HashedMap;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.eval.AddEvaluator;
@@ -30,6 +28,8 @@ import org.apache.solr.client.solrj.io.eval.StreamEvaluator;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 import org.junit.Test;
 
+import junit.framework.Assert;
+
 public class NaturalLogEvaluatorTest extends LuceneTestCase {
 
   StreamFactory factory;
@@ -40,7 +40,7 @@ public class NaturalLogEvaluatorTest extends LuceneTestCase {
 
     factory = new StreamFactory()
         .withFunctionName("log", NaturalLogEvaluator.class).withFunctionName("add", AddEvaluator.class);
-    values = new HashedMap();
+    values = new HashMap<String,Object>();
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4171ef79/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/NotEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/NotEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/NotEvaluatorTest.java
index 6116163..56cb4fd 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/NotEvaluatorTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/NotEvaluatorTest.java
@@ -16,18 +16,18 @@
  */
 package org.apache.solr.client.solrj.io.stream.eval;
 
+import java.util.HashMap;
 import java.util.Map;
 
-import junit.framework.Assert;
-
-import org.apache.commons.collections.map.HashedMap;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.io.Tuple;
-import org.apache.solr.client.solrj.io.eval.StreamEvaluator;
 import org.apache.solr.client.solrj.io.eval.NotEvaluator;
+import org.apache.solr.client.solrj.io.eval.StreamEvaluator;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 import org.junit.Test;
 
+import junit.framework.Assert;
+
 public class NotEvaluatorTest extends LuceneTestCase {
 
   StreamFactory factory;
@@ -38,7 +38,7 @@ public class NotEvaluatorTest extends LuceneTestCase {
     
     factory = new StreamFactory()
       .withFunctionName("not", NotEvaluator.class);
-    values = new HashedMap();
+    values = new HashMap<String,Object>();
   }
     
   @Test

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4171ef79/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/OrEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/OrEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/OrEvaluatorTest.java
index 00c6b7a..09206af 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/OrEvaluatorTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/OrEvaluatorTest.java
@@ -16,11 +16,9 @@
  */
 package org.apache.solr.client.solrj.io.stream.eval;
 
+import java.util.HashMap;
 import java.util.Map;
 
-import junit.framework.Assert;
-
-import org.apache.commons.collections.map.HashedMap;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.eval.OrEvaluator;
@@ -28,6 +26,8 @@ import org.apache.solr.client.solrj.io.eval.StreamEvaluator;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 import org.junit.Test;
 
+import junit.framework.Assert;
+
 public class OrEvaluatorTest extends LuceneTestCase {
 
   StreamFactory factory;
@@ -38,7 +38,7 @@ public class OrEvaluatorTest extends LuceneTestCase {
     
     factory = new StreamFactory()
       .withFunctionName("or", OrEvaluator.class);
-    values = new HashedMap();
+    values = new HashMap<String,Object>();
   }
     
   @Test

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4171ef79/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/RawValueEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/RawValueEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/RawValueEvaluatorTest.java
index 0d637e1..1419ec2 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/RawValueEvaluatorTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/RawValueEvaluatorTest.java
@@ -16,11 +16,9 @@
  */
 package org.apache.solr.client.solrj.io.stream.eval;
 
+import java.util.HashMap;
 import java.util.Map;
 
-import junit.framework.Assert;
-
-import org.apache.commons.collections.map.HashedMap;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.eval.AddEvaluator;
@@ -29,6 +27,8 @@ import org.apache.solr.client.solrj.io.eval.RawValueEvaluator;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 import org.junit.Test;
 
+import junit.framework.Assert;
+
 public class RawValueEvaluatorTest extends LuceneTestCase {
 
   StreamFactory factory;
@@ -42,7 +42,7 @@ public class RawValueEvaluatorTest extends LuceneTestCase {
       .withFunctionName("add", AddEvaluator.class)
       .withFunctionName("and", AndEvaluator.class)
       ;
-    values = new HashedMap();
+    values = new HashMap<String,Object>();
   }
     
   @Test

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4171ef79/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/SubtractEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/SubtractEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/SubtractEvaluatorTest.java
index 58cef8d..3da169e 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/SubtractEvaluatorTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/SubtractEvaluatorTest.java
@@ -17,11 +17,9 @@
 package org.apache.solr.client.solrj.io.stream.eval;
 
 import java.io.IOException;
+import java.util.HashMap;
 import java.util.Map;
 
-import junit.framework.Assert;
-
-import org.apache.commons.collections.map.HashedMap;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.eval.StreamEvaluator;
@@ -29,6 +27,8 @@ import org.apache.solr.client.solrj.io.eval.SubtractEvaluator;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 import org.junit.Test;
 
+import junit.framework.Assert;
+
 public class SubtractEvaluatorTest extends LuceneTestCase {
 
   StreamFactory factory;
@@ -39,7 +39,7 @@ public class SubtractEvaluatorTest extends LuceneTestCase {
     
     factory = new StreamFactory()
       .withFunctionName("sub", SubtractEvaluator.class);
-    values = new HashedMap();
+    values = new HashMap<String,Object>();
   }
     
   @Test


[35/46] lucene-solr:jira/solr-9959: SOLR-10292: Adds CartesianProductStream to turn multivalued fields into multiple tuples

Posted by ab...@apache.org.
SOLR-10292: Adds CartesianProductStream to turn multivalued fields into multiple tuples


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/92297b58
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/92297b58
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/92297b58

Branch: refs/heads/jira/solr-9959
Commit: 92297b58605104106b5b31d3dae5c2daed1886ba
Parents: ffaa234
Author: Dennis Gove <dp...@gmail.com>
Authored: Mon Mar 20 16:36:05 2017 -0400
Committer: Dennis Gove <dp...@gmail.com>
Committed: Mon Mar 20 16:40:46 2017 -0400

----------------------------------------------------------------------
 solr/CHANGES.txt                                |   3 +
 .../org/apache/solr/handler/StreamHandler.java  |   5 +
 .../client/solrj/io/comp/FieldComparator.java   |   5 +
 .../solrj/io/comp/MultipleFieldComparator.java  |  22 ++
 .../client/solrj/io/comp/StreamComparator.java  |   1 +
 .../client/solrj/io/eval/FieldEvaluator.java    |  28 +-
 .../solrj/io/stream/CartesianProductStream.java | 301 +++++++++++++++++++
 .../solr/configsets/streaming/conf/schema.xml   |   1 +
 .../solrj/io/stream/StreamExpressionTest.java   | 132 ++++++++
 .../io/stream/eval/FieldEvaluatorTest.java      | 114 +++++++
 10 files changed, 611 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/92297b58/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 7767453..f61c4c2 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -58,6 +58,9 @@ New Features
 
 * SOLR-9835: Create another replication mode for SolrCloud
 
+* SOLR-10292: Adds CartesianProductStream which turns a single tuple with a multi-valued field into N 
+  tuples, one for each value in the multi-valued field. (Dennis Gove)
+
 Bug Fixes
 ----------------------
 * SOLR-9262: Connection and read timeouts are being ignored by UpdateShardHandler after SOLR-4509.
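
For context, a minimal usage sketch of the new decorator, mirroring the
StreamExpressionTest additions further down in this commit; the collection
name, zkHost value and field names are placeholders, not part of the change:

    import org.apache.solr.client.solrj.io.stream.CartesianProductStream;
    import org.apache.solr.client.solrj.io.stream.CloudSolrStream;
    import org.apache.solr.client.solrj.io.stream.TupleStream;
    import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;

    String zkHost = "localhost:9983";   // placeholder ZooKeeper address

    StreamFactory factory = new StreamFactory()
      .withCollectionZkHost("collection1", zkHost)
      .withFunctionName("search", CloudSolrStream.class)
      .withFunctionName("cartesian", CartesianProductStream.class);

    // A tuple with a_ss=[a,b,c] is expanded into three tuples, one per value,
    // ordered by the optional productSort.
    TupleStream stream = factory.constructStream(
        "cartesian("
      +   "search(collection1, q=*:*, fl=\"id,a_ss\", sort=\"id asc\"),"
      +   "a_ss,"
      +   "productSort=\"a_ss DESC\""
      + ")");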

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/92297b58/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
index e69f52b..dfae5cd 100644
--- a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
@@ -154,6 +154,9 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
       .withFunctionName("gatherNodes", GatherNodesStream.class)
       .withFunctionName("nodes", GatherNodesStream.class)
       .withFunctionName("select", SelectStream.class)
+      .withFunctionName("shortestPath", ShortestPathStream.class)
+      .withFunctionName("gatherNodes", GatherNodesStream.class)
+      .withFunctionName("nodes", GatherNodesStream.class)
       .withFunctionName("scoreNodes", ScoreNodesStream.class)
       .withFunctionName("model", ModelStream.class)
       .withFunctionName("classify", ClassifyStream.class)
@@ -162,6 +165,8 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
       .withFunctionName("null", NullStream.class)
       .withFunctionName("priority", PriorityStream.class)
       .withFunctionName("significantTerms", SignificantTermsStream.class)
+      .withFunctionName("cartesianProduct", CartesianProductStream.class)
+      
       // metrics
       .withFunctionName("min", MinMetric.class)
       .withFunctionName("max", MaxMetric.class)

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/92297b58/solr/solrj/src/java/org/apache/solr/client/solrj/io/comp/FieldComparator.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/comp/FieldComparator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/comp/FieldComparator.java
index 15af57a..f2be53e 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/comp/FieldComparator.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/comp/FieldComparator.java
@@ -166,4 +166,9 @@ public class FieldComparator implements StreamComparator {
         order
     );
   }
+  
+  @Override
+  public StreamComparator append(StreamComparator other){
+    return new MultipleFieldComparator(this).append(other);
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/92297b58/solr/solrj/src/java/org/apache/solr/client/solrj/io/comp/MultipleFieldComparator.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/comp/MultipleFieldComparator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/comp/MultipleFieldComparator.java
index c709f4d..09532e1 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/comp/MultipleFieldComparator.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/comp/MultipleFieldComparator.java
@@ -17,6 +17,8 @@
 package org.apache.solr.client.solrj.io.comp;
 
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.Map;
 import java.util.UUID;
 
@@ -113,4 +115,24 @@ public class MultipleFieldComparator implements StreamComparator {
     
     return new MultipleFieldComparator(aliasedComps);
   }
+  
+  @Override
+  public StreamComparator append(StreamComparator other){
+    List<StreamComparator> newComps = new ArrayList<>();
+    
+    for(StreamComparator comp : comps){
+      newComps.add(comp);
+    }
+    
+    if(other instanceof FieldComparator){
+      newComps.add(other);
+    }
+    else if(other instanceof MultipleFieldComparator){
+      for(StreamComparator comp : ((MultipleFieldComparator)other).comps){
+        newComps.add(comp);
+      }
+    }
+    
+    return new MultipleFieldComparator(newComps.toArray(new StreamComparator[newComps.size()]));
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/92297b58/solr/solrj/src/java/org/apache/solr/client/solrj/io/comp/StreamComparator.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/comp/StreamComparator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/comp/StreamComparator.java
index 156a19c..70bd51d 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/comp/StreamComparator.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/comp/StreamComparator.java
@@ -27,4 +27,5 @@ import org.apache.solr.client.solrj.io.stream.expr.Expressible;
 public interface StreamComparator extends Comparator<Tuple>, Expressible, Serializable {
   public boolean isDerivedFrom(StreamComparator base);
   public StreamComparator copyAliased(Map<String,String> aliases);
+  public StreamComparator append(StreamComparator other);
 }
\ No newline at end of file
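
A short hedged sketch of what the new append contract provides; ComparatorOrder
and the two-argument FieldComparator constructor are assumed from the existing
comparator API rather than shown in this diff:

    import org.apache.solr.client.solrj.io.comp.ComparatorOrder;
    import org.apache.solr.client.solrj.io.comp.FieldComparator;
    import org.apache.solr.client.solrj.io.comp.StreamComparator;

    StreamComparator incoming    = new FieldComparator("id",   ComparatorOrder.ASCENDING);
    StreamComparator productSort = new FieldComparator("a_ss", ComparatorOrder.DESCENDING);

    // FieldComparator.append wraps both comparators in a MultipleFieldComparator,
    // so the combined comparator orders by id first and breaks ties by a_ss
    // descending. CartesianProductStream.getStreamSort() relies on this to
    // report "incoming sort + productSort".
    StreamComparator combined = incoming.append(productSort);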

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/92297b58/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FieldEvaluator.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FieldEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FieldEvaluator.java
index 0ebe729..3251498 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FieldEvaluator.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FieldEvaluator.java
@@ -20,6 +20,8 @@
 package org.apache.solr.client.solrj.io.eval;
 
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.stream.expr.Explanation;
@@ -43,7 +45,31 @@ public class FieldEvaluator extends SimpleEvaluator {
   
   @Override
   public Object evaluate(Tuple tuple) {
-    return tuple.get(fieldName); // returns null if field doesn't exist in tuple
+    Object value = tuple.get(fieldName);
+    
+    // if we have an array then convert to an ArrayList
+    // if we have an iterable that is not a list then convert to ArrayList
+    // lists are good to go
+    if(null != value){
+      if(value instanceof Object[]){
+        Object[] array = (Object[])value;
+        List<Object> list = new ArrayList<Object>(array.length);
+        for(Object obj : array){
+          list.add(obj);
+        }
+        return list;
+      }
+      else if(value instanceof Iterable && !(value instanceof List<?>)){
+        Iterable<?> iter = (Iterable<?>)value;
+        List<Object> list = new ArrayList<Object>();
+        for(Object obj : iter){
+          list.add(obj);
+        }
+        return list;
+      }
+    }
+    
+    return value;
   }
   
   @Override
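
In practice the change above means a multi-valued field always comes back from
FieldEvaluator as a java.util.List, whatever shape the tuple stored it in. A
hedged sketch; the field name and values are illustrative:

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import org.apache.solr.client.solrj.io.Tuple;
    import org.apache.solr.client.solrj.io.eval.FieldEvaluator;

    Map<String, Object> fields = new HashMap<>();
    fields.put("a_ss", new String[]{ "a", "b", "c" });   // array-backed multi-value

    Tuple tuple = new Tuple(fields);
    Object value = new FieldEvaluator("a_ss").evaluate(tuple);

    // value is now a List<Object> of ["a", "b", "c"], which CartesianProductStream
    // can index into positionally when generating the product.
    List<?> asList = (List<?>) value;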

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/92297b58/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CartesianProductStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CartesianProductStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CartesianProductStream.java
new file mode 100644
index 0000000..feb10c7
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CartesianProductStream.java
@@ -0,0 +1,301 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.solrj.io.stream;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+
+import org.apache.solr.client.solrj.io.Tuple;
+import org.apache.solr.client.solrj.io.comp.FieldComparator;
+import org.apache.solr.client.solrj.io.comp.StreamComparator;
+import org.apache.solr.client.solrj.io.eval.FieldEvaluator;
+import org.apache.solr.client.solrj.io.eval.StreamEvaluator;
+import org.apache.solr.client.solrj.io.stream.expr.Explanation;
+import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
+import org.apache.solr.client.solrj.io.stream.expr.Expressible;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExplanation;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionNamedParameter;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParser;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+public class CartesianProductStream extends TupleStream implements Expressible {
+
+  private static final long serialVersionUID = 1;
+
+  private TupleStream stream;
+  private List<NamedEvaluator> evaluators;
+  private StreamComparator orderBy;
+  
+  // Used to contain the sorted queue of generated tuples 
+  private LinkedList<Tuple> generatedTuples;
+  
+  public CartesianProductStream(StreamExpression expression,StreamFactory factory) throws IOException {
+    String functionName = factory.getFunctionName(getClass());
+    
+    // grab all parameters out
+    List<StreamExpression> streamExpressions = factory.getExpressionOperandsRepresentingTypes(expression, Expressible.class, TupleStream.class);
+    List<StreamExpressionParameter> evaluateAsExpressions = factory.getOperandsOfType(expression, StreamExpressionValue.class);
+    StreamExpressionNamedParameter orderByExpression = factory.getNamedOperand(expression, "productSort");
+    
+    // validate expression contains only what we want.
+    if(expression.getParameters().size() != streamExpressions.size() + evaluateAsExpressions.size() + (null == orderByExpression ? 0 : 1)){
+      throw new IOException(String.format(Locale.ROOT,"Invalid %s expression %s - unknown operands found", functionName, expression));
+    }
+    
+    if(1 != streamExpressions.size()){
+      throw new IOException(String.format(Locale.ROOT,"Invalid %s expression %s - expecting single stream but found %d (must be TupleStream types)", functionName, expression, streamExpressions.size()));
+    }
+
+    stream = factory.constructStream(streamExpressions.get(0));
+    orderBy = null == orderByExpression ? null : factory.constructComparator(((StreamExpressionValue)orderByExpression.getParameter()).getValue(), FieldComparator.class);
+    
+    evaluators = new ArrayList<>();
+    for(StreamExpressionParameter evaluateAsExpression : evaluateAsExpressions){
+      String fullString = ((StreamExpressionValue)evaluateAsExpression).getValue().trim();
+      String originalFullString = fullString; // used for error messages
+      
+      // remove possible wrapping quotes
+      if(fullString.length() > 2 && fullString.startsWith("\"") && fullString.endsWith("\"")){
+        fullString = fullString.substring(1, fullString.length() - 1).trim();
+      }
+      
+      String evaluatorPart = null;
+      String asNamePart = null;
+      
+      if(fullString.toLowerCase(Locale.ROOT).contains(" as ")){
+        String[] parts = fullString.split("(?i) as "); // ensure we are splitting in a case-insensitive way
+        if(2 != parts.length){
+          throw new IOException(String.format(Locale.ROOT,"Invalid %s expression %s - expecting evaluator of form 'fieldA' or 'fieldA as alias' but found %s", functionName, expression, originalFullString));
+        }
+        
+        evaluatorPart = parts[0].trim();
+        asNamePart = parts[1].trim();        
+      }
+      else{
+        evaluatorPart = fullString;
+        // no rename
+      }
+      
+      boolean wasHandledAsEvaluatorFunction = false;
+      StreamEvaluator evaluator = null;
+      if(evaluatorPart.contains("(")){
+        // is a possible evaluator
+        try{
+          StreamExpression asValueExpression = StreamExpressionParser.parse(evaluatorPart);
+          if(factory.doesRepresentTypes(asValueExpression, StreamEvaluator.class)){
+            evaluator = factory.constructEvaluator(asValueExpression);
+            wasHandledAsEvaluatorFunction = true;
+          }
+        }
+        catch(Throwable e){
+          // it was not handled, so treat as a non-evaluator
+        }
+      }
+      if(!wasHandledAsEvaluatorFunction){
+        // treat as a straight field evaluator
+        evaluator = new FieldEvaluator(evaluatorPart);
+        if(null == asNamePart){
+          asNamePart = evaluatorPart; // just use the field name
+        }
+      }
+
+      if(null == evaluator || null == asNamePart){
+        throw new IOException(String.format(Locale.ROOT,"Invalid %s expression %s - failed to parse evaluator '%s'", functionName, expression, originalFullString));
+      }
+      
+      evaluators.add(new NamedEvaluator(asNamePart, evaluator));
+    }
+  }
+    
+  @Override
+  public StreamExpression toExpression(StreamFactory factory) throws IOException{
+    return toExpression(factory, true);
+  }
+  
+  private StreamExpression toExpression(StreamFactory factory, boolean includeStreams) throws IOException {
+    // function name
+    StreamExpression expression = new StreamExpression(factory.getFunctionName(this.getClass()));
+    
+    if(includeStreams){
+      // we know stream is expressible
+      expression.addParameter(((Expressible)stream).toExpression(factory));
+    }
+    else{
+      expression.addParameter("<stream>");
+    }
+        
+    // selected evaluators
+    for(NamedEvaluator evaluator : evaluators) {
+      expression.addParameter(String.format(Locale.ROOT, "%s as %s", evaluator.getEvaluator().toExpression(factory), evaluator.getName()));
+    }
+    
+    expression.addParameter(new StreamExpressionNamedParameter("productSort", orderBy.toExpression(factory)));
+    
+    return expression;   
+  }
+  
+  @Override
+  public Explanation toExplanation(StreamFactory factory) throws IOException {
+
+    Explanation explanation = new StreamExplanation(getStreamNodeId().toString())
+      .withChildren(new Explanation[]{
+        stream.toExplanation(factory)
+      })
+      .withFunctionName(factory.getFunctionName(this.getClass()))
+      .withImplementingClass(this.getClass().getName())
+      .withExpressionType(ExpressionType.STREAM_DECORATOR)
+      .withExpression(toExpression(factory, false).toString());   
+    
+    for(NamedEvaluator evaluator : evaluators){
+      explanation.addHelper(evaluator.getEvaluator().toExplanation(factory));
+    }
+    
+    explanation.addHelper(orderBy.toExplanation(factory));
+    
+    return explanation;
+  }
+
+  public Tuple read() throws IOException {
+    if(generatedTuples.isEmpty()){
+      Tuple tuple = stream.read();
+      
+      if(tuple.EOF){
+        return tuple;
+      }
+    
+      // returns tuples in desired sorted order
+      generatedTuples = generateTupleList(tuple);
+    }
+    
+    return generatedTuples.pop();
+  }
+  
+  private LinkedList<Tuple> generateTupleList(Tuple original) throws IOException{
+    Map<String, Object> evaluatedValues = new HashMap<>();
+    
+    for(NamedEvaluator evaluator : evaluators){
+      evaluatedValues.put(evaluator.getName(), evaluator.getEvaluator().evaluate(original));
+    }
+    
+    // use an array list internally because it has better sort performance
+    // in Java 8. We do pay a conversion to a linked list but ..... oh well
+    ArrayList<Tuple> generatedTupleList = new ArrayList<>();
+    
+    int[] workingIndexes = new int[evaluators.size()]; // java language spec ensures all values are 0
+    do{
+      Tuple generated = original.clone();
+      for(int offset = 0; offset < workingIndexes.length; ++offset){
+        String fieldName = evaluators.get(offset).getName();
+        Object evaluatedValue = evaluatedValues.get(fieldName);
+        if(evaluatedValue instanceof Collection){
+          // because of the way a FieldEvaluator works we know that 
+          // any collection is a list.
+          generated.put(fieldName, ((List<Object>)evaluatedValue).get(workingIndexes[offset]));
+        }
+      }
+      generatedTupleList.add(generated);
+    }while(iterate(evaluators, workingIndexes, evaluatedValues));
+    
+    // order if we need to
+    if(null != orderBy){
+      generatedTupleList.sort(orderBy);
+    }
+    
+    return new LinkedList<>(generatedTupleList);
+  }
+  
+  private boolean iterate(List<NamedEvaluator> evaluators, int[] indexes, Map<String, Object> evaluatedValues){
+    // this assumes evaluators and indexes are the same length, which is ok cause we created it so we know it is
+    // go right to left and increment, returning true if we're not at the end
+    for(int offset = indexes.length - 1; offset >= 0; --offset){
+      Object evaluatedValue = evaluatedValues.get(evaluators.get(offset).getName());
+      if(evaluatedValue instanceof Collection){
+        int currentIndexValue = indexes[offset];
+        if(currentIndexValue < ((Collection)evaluatedValue).size() - 1){
+          indexes[offset] = currentIndexValue + 1;
+          return true;
+        }
+        else if(0 != offset){
+          indexes[offset] = 0;
+          // move to the left
+        }
+      }
+    }
+    
+    // no more
+    return false;
+  }
+  
+  /** Return the incoming sort + the sort applied to the generated tuples */
+  public StreamComparator getStreamSort(){
+    if(null != orderBy){
+      return stream.getStreamSort().append(orderBy);
+    }
+    return stream.getStreamSort();
+  }
+  
+  public void setStreamContext(StreamContext context) {
+    this.stream.setStreamContext(context);
+  }
+
+  public List<TupleStream> children() {
+    List<TupleStream> l =  new ArrayList<>();
+    l.add(stream);
+    return l;
+  }
+
+  public void open() throws IOException {
+    stream.open();
+    generatedTuples = new LinkedList<>();
+  }
+
+  public void close() throws IOException {
+    stream.close();
+    generatedTuples.clear();
+  }
+
+  public int getCost() {
+    return 0;
+  }
+  
+  class NamedEvaluator{
+    private String name;
+    private StreamEvaluator evaluator;
+    
+    public NamedEvaluator(String name, StreamEvaluator evaluator){
+      this.name = name;
+      this.evaluator = evaluator;
+    }
+    
+    public String getName(){
+      return name;
+    }
+    
+    public StreamEvaluator getEvaluator(){
+      return evaluator;
+    }
+  }
+}
\ No newline at end of file
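
The heart of the product generation is the odometer-style walk in iterate():
the int[] of indexes counts through every combination of positions in the
evaluated lists, with the rightmost position moving fastest. A standalone
hedged sketch of that loop over plain lists, simplified from the committed
method:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    // Advance indexes to the next combination; return false once exhausted.
    static boolean nextCombination(List<List<Object>> lists, int[] indexes) {
      for (int offset = indexes.length - 1; offset >= 0; --offset) {
        if (indexes[offset] < lists.get(offset).size() - 1) {
          indexes[offset]++;            // advanced this position, more to come
          return true;
        } else if (offset != 0) {
          indexes[offset] = 0;          // roll over and carry to the left
        }
      }
      return false;                     // every position is at its last value
    }

    // With lists of sizes 5 and 3 this emits all 15 combinations, matching the
    // 5 * 3 per-document tuple counts asserted in StreamExpressionTest below.
    List<List<Object>> lists = new ArrayList<>();
    lists.add(Arrays.asList((Object) "a", "b", "c", "d", "e"));
    lists.add(Arrays.asList((Object) 1L, 2L, 3L));
    int[] indexes = new int[lists.size()];
    do {
      // read lists.get(i).get(indexes[i]) for each field i and build one tuple
    } while (nextCombination(lists, indexes));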

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/92297b58/solr/solrj/src/test-files/solrj/solr/configsets/streaming/conf/schema.xml
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test-files/solrj/solr/configsets/streaming/conf/schema.xml b/solr/solrj/src/test-files/solrj/solr/configsets/streaming/conf/schema.xml
index 7d3173a..b61a2e9 100644
--- a/solr/solrj/src/test-files/solrj/solr/configsets/streaming/conf/schema.xml
+++ b/solr/solrj/src/test-files/solrj/solr/configsets/streaming/conf/schema.xml
@@ -526,6 +526,7 @@
     <dynamicField name="*_ss" type="string"  indexed="true"  stored="true" multiValued="true"/>
     <dynamicField name="*_s1"  type="string"  indexed="true"  stored="true" multiValued="false"/>
     <dynamicField name="*_l"  type="long"   indexed="true"  stored="true"/>
+    <dynamicField name="*_ls"  type="long"   indexed="true"  stored="true" multiValued="true"/>
     <dynamicField name="*_l1"  type="long"   indexed="true"  stored="true" multiValued="false"/>
     <dynamicField name="*_t"  type="text"    indexed="true"  stored="true"/>
     <dynamicField name="*_b"  type="boolean" indexed="true"  stored="true"/>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/92297b58/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
index c61e443..6c96025 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
@@ -4960,6 +4960,138 @@ public class StreamExpressionTest extends SolrCloudTestCase {
   }
 
   @Test
+  public void testCartesianProductStream() throws Exception {
+
+    new UpdateRequest()
+        .add(id, "0", "a_ss", "a", "a_ss", "b", "a_ss", "c", "a_ss", "d", "a_ss", "e", "b_ls", "1", "b_ls", "2", "b_ls", "3")
+        .add(id, "1", "a_ss", "a", "a_ss", "b", "a_ss", "c", "a_ss", "d", "a_ss", "e")
+        .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
+    
+    StreamExpression expression;
+    TupleStream stream;
+    List<Tuple> tuples;
+    
+    StreamFactory factory = new StreamFactory()
+      .withCollectionZkHost("collection1", cluster.getZkServer().getZkAddress())
+      .withFunctionName("search", CloudSolrStream.class)
+      .withFunctionName("cartesian", CartesianProductStream.class);
+      
+    // single selection, no sort
+    stream = factory.constructStream("cartesian("
+                                   +   "search(collection1, q=*:*, fl=\"id,a_ss\", sort=\"id asc\"),"
+                                   +   "a_ss"
+                                   + ")");
+    tuples = getTuples(stream);
+   
+    assertEquals(10, tuples.size());
+    assertOrder(tuples, 0,0,0,0,0,1,1,1,1,1);
+    assertEquals("a", tuples.get(0).get("a_ss"));
+    assertEquals("c", tuples.get(2).get("a_ss"));
+    assertEquals("a", tuples.get(5).get("a_ss"));
+    assertEquals("c", tuples.get(7).get("a_ss"));
+
+    // single selection, sort
+    stream = factory.constructStream("cartesian("
+        +   "search(collection1, q=*:*, fl=\"id,a_ss\", sort=\"id asc\"),"
+        +   "a_ss,"
+        +   "productSort=\"a_ss DESC\""
+        + ")");
+    tuples = getTuples(stream);
+    
+    assertEquals(10, tuples.size());
+    assertOrder(tuples, 0,0,0,0,0,1,1,1,1,1);
+    assertEquals("e", tuples.get(0).get("a_ss"));
+    assertEquals("c", tuples.get(2).get("a_ss"));
+    assertEquals("e", tuples.get(5).get("a_ss"));
+    assertEquals("c", tuples.get(7).get("a_ss"));
+    
+    // multi selection, sort
+    stream = factory.constructStream("cartesian("
+        +   "search(collection1, q=*:*, fl=\"id,a_ss,b_ls\", sort=\"id asc\"),"
+        +   "a_ss,"
+        +   "b_ls,"
+        +   "productSort=\"a_ss ASC\""
+        + ")");
+    tuples = getTuples(stream);
+    
+    assertEquals(20, tuples.size()); // (5 * 3) + 5
+    assertOrder(tuples, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1);
+    assertEquals("a", tuples.get(0).get("a_ss"));
+    assertEquals(1L, tuples.get(0).get("b_ls"));
+    assertEquals("a", tuples.get(1).get("a_ss"));
+    assertEquals(2L, tuples.get(1).get("b_ls"));
+    assertEquals("a", tuples.get(2).get("a_ss"));
+    assertEquals(3L, tuples.get(2).get("b_ls"));
+    
+    assertEquals("b", tuples.get(3).get("a_ss"));
+    assertEquals(1L, tuples.get(3).get("b_ls"));
+    assertEquals("b", tuples.get(4).get("a_ss"));
+    assertEquals(2L, tuples.get(4).get("b_ls"));
+    assertEquals("b", tuples.get(5).get("a_ss"));
+    assertEquals(3L, tuples.get(5).get("b_ls"));
+    
+    // multi selection, sort
+    stream = factory.constructStream("cartesian("
+    +   "search(collection1, q=*:*, fl=\"id,a_ss,b_ls\", sort=\"id asc\"),"
+    +   "a_ss,"
+    +   "b_ls,"
+    +   "productSort=\"a_ss ASC, b_ls DESC\""
+    + ")");
+    tuples = getTuples(stream);
+    
+    assertEquals(20, tuples.size()); // (5 * 3) + 5
+    assertOrder(tuples, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1);
+    assertEquals("a", tuples.get(0).get("a_ss"));
+    assertEquals(3L, tuples.get(0).get("b_ls"));
+    assertEquals("a", tuples.get(1).get("a_ss"));
+    assertEquals(2L, tuples.get(1).get("b_ls"));
+    assertEquals("a", tuples.get(2).get("a_ss"));
+    assertEquals(1L, tuples.get(2).get("b_ls"));
+    
+    assertEquals("b", tuples.get(3).get("a_ss"));
+    assertEquals(3L, tuples.get(3).get("b_ls"));
+    assertEquals("b", tuples.get(4).get("a_ss"));
+    assertEquals(2L, tuples.get(4).get("b_ls"));
+    assertEquals("b", tuples.get(5).get("a_ss"));
+    assertEquals(1L, tuples.get(5).get("b_ls"));
+
+    // multi selection, sort
+    stream = factory.constructStream("cartesian("
+    +   "search(collection1, q=*:*, fl=\"id,a_ss,b_ls\", sort=\"id asc\"),"
+    +   "a_ss,"
+    +   "b_ls,"
+    +   "productSort=\"b_ls DESC\""
+    + ")");
+    tuples = getTuples(stream);
+    
+    assertEquals(20, tuples.size()); // (5 * 3) + 5
+    assertOrder(tuples, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1);
+    assertEquals("a", tuples.get(0).get("a_ss"));
+    assertEquals(3L, tuples.get(0).get("b_ls"));
+    assertEquals("b", tuples.get(1).get("a_ss"));
+    assertEquals(3L, tuples.get(1).get("b_ls"));
+    assertEquals("c", tuples.get(2).get("a_ss"));
+    assertEquals(3L, tuples.get(2).get("b_ls"));
+    assertEquals("d", tuples.get(3).get("a_ss"));
+    assertEquals(3L, tuples.get(3).get("b_ls"));
+    assertEquals("e", tuples.get(4).get("a_ss"));
+    assertEquals(3L, tuples.get(4).get("b_ls"));
+    
+    assertEquals("a", tuples.get(5).get("a_ss"));
+    assertEquals(2L, tuples.get(5).get("b_ls"));
+    assertEquals("b", tuples.get(6).get("a_ss"));
+    assertEquals(2L, tuples.get(6).get("b_ls"));
+    assertEquals("c", tuples.get(7).get("a_ss"));
+    assertEquals(2L, tuples.get(7).get("b_ls"));
+    assertEquals("d", tuples.get(8).get("a_ss"));
+    assertEquals(2L, tuples.get(8).get("b_ls"));
+    assertEquals("e", tuples.get(9).get("a_ss"));
+    assertEquals(2L, tuples.get(9).get("b_ls"));
+
+  }
+
+  
+  @Test
   public void testParallelComplementStream() throws Exception {
 
     new UpdateRequest()

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/92297b58/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/FieldEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/FieldEvaluatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/FieldEvaluatorTest.java
new file mode 100644
index 0000000..b1c67c7
--- /dev/null
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/FieldEvaluatorTest.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.solrj.io.stream.eval;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.PriorityQueue;
+
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.solr.client.solrj.io.Tuple;
+import org.apache.solr.client.solrj.io.eval.FieldEvaluator;
+import org.junit.Test;
+
+import junit.framework.Assert;
+
+public class FieldEvaluatorTest extends LuceneTestCase {
+
+  Map<String, Object> values;
+  
+  public FieldEvaluatorTest() {
+    super();
+    
+    values = new HashMap<String,Object>();
+  }
+    
+  @SuppressWarnings("serial")
+  @Test
+  public void listTypes() throws Exception{
+    values.clear();
+    values.put("a", new ArrayList<Boolean>(){{ add(true); add(false); }});
+    values.put("b", new ArrayList<Double>(){{ add(0.0); add(1.1); }});
+    values.put("c", new ArrayList<Integer>(){{ add(0); add(1); }});
+    values.put("d", new ArrayList<Long>(){{ add(0L); add(1L); }});
+    values.put("e", new ArrayList<String>(){{ add("first"); add("second"); }});
+    
+    Tuple tuple = new Tuple(values);
+    
+    for(String fieldName : new String[]{ "a", "b", "c", "d", "e" }){
+      Assert.assertTrue(new FieldEvaluator(fieldName).evaluate(tuple) instanceof Collection);
+      Assert.assertEquals(2, ((Collection<?>)new FieldEvaluator(fieldName).evaluate(tuple)).size());
+    }
+    
+    Assert.assertEquals(false, ((Collection<?>)new FieldEvaluator("a").evaluate(tuple)).toArray()[1]);
+    Assert.assertEquals(1.1, ((Collection<?>)new FieldEvaluator("b").evaluate(tuple)).toArray()[1]);
+    Assert.assertEquals(1, ((Collection<?>)new FieldEvaluator("c").evaluate(tuple)).toArray()[1]);
+    Assert.assertEquals(1L, ((Collection<?>)new FieldEvaluator("d").evaluate(tuple)).toArray()[1]);
+    Assert.assertEquals("second", ((Collection<?>)new FieldEvaluator("e").evaluate(tuple)).toArray()[1]);
+  }
+  
+  @Test
+  public void arrayTypes() throws Exception{
+    values.clear();
+    values.put("a", new Boolean[]{ true, false });
+    values.put("b", new Double[]{ 0.0, 1.1 });
+    values.put("c", new Integer[]{ 0, 1 });
+    values.put("d", new Long[]{ 0L, 1L });
+    values.put("e", new String[]{ "first", "second" });
+    
+    Tuple tuple = new Tuple(values);
+    
+    for(String fieldName : new String[]{ "a", "b", "c", "d", "e" }){
+      Assert.assertTrue(new FieldEvaluator(fieldName).evaluate(tuple) instanceof Collection);
+      Assert.assertEquals(2, ((Collection<?>)new FieldEvaluator(fieldName).evaluate(tuple)).size());
+    }
+    
+    Assert.assertEquals(false, ((Collection<?>)new FieldEvaluator("a").evaluate(tuple)).toArray()[1]);
+    Assert.assertEquals(1.1, ((Collection<?>)new FieldEvaluator("b").evaluate(tuple)).toArray()[1]);
+    Assert.assertEquals(1, ((Collection<?>)new FieldEvaluator("c").evaluate(tuple)).toArray()[1]);
+    Assert.assertEquals(1L, ((Collection<?>)new FieldEvaluator("d").evaluate(tuple)).toArray()[1]);
+    Assert.assertEquals("second", ((Collection<?>)new FieldEvaluator("e").evaluate(tuple)).toArray()[1]);
+  }
+  
+  @SuppressWarnings("serial")
+  @Test
+  public void iterableTypes() throws Exception{
+    values.clear();
+    
+    values.put("a", new PriorityQueue<Boolean>(){{ add(true); add(false); }});
+    values.put("b", new PriorityQueue<Double>(){{ add(0.0); add(1.1); }});
+    values.put("c", new PriorityQueue<Integer>(){{ add(0); add(1); }});
+    values.put("d", new PriorityQueue<Long>(){{ add(0L); add(1L); }});
+    values.put("e", new PriorityQueue<String>(){{ add("first"); add("second"); }});
+    
+    Tuple tuple = new Tuple(values);
+    
+    for(String fieldName : new String[]{ "a", "b", "c", "d", "e" }){
+      Assert.assertTrue(new FieldEvaluator(fieldName).evaluate(tuple) instanceof Collection);
+      Assert.assertEquals(2, ((Collection<?>)new FieldEvaluator(fieldName).evaluate(tuple)).size());
+    }
+    
+    // the priority queue is doing natural ordering, so false is first
+    Assert.assertEquals(true, ((Collection<?>)new FieldEvaluator("a").evaluate(tuple)).toArray()[1]);
+    Assert.assertEquals(1.1, ((Collection<?>)new FieldEvaluator("b").evaluate(tuple)).toArray()[1]);
+    Assert.assertEquals(1, ((Collection<?>)new FieldEvaluator("c").evaluate(tuple)).toArray()[1]);
+    Assert.assertEquals(1L, ((Collection<?>)new FieldEvaluator("d").evaluate(tuple)).toArray()[1]);
+    Assert.assertEquals("second", ((Collection<?>)new FieldEvaluator("e").evaluate(tuple)).toArray()[1]);
+  }
+}


[41/46] lucene-solr:jira/solr-9959: Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/lucene-solr

Posted by ab...@apache.org.
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/lucene-solr


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/88c3c3c5
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/88c3c3c5
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/88c3c3c5

Branch: refs/heads/jira/solr-9959
Commit: 88c3c3c580417b38825dde34c98653a1c995920b
Parents: b7042c1 725cd4e
Author: Cao Manh Dat <da...@apache.org>
Authored: Thu Mar 23 07:31:37 2017 +0700
Committer: Cao Manh Dat <da...@apache.org>
Committed: Thu Mar 23 07:31:37 2017 +0700

----------------------------------------------------------------------
 .../org/apache/solr/schema/DatePointField.java  | 62 ++++++++++++++++++++
 .../solr/search/facet/FacetFieldProcessor.java  |  6 +-
 .../solr/search/facet/FacetProcessor.java       |  6 +-
 3 files changed, 66 insertions(+), 8 deletions(-)
----------------------------------------------------------------------



[03/46] lucene-solr:jira/solr-9959: SOLR-9516: Fix: Admin UI (angular) didn't work with Kerberos

Posted by ab...@apache.org.
SOLR-9516: Fix: Admin UI (angular) didn't work with Kerberos


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/65c695b0
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/65c695b0
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/65c695b0

Branch: refs/heads/jira/solr-9959
Commit: 65c695b025ad0efb952494f767c1ec9fa44a4924
Parents: e7b87f5
Author: Ishan Chattopadhyaya <is...@apache.org>
Authored: Wed Mar 15 17:25:06 2017 +0530
Committer: Ishan Chattopadhyaya <is...@apache.org>
Committed: Wed Mar 15 17:25:06 2017 +0530

----------------------------------------------------------------------
 solr/CHANGES.txt                | 2 ++
 solr/webapp/web/WEB-INF/web.xml | 2 +-
 2 files changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/65c695b0/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index b3ce30c..0f1f488 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -253,6 +253,8 @@ Bug Fixes
 
 * SOLR-10184: Fix bin/solr so it can run properly on java9 (hossman, Uwe Schindler)
 
+* SOLR-9516: Admin UI (angular) didn't work with Kerberos (Cassandra Targett, Amrit Sarkar via Ishan Chattopadhyaya)
+
 Optimizations
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/65c695b0/solr/webapp/web/WEB-INF/web.xml
----------------------------------------------------------------------
diff --git a/solr/webapp/web/WEB-INF/web.xml b/solr/webapp/web/WEB-INF/web.xml
index 5278ae5..5ebce10 100644
--- a/solr/webapp/web/WEB-INF/web.xml
+++ b/solr/webapp/web/WEB-INF/web.xml
@@ -56,7 +56,7 @@
     -->
     <init-param>
       <param-name>excludePatterns</param-name>
-      <param-value>/css/.+,/js/.+,/img/.+,/tpl/.+</param-value>
+      <param-value>/libs/.+,/css/.+,/js/.+,/img/.+,/tpl/.+</param-value>
     </init-param>
   </filter>
 


[06/46] lucene-solr:jira/solr-9959: SOLR-10085: SQL result set fields should be ordered by the field list

Posted by ab...@apache.org.
SOLR-10085: SQL result set fields should be ordered by the field list


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/b46e09c7
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/b46e09c7
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/b46e09c7

Branch: refs/heads/jira/solr-9959
Commit: b46e09c79f849d9211b3de235788bbf32d7aa84b
Parents: be9fea1
Author: Joel Bernstein <jb...@apache.org>
Authored: Wed Mar 15 18:31:14 2017 -0400
Committer: Joel Bernstein <jb...@apache.org>
Committed: Wed Mar 15 18:31:45 2017 -0400

----------------------------------------------------------------------
 .../org/apache/solr/handler/SQLHandler.java     | 37 +++++++++++++++-----
 .../org/apache/solr/handler/TestSQLHandler.java | 31 ++++++++++++++++
 .../org/apache/solr/client/solrj/io/Tuple.java  | 21 +++++++----
 3 files changed, 74 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b46e09c7/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/SQLHandler.java b/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
index d65ea56..7563fe8 100644
--- a/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
@@ -34,7 +34,9 @@ import org.apache.solr.client.solrj.io.stream.ExceptionStream;
 import org.apache.solr.client.solrj.io.stream.JDBCStream;
 import org.apache.solr.client.solrj.io.stream.TupleStream;
 import org.apache.solr.common.SolrException;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.handler.sql.CalciteSolrDriver;
@@ -74,6 +76,9 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware, Per
 
   public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
     ModifiableSolrParams params = new ModifiableSolrParams(req.getParams());
+    params = adjustParams(params);
+    req.setParams(params);
+
     String sql = params.get("stmt");
     // Set defaults for parameters
     params.set("numWorkers", params.getInt("numWorkers", 1));
@@ -139,6 +144,8 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware, Per
   private class SqlHandlerStream extends JDBCStream {
     private final boolean includeMetadata;
     private boolean firstTuple = true;
+    List<String> metadataFields = new ArrayList<>();
+    Map<String, String> metadataAliases = new HashMap<>();
 
     SqlHandlerStream(String connectionUrl, String sqlQuery, StreamComparator definedSort,
                      Properties connectionProperties, String driverClassName, boolean includeMetadata)
@@ -151,7 +158,7 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware, Per
     @Override
     public Tuple read() throws IOException {
       // Return a metadata tuple as the first tuple and then pass through to the JDBCStream.
-      if(includeMetadata && firstTuple) {
+      if(firstTuple) {
         try {
           Map<String, Object> fields = new HashMap<>();
 
@@ -159,8 +166,6 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware, Per
 
           ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
 
-          List<String> metadataFields = new ArrayList<>();
-          Map<String, String> metadataAliases = new HashMap<>();
           for(int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
             String columnName = resultSetMetaData.getColumnName(i);
             String columnLabel = resultSetMetaData.getColumnLabel(i);
@@ -168,16 +173,30 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware, Per
             metadataAliases.put(columnName, columnLabel);
           }
 
-          fields.put("isMetadata", true);
-          fields.put("fields", metadataFields);
-          fields.put("aliases", metadataAliases);
-          return new Tuple(fields);
+          if(includeMetadata) {
+            fields.put("isMetadata", true);
+            fields.put("fields", metadataFields);
+            fields.put("aliases", metadataAliases);
+            return new Tuple(fields);
+          }
         } catch (SQLException e) {
           throw new IOException(e);
         }
-      } else {
-        return super.read();
       }
+
+      Tuple tuple = super.read();
+      if(!tuple.EOF) {
+        tuple.fieldNames = metadataFields;
+        tuple.fieldLabels = metadataAliases;
+      }
+      return tuple;
     }
   }
+
+  private ModifiableSolrParams adjustParams(SolrParams params) {
+    ModifiableSolrParams adjustedParams = new ModifiableSolrParams();
+    adjustedParams.add(params);
+    adjustedParams.add(CommonParams.OMIT_HEADER, "true");
+    return adjustedParams;
+  }
 }
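
From the client side, a hedged sketch of reading the metadata tuple that
SqlHandlerStream now builds up front; "includeMetadata" is assumed to be the
request parameter backing the includeMetadata flag, and the base URL and
statement are placeholders:

    import java.util.List;
    import org.apache.solr.client.solrj.io.Tuple;
    import org.apache.solr.client.solrj.io.stream.SolrStream;
    import org.apache.solr.common.params.CommonParams;
    import org.apache.solr.common.params.ModifiableSolrParams;

    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set(CommonParams.QT, "/sql");
    params.set("stmt", "select id, field_i, str_s from collection1 where text='XXXX'");
    params.set("includeMetadata", "true");   // assumed parameter name

    SolrStream stream = new SolrStream("http://localhost:8983/solr/collection1", params);
    try {
      stream.open();
      Tuple first = stream.read();
      if (Boolean.TRUE.equals(first.get("isMetadata"))) {
        List<String> columns = first.getStrings("fields");   // select-list order
      }
    } finally {
      stream.close();
    }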

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b46e09c7/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java b/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java
index f222cee..cb16f03 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java
@@ -16,21 +16,30 @@
  */
 package org.apache.solr.handler;
 
+import java.io.BufferedReader;
 import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Locale;
 
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.InputStreamResponseParser;
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.stream.ExceptionStream;
 import org.apache.solr.client.solrj.io.stream.SolrStream;
 import org.apache.solr.client.solrj.io.stream.TupleStream;
+import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.SolrParams;
 
+import org.apache.solr.common.util.NamedList;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -161,6 +170,9 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase {
       assert(tuple.getLong("field_i") == 7);
       assert(tuple.get("str_s").equals("a"));
 
+      //Assert field order
+      assertResponseContains(clients.get(0), sParams, "{\"docs\":[{\"id\":8,\"field_i\":60,\"str_s\":\"c\"}");
+
       //Test unlimited unsorted result. Should sort on _version_ desc
       sParams = mapParams(CommonParams.QT, "/sql", "stmt", "select id, field_i, str_s from collection1 where text='XXXX'");
 
@@ -2362,4 +2374,23 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase {
     return params;
   }
 
+  public void assertResponseContains(SolrClient server, SolrParams requestParams, String json) throws IOException, SolrServerException {
+    String p = requestParams.get("qt");
+    if(p != null) {
+      ModifiableSolrParams modifiableSolrParams = (ModifiableSolrParams) requestParams;
+      modifiableSolrParams.remove("qt");
+    }
+
+    QueryRequest query = new QueryRequest( requestParams );
+    query.setPath(p);
+    query.setResponseParser(new InputStreamResponseParser("json"));
+    query.setMethod(SolrRequest.METHOD.POST);
+    NamedList<Object> genericResponse = server.request(query);
+    InputStream stream = (InputStream)genericResponse.get("stream");
+    InputStreamReader reader = new InputStreamReader(stream, "UTF-8");
+    BufferedReader bufferedReader = new BufferedReader(reader);
+    String response = bufferedReader.readLine();
+    assertTrue(response.contains(json));
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b46e09c7/solr/solrj/src/java/org/apache/solr/client/solrj/io/Tuple.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/Tuple.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/Tuple.java
index 58d948d..fdf44c9 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/Tuple.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/Tuple.java
@@ -45,6 +45,8 @@ public class Tuple implements Cloneable, MapWriter {
   public boolean EXCEPTION;
 
   public Map fields = new HashMap();
+  public List<String> fieldNames;
+  public Map<String, String> fieldLabels;
 
   public Tuple(Map fields) {
     if(fields.containsKey("EOF")) {
@@ -198,12 +200,19 @@ public class Tuple implements Cloneable, MapWriter {
 
   @Override
   public void writeMap(EntryWriter ew) throws IOException {
-    fields.forEach((k, v) -> {
-      try {
-        ew.put((String)k,v);
-      } catch (IOException e) {
-        throw new RuntimeException(e);
+    if(fieldNames == null) {
+      fields.forEach((k, v) -> {
+        try {
+          ew.put((String) k, v);
+        } catch (IOException e) {
+          throw new RuntimeException(e);
+        }
+      });
+    } else {
+      for(String fieldName : fieldNames) {
+        String label = fieldLabels.get(fieldName);
+        ew.put(label, fields.get(label));
       }
-    });
+    }
   }
 }
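
For context, a minimal sketch of how a producer of Tuples might populate the new fieldNames/fieldLabels members so that writeMap() above emits columns in SELECT order instead of HashMap iteration order. This is illustrative only and not part of the patch; the column names are borrowed from the TestSQLHandler assertion, and the identity labels stand in for whatever aliases the SQL layer supplies.

import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.solr.client.solrj.io.Tuple;

public class TupleColumnOrderSketch {
  public static void main(String[] args) {
    Map fields = new HashMap();
    fields.put("id", 8);
    fields.put("field_i", 60);
    fields.put("str_s", "c");

    Tuple tuple = new Tuple(fields);

    // Preserve the SELECT order: id, field_i, str_s
    tuple.fieldNames = Arrays.asList("id", "field_i", "str_s");

    // Identity labels here; a "SELECT id AS document_id" style alias would map id -> document_id.
    Map<String, String> labels = new LinkedHashMap<>();
    labels.put("id", "id");
    labels.put("field_i", "field_i");
    labels.put("str_s", "str_s");
    tuple.fieldLabels = labels;

    // writeMap() will now iterate fieldNames and emit each labeled column in order.
  }
}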


[17/46] lucene-solr:jira/solr-9959: SOLR-9185: Solr's edismax and Lucene/standard query parsers should optionally not split on whitespace before sending terms to analysis

Posted by ab...@apache.org.
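
For readers skimming the digest: the change adds a "sow" (split-on-whitespace) request parameter; with sow=false the parsers pass whole whitespace-separated runs to analysis so multi-token synonyms (e.g. "wi fi => wifi") can fire. A minimal client-side sketch follows; the endpoint, collection name and field list are assumptions, and only the sow/edismax usage comes from the change itself.

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;

public class SplitOnWhitespaceSketch {
  public static void main(String[] args) throws Exception {
    // Assumed local endpoint and collection; adjust for a real deployment.
    try (HttpSolrClient client =
             new HttpSolrClient.Builder("http://localhost:8983/solr/collection1").build()) {
      SolrQuery query = new SolrQuery("wi fi");   // source text of a multi-word synonym
      query.set("defType", "edismax");
      query.set("qf", "text_sw title");           // field names borrowed from the tests below
      query.set("sow", "false");                  // keep "wi fi" together through analysis
      QueryResponse rsp = client.query(query);
      System.out.println("hits: " + rsp.getResults().getNumFound());
    }
  }
}
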
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d1b2fb33/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java b/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java
index c3b119f..27bf40f 100644
--- a/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java
+++ b/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java
@@ -16,7 +16,9 @@
  */
 package org.apache.solr.search;
 
+import java.util.Arrays;
 import java.util.HashSet;
+import java.util.Map;
 import java.util.Random;
 import java.util.Set;
 
@@ -32,9 +34,11 @@ import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.schema.TextField;
 import org.apache.solr.util.SolrPluginUtils;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.noggit.ObjectBuilder;
 
 public class TestExtendedDismaxParser extends SolrTestCaseJ4 {
 
@@ -62,7 +66,7 @@ public class TestExtendedDismaxParser extends SolrTestCaseJ4 {
                  "foo_i", "8"
     ));
     assertU(adoc("id", "47", "trait_ss", "Pig",
-            "text", "line up and fly directly at the enemy death cannons, clogging them with wreckage!"));
+            "text_sw", "line up and fly directly at the enemy death cannons, clogging them with wreckage!"));
     assertU(adoc("id", "48", "text_sw", "this has gigabyte potential", "foo_i","100"));
     assertU(adoc("id", "49", "text_sw", "start the big apple end", "foo_i","-100"));
     assertU(adoc("id", "50", "text_sw", "start new big city end"));
@@ -88,98 +92,109 @@ public class TestExtendedDismaxParser extends SolrTestCaseJ4 {
     assertU(adoc("id", "69", "text_sw", "ties barbie"));
     assertU(adoc("id", "70", "text_sw", "hair"));
     assertU(adoc("id", "71", "text_sw", "ties"));
+    assertU(adoc("id", "72", "text_sw", "wifi ATM"));
     assertU(commit());
   }
 
   @Test
   public void testSyntax() throws Exception {
-    // a bare * should be treated as *:*
-    assertJQ(req("defType","edismax", "q","*", "df","doesnotexist_s")
-        ,"/response/docs/[0]=="   // make sure we get something...
-    );
-    assertJQ(req("defType","edismax", "q","doesnotexist_s:*")
-        ,"/response/numFound==0"   // nothing should be found
-    );
-    assertJQ(req("defType","edismax","q","doesnotexist_s:*")
-        ,"/response/numFound==0"   // nothing should be found
-    );
-    assertJQ(req("defType","edismax","q","doesnotexist_s:( * * * )")
-        ,"/response/numFound==0"   // nothing should be found
-    );
+    for (String sow : Arrays.asList("true", "false")) {
+      // a bare * should be treated as *:*
+      assertJQ(req("defType", "edismax", "q", "*", "df", "doesnotexist_s", "sow", sow)
+          , "/response/docs/[0]=="   // make sure we get something...
+      );
+      assertJQ(req("defType", "edismax", "q", "doesnotexist_s:*", "sow", sow)
+          , "/response/numFound==0"   // nothing should be found
+      );
+      assertJQ(req("defType", "edismax", "q", "doesnotexist_s:*", "sow", sow)
+          , "/response/numFound==0"   // nothing should be found
+      );
+      assertJQ(req("defType", "edismax", "q", "doesnotexist_s:( * * * )", "sow", sow)
+          , "/response/numFound==0"   // nothing should be found
+      );
+    }
   }
 
 
   public void testTrailingOperators() throws Exception {
-    // really just test that exceptions aren't thrown by
-    // single + -
-
-    assertJQ(req("defType","edismax", "q","-")
-        ,"/response==");
-
-    assertJQ(req("defType","edismax", "q","+")
-        ,"/response==");
-
-    assertJQ(req("defType","edismax", "q","+ - +")
-        ,"/response==");
-
-    assertJQ(req("defType","edismax", "q","- + -")
-        ,"/response==");
-
-    assertJQ(req("defType","edismax", "q","id:47 +")
-        ,"/response/numFound==1");
-
-    assertJQ(req("defType","edismax", "q","id:47 -")
-        ,"/response/numFound==1");
-
-    Random r = random();
-    for (int i=0; i<100; i++) {
-      StringBuilder sb = new StringBuilder();
-      for (int j=0; j<r.nextInt(10); j++) {
-        switch (r.nextInt(3)) {
-          case 0: sb.append(' '); break;
-          case 1: sb.append('+'); break;
-          case 2: sb.append('-'); break;
-          case 3: sb.append((char)r.nextInt(127)); break;
+    for (String sow : Arrays.asList("true", "false")) {
+      // really just test that exceptions aren't thrown by
+      // single + -
+
+      assertJQ(req("defType", "edismax", "q", "-", "df", "text_sw", "sow", sow)
+          , "/response==");
+
+      assertJQ(req("defType", "edismax", "q", "+", "df", "text_sw", "sow", sow)
+          , "/response==");
+
+      assertJQ(req("defType", "edismax", "q", "+ - +", "df", "text_sw", "sow", sow)
+          , "/response==");
+
+      assertJQ(req("defType", "edismax", "q", "- + -", "df", "text_sw", "sow", sow)
+          , "/response==");
+
+      assertJQ(req("defType", "edismax", "q", "id:47 +", "df", "text_sw", "sow", sow)
+          , "/response/numFound==1");
+
+      assertJQ(req("defType", "edismax", "q", "id:47 -", "df", "text_sw", "sow", sow)
+          , "/response/numFound==1");
+
+      Random r = random();
+      for (int i=0; i<100; i++) {
+        StringBuilder sb = new StringBuilder();
+        for (int j=0; j<r.nextInt(10); j++) {
+          switch (r.nextInt(3)) {
+            case 0: sb.append(' '); break;
+            case 1: sb.append('+'); break;
+            case 2: sb.append('-'); break;
+            case 3: sb.append((char)r.nextInt(127)); break;
+          }
         }
-      }
 
-      String q = sb.toString();
-      assertJQ(req("defType","edismax", "q",q)
-          ,"/response==");
+        String q = sb.toString();
+        assertJQ(req("defType", "edismax", "q", q, "df", "text_sw", "sow", sow)
+            , "/response==");
+      }
     }
   }
 
 
   public void testLowercaseOperators() {
-    assertQ("Upper case operator",
-        req("q","Zapp AND Brannigan",
-            "qf", "name",
-            "lowercaseOperators", "false",
-            "defType","edismax")
-        ,"*[count(//doc)=1]");
-    
-    assertQ("Upper case operator, allow lowercase",
-        req("q","Zapp AND Brannigan",
-            "qf", "name",
-            "lowercaseOperators", "true",
-            "defType","edismax")
-        ,"*[count(//doc)=1]");
-    
-    assertQ("Lower case operator, don't allow lowercase operators",
-        req("q","Zapp and Brannigan",
-            "qf", "name",
-            "q.op", "AND", 
-            "lowercaseOperators", "false",
-            "defType","edismax")
-        ,"*[count(//doc)=0]");
-    
-    assertQ("Lower case operator, allow lower case operators",
-        req("q","Zapp and Brannigan",
-            "qf", "name",
-            "q.op", "AND", 
-            "lowercaseOperators", "true",
-            "defType","edismax")
-        ,"*[count(//doc)=1]");
+    for (String sow : Arrays.asList("true", "false")) {
+      assertQ("Upper case operator",
+          req("q", "Zapp AND Brannigan",
+              "qf", "name",
+              "lowercaseOperators", "false",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=1]");
+
+      assertQ("Upper case operator, allow lowercase",
+          req("q", "Zapp AND Brannigan",
+              "qf", "name",
+              "lowercaseOperators", "true",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=1]");
+
+      assertQ("Lower case operator, don't allow lowercase operators",
+          req("q", "Zapp and Brannigan",
+              "qf", "name",
+              "q.op", "AND",
+              "lowercaseOperators", "false",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=0]");
+
+      assertQ("Lower case operator, allow lower case operators",
+          req("q", "Zapp and Brannigan",
+              "qf", "name",
+              "q.op", "AND",
+              "lowercaseOperators", "true",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=1]");
+    }
   }
     
   // test the edismax query parser based on the dismax parser
@@ -249,42 +264,42 @@ public class TestExtendedDismaxParser extends SolrTestCaseJ4 {
            , twor
            );
    
-   assertQ(req("defType", "edismax", "qf", "name title subject text",
+   assertQ(req("defType", "edismax", "qf", "name title subject text_sw",
                "q","op"), twor
     );
    assertQ(req("defType", "edismax", 
-               "qf", "name title subject text",
+               "qf", "name title subject text_sw",
                "q.op", "AND",
                "q","Order op"), oner
     );
    assertQ(req("defType", "edismax", 
-               "qf", "name title subject text",
+               "qf", "name title subject text_sw",
                "q.op", "OR",
                "q","Order op"), twor
     );
-   assertQ(req("defType", "edismax", "qf", "name title subject text",
+   assertQ(req("defType", "edismax", "qf", "name title subject text_sw",
                "q","Order AND op"), oner
     );
-   assertQ(req("defType", "edismax", "qf", "name title subject text",
+   assertQ(req("defType", "edismax", "qf", "name title subject text_sw",
                "q","Order and op"), oner
     );
-    assertQ(req("defType", "edismax", "qf", "name title subject text",
+    assertQ(req("defType", "edismax", "qf", "name title subject text_sw",
                "q","+Order op"), oner
     );
-    assertQ(req("defType", "edismax", "qf", "name title subject text",
+    assertQ(req("defType", "edismax", "qf", "name title subject text_sw",
                "q","Order OR op"), twor
     );
-    assertQ(req("defType", "edismax", "qf", "name title subject text",
+    assertQ(req("defType", "edismax", "qf", "name title subject text_sw",
                "q","Order or op"), twor
     );
-    assertQ(req("defType", "edismax", "qf", "name title subject text",
+    assertQ(req("defType", "edismax", "qf", "name title subject text_sw",
                "q","*:*"), allr
     );
 
-    assertQ(req("defType", "edismax", "qf", "name title subject text",
+    assertQ(req("defType", "edismax", "qf", "name title subject text_sw",
            "q","star OR (-star)"), allr
     );
-    assertQ(req("defType", "edismax", "qf", "name title subject text",
+    assertQ(req("defType", "edismax", "qf", "name title subject text_sw",
            "q","id:42 OR (-id:42)"), allr
     );
 
@@ -536,7 +551,7 @@ public class TestExtendedDismaxParser extends SolrTestCaseJ4 {
         "//str[@name='parsedquery_toString'][.='+(id:42)^5.0']");
     
     
-    assertQ(req("defType","edismax", "uf","-*", "q","cannons"),
+    assertQ(req("defType","edismax", "uf","-*", "q","cannons", "qf","text_sw"),
         oner);
     
     assertQ(req("defType","edismax", "uf","* -id", "q","42", "qf", "id"), oner);
@@ -870,7 +885,7 @@ public class TestExtendedDismaxParser extends SolrTestCaseJ4 {
         "*[count(//doc)=3]");
     assertQ(
         "Might be double-escaping a client-escaped colon", 
-        req("q", "text_sw:(theos OR thistokenhasa\\:preescapedcolon OR theou)", "defType", "edismax", "qf", "text"),
+        req("q", "text_sw:(theos OR thistokenhasa\\:preescapedcolon OR theou)", "defType", "edismax", "qf", "text_sw"),
         "*[count(//doc)=3]");    
     
   }
@@ -1032,56 +1047,56 @@ public class TestExtendedDismaxParser extends SolrTestCaseJ4 {
     // "line up and fly directly at the enemy death cannons, clogging them with wreckage!"
     assertQ("test default operator with mm (AND + 0% => 0 hits)",
         req("q", "(line notfound) OR notfound",
-            "qf", "text",
+            "qf", "text_sw",
             "q.op", "AND",
             "mm", "0%",
             "defType", "edismax")
         , "*[count(//doc)=0]");
     assertQ("test default operator with mm (OR + 0% => 1 hit)",
         req("q", "line notfound OR notfound",
-            "qf", "text",
+            "qf", "text_sw",
             "q.op", "OR",
             "mm", "0%",
             "defType", "edismax")
         , "*[count(//doc)=1]");
     assertQ("test default operator with mm (OR + 100% => 0 hits)",
         req("q", "line notfound OR notfound",
-            "qf", "text",
+            "qf", "text_sw",
             "q.op", "OR",
             "mm", "100%",
             "defType", "edismax")
         , "*[count(//doc)=0]");
     assertQ("test default operator with mm (OR + 35% => 1 hit)",
         req("q", "line notfound notfound2 OR notfound",
-            "qf", "text",
+            "qf", "text_sw",
             "q.op", "OR",
             "mm", "35%",
             "defType", "edismax")
         , "*[count(//doc)=1]");
     assertQ("test default operator with mm (OR + 75% => 0 hits)",
         req("q", "line notfound notfound2 OR notfound3",
-            "qf", "text",
+            "qf", "text_sw",
             "q.op", "OR",
             "mm", "75%",
             "defType", "edismax")
         , "*[count(//doc)=0]");
     assertQ("test default operator with mm (AND + 0% => 1 hit)",
         req("q", "(line enemy) OR notfound",
-            "qf", "text",
+            "qf", "text_sw",
             "q.op", "AND",
             "mm", "0%",
             "defType", "edismax")
         , "*[count(//doc)=1]");
     assertQ("test default operator with mm (AND + 50% => 1 hit)",
         req("q", "(line enemy) OR (line notfound) OR (death cannons) OR (death notfound)",
-            "qf", "text",
+            "qf", "text_sw",
             "q.op", "AND",
             "mm", "50%",
             "defType", "edismax")
         , "*[count(//doc)=1]");
     assertQ("test default operator with mm (AND + 75% => 0 hits)",
         req("q", "(line enemy) OR (line notfound) OR (death cannons) OR (death notfound)",
-            "qf", "text",
+            "qf", "text_sw",
             "q.op", "AND",
             "mm", "75%",
             "defType", "edismax")
@@ -1092,214 +1107,257 @@ public class TestExtendedDismaxParser extends SolrTestCaseJ4 {
    * Test that minShouldMatch applies to Optional terms only
    */
   public void testMinShouldMatchOptional() throws Exception {
-    assertQ("test minShouldMatch (top level optional terms only)",
-        req("q", "stocks oil gold", // +(((text_sw:stock) (text_sw:oil) (text_sw:gold))~1)
-            "qf", "text_sw",
-            "mm", "50%",
-            "defType", "edismax")
-        , "*[count(//doc)=4]");
-    
-    assertQ("test minShouldMatch (top level optional and negative terms mm=50%)",
-        req("q", "stocks oil gold -stockade", // +(((text_sw:stock) (text_sw:oil) (text_sw:gold) -(text_sw:stockad))~1)
-            "qf", "text_sw",
-            "mm", "50%",
-            "defType", "edismax")
-        , "*[count(//doc)=3]");
-
-    assertQ("test minShouldMatch (top level optional and negative terms mm=100%)",
-        req("q", "stocks gold -stockade", // +(((text_sw:stock) (text_sw:oil) (text_sw:gold) -(text_sw:stockad))~2)
-            "qf", "text_sw",
-            "mm", "100%",
-            "defType", "edismax")
-        , "*[count(//doc)=1]");
-
-    assertQ("test minShouldMatch (top level required terms only)",
-        req("q", "stocks AND oil", // +(+(text_sw:stock) +(text_sw:oil))
-            "qf", "text_sw",
-            "mm", "50%",
-            "defType", "edismax")
-        , "*[count(//doc)=1]");
-
-    assertQ("test minShouldMatch (top level optional and required terms)",
-        req("q", "oil gold +stocks", // +(((text_sw:oil) (text_sw:gold) +(text_sw:stock))~1)
-            "qf", "text_sw",
-            "mm", "50%",
-            "defType", "edismax")
-        , "*[count(//doc)=3]");
-
-    assertQ("test minShouldMatch (top level optional with explicit OR and parens)",
-        req("q", "(snake OR stocks) oil",
-            "qf", "text_sw",
-            "mm", "100%",
-            "defType", "edismax")
-        , "*[count(//doc)=2]");
-
-    // The results for these two appear odd, but are correct as per BooleanQuery processing.
-    // See: http://searchhub.org/2011/12/28/why-not-and-or-and-not/
-    // Non-parenthesis OR/AND precedence is not true to abstract boolean logic in solr when q.op = AND
-    //   and when q.op = OR all three clauses are top-level and optional so mm takes over
-    assertQ("test minShouldMatch (top level optional with explicit OR without parens)",
-        req("q", "snake OR stocks oil",
-            "qf", "text_sw",
-            "q.op", "OR",
-            "mm", "100%",
-            "defType", "edismax")
-        , "*[count(//doc)=0]");
-    assertQ("test minShouldMatch (top level optional with explicit OR without parens)",
-        req("q", "snake OR stocks oil",
-            "qf", "text_sw",
-            "q.op", "AND",
-            "mm", "100%",
-            "defType", "edismax")
-        , "*[count(//doc)=0]");
-
-    // SOLR-9174
-    assertQ("test minShouldMatch=1<-1 with explicit OR, one impossible clause, and no explicit q.op",
-        req("q", "barbie OR (hair AND nonexistentword)",
-            "qf", "text_sw",
-            "mm", "1<-1",
-            "defType", "edismax")
-        , "*[count(//doc)=3]");
+    for (String sow : Arrays.asList("true", "false")) {
+      assertQ("test minShouldMatch (top level optional terms only)",
+          req("q", "stocks oil gold", // +(((text_sw:stock) (text_sw:oil) (text_sw:gold))~1)
+              "qf", "text_sw",
+              "mm", "50%",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=4]");
+
+      assertQ("test minShouldMatch (top level optional terms only and sow=false)",
+          req("q", "stocks oil gold", // +(((text_sw:stock) (text_sw:oil) (text_sw:gold))~1)
+              "qf", "text_sw",
+              "mm", "50%",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=4]");
+
+      assertQ("test minShouldMatch (top level optional and negative terms mm=50%)",
+          req("q", "stocks oil gold -stockade", // +(((text_sw:stock) (text_sw:oil) (text_sw:gold) -(text_sw:stockad))~1)
+              "qf", "text_sw",
+              "mm", "50%",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=3]");
+
+      assertQ("test minShouldMatch (top level optional and negative terms mm=100%)",
+          req("q", "stocks gold -stockade", // +(((text_sw:stock) (text_sw:oil) (text_sw:gold) -(text_sw:stockad))~2)
+              "qf", "text_sw",
+              "mm", "100%",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=1]");
+
+      assertQ("test minShouldMatch (top level required terms only)",
+          req("q", "stocks AND oil", // +(+(text_sw:stock) +(text_sw:oil))
+              "qf", "text_sw",
+              "mm", "50%",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=1]");
+
+      assertQ("test minShouldMatch (top level optional and required terms)",
+          req("q", "oil gold +stocks", // +(((text_sw:oil) (text_sw:gold) +(text_sw:stock))~1)
+              "qf", "text_sw",
+              "mm", "50%",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=3]");
+
+      assertQ("test minShouldMatch (top level optional with explicit OR and parens)",
+          req("q", "(snake OR stocks) oil",
+              "qf", "text_sw",
+              "mm", "100%",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=2]");
+
+      // The results for these two appear odd, but are correct as per BooleanQuery processing.
+      // See: http://searchhub.org/2011/12/28/why-not-and-or-and-not/
+      // Non-parenthesis OR/AND precedence is not true to abstract boolean logic in solr when q.op = AND
+      //   and when q.op = OR all three clauses are top-level and optional so mm takes over
+      assertQ("test minShouldMatch (top level optional with explicit OR without parens)",
+          req("q", "snake OR stocks oil",
+              "qf", "text_sw",
+              "q.op", "OR",
+              "mm", "100%",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=0]");
+      assertQ("test minShouldMatch (top level optional with explicit OR without parens)",
+          req("q", "snake OR stocks oil",
+              "qf", "text_sw",
+              "q.op", "AND",
+              "mm", "100%",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=0]");
+
+      // SOLR-9174
+      assertQ("test minShouldMatch=1<-1 with explicit OR, one impossible clause, and no explicit q.op",
+          req("q", "barbie OR (hair AND nonexistentword)",
+              "qf", "text_sw",
+              "mm", "1<-1",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=3]");
+    }
   }
 
   /* SOLR-8812 */
   @Test
   public void testDefaultMM() throws Exception {
     // Ensure MM is off when explicit operators (+/-/OR/NOT) are used and no explicit mm spec is specified.
-    assertQ("Explicit OR in query with no explicit mm and q.op=AND => mm = 0%",
-        req("q", "oil OR stocks",
-            "qf", "text_sw",
-            "q.op", "AND",
-            "defType", "edismax")
-        , "*[count(//doc)=4]");
-    assertQ("Explicit 'or' in query with lowercaseOperators=true, no explicit mm and q.op=AND => mm = 0%",
-        req("q", "oil or stocks",
-            "qf", "text_sw",
-            "q.op", "AND",
-            "lowercaseOperators", "true",
-            "defType", "edismax")
-        , "*[count(//doc)=4]");
-    assertQ("Explicit OR in query with no explicit mm and no explicit q.op => mm = 0%",
-        req("q", "oil OR stocks",
-            "qf", "text_sw",
-            "defType", "edismax")
-        , "*[count(//doc)=4]");
-    assertQ("No operator in query with no explicit mm and q.op=OR => mm = 0%",
-        req("q", "oil stocks",
-            "qf", "text_sw",
-            "defType", "edismax")
-        , "*[count(//doc)=4]");
-    assertQ("No operator in query with no explicit mm and q.op=AND => mm = 100%",
-        req("q", "oil stocks",
-            "qf", "text_sw",
-            "q.op", "AND",
-            "defType", "edismax")
-        , "*[count(//doc)=1]");
-    assertQ("No operator in query with no explicit mm and q.op=OR => mm = 0%",
-        req("q", "oil stocks",
-            "qf", "text_sw",
-            "q.op", "OR",
-            "defType", "edismax")
-        , "*[count(//doc)=4]");
-
-    assertQ("Explicit '-' operator in query with no explicit mm and no explicit q.op => mm = 0%",
-        req("q", "hair ties -barbie",
-            "qf", "text_sw",
-            "defType", "edismax")
-        , "*[count(//doc)=3]");
-    assertQ("Explicit NOT in query with no explicit mm and no explicit q.op => mm = 0%",
-        req("q", "hair ties NOT barbie",
-            "qf", "text_sw",
-            "defType", "edismax")
-        , "*[count(//doc)=3]");
-
-    assertQ("Explicit '-' operator in query with no explicit mm and q.op=OR => mm = 0%",
-        req("q", "hair ties -barbie",
-            "qf", "text_sw",
-            "q.op", "OR",
-            "defType", "edismax")
-        , "*[count(//doc)=3]");
-    assertQ("Explicit NOT in query with no explicit mm and q.op=OR => mm = 0%",
-        req("q", "hair ties NOT barbie",
-            "qf", "text_sw",
-            "q.op", "OR",
-            "defType", "edismax")
-        , "*[count(//doc)=3]");
-
-    assertQ("Explicit '-' operator in query with no explicit mm and q.op=OR => mm = 0%",
-        req("q", "hair AND ties -barbie",
-            "qf", "text_sw",
-            "q.op", "OR",
-            "defType", "edismax")
-        , "*[count(//doc)=1]");
-    assertQ("Explicit NOT in query with no explicit mm and q.op=OR => mm = 0%",
-        req("q", "hair AND ties -barbie",
-            "qf", "text_sw",
-            "q.op", "OR",
-            "defType", "edismax")
-        , "*[count(//doc)=1]");
-
-    assertQ("No explicit non-AND operator in query with no explicit mm and q.op=OR => mm = 0%",
-        req("q", "hair AND ties barbie",
-            "qf", "text_sw",
-            "q.op", "OR",
-            "defType", "edismax")
-        , "*[count(//doc)=2]");
-    assertQ("No explicit non-AND operator in query with no explicit mm and q.op=AND => mm = 100%",
-        req("q", "hair AND ties barbie",
-            "qf", "text_sw",
-            "q.op", "AND",
-            "defType", "edismax")
-        , "*[count(//doc)=1]");
-    assertQ("No explicit non-AND operator in query with no explicit mm and no explicit q.op => mm = 0%",
-        req("q", "hair AND ties barbie",
-            "qf", "text_sw",
-            "defType", "edismax")
-        , "*[count(//doc)=2]");
-    assertQ("No explicit non-AND operator in query with no explicit mm and no explicit q.op => mm = 0%",
-        req("q", "hair and ties barbie",
-            "qf", "text_sw",
-            "lowercaseOperators", "true",
-            "defType", "edismax")
-        , "*[count(//doc)=2]");
-
-    assertQ("Explicit '-' operator in query with no explicit mm and q.op=AND => mm = 100%",
-        req("q", "hair ties -barbie",
-            "qf", "text_sw",
-            "q.op", "AND",
-            "defType", "edismax")
-        , "*[count(//doc)=1]");
-    assertQ("Explicit NOT in query with no explicit mm and q.op=AND => mm = 100%",
-        req("q", "hair ties NOT barbie",
-            "qf", "text_sw",
-            "q.op", "AND",
-            "defType", "edismax")
-        , "*[count(//doc)=1]");
-
-    assertQ("Explicit OR in query with no explicit mm and q.op=AND => mm = 0%",
-        req("q", "hair OR ties barbie",
-            "qf", "text_sw",
-            "q.op", "AND",
-            "defType", "edismax")
-        , "*[count(//doc)=3]");
-    assertQ("Explicit OR in query with no explicit mm and q.op=OR => mm = 0%",
-        req("q", "hair OR ties barbie",
-            "qf", "text_sw",
-            "q.op", "OR",
-            "defType", "edismax")
-        , "*[count(//doc)=6]");
-    assertQ("Explicit OR in query with no explicit mm and no explicit q.op => mm = 0%",
-        req("q", "hair OR ties barbie",
-            "qf", "text_sw",
-            "defType", "edismax")
-        , "*[count(//doc)=6]");
-
-    assertQ("Explicit '+' operator in query with no explicit mm and q.op=AND => mm = 0%",
-        req("q", "hair ties +barbie",
-            "qf", "text_sw",
-            "q.op", "AND",
-            "defType", "edismax")
-        , "*[count(//doc)=1]");
+    for (String sow : Arrays.asList("true", "false")) {
+      assertQ("Explicit OR in query with no explicit mm and q.op=AND => mm = 0%",
+          req("q", "oil OR stocks",
+              "qf", "text_sw",
+              "q.op", "AND",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=4]");
+      assertQ("Explicit 'or' in query with lowercaseOperators=true, no explicit mm and q.op=AND => mm = 0%",
+          req("q", "oil or stocks",
+              "qf", "text_sw",
+              "q.op", "AND",
+              "lowercaseOperators", "true",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=4]");
+      assertQ("Explicit OR in query with no explicit mm and no explicit q.op => mm = 0%",
+          req("q", "oil OR stocks",
+              "qf", "text_sw",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=4]");
+      assertQ("No operator in query with no explicit mm and q.op=OR => mm = 0%",
+          req("q", "oil stocks",
+              "qf", "text_sw",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=4]");
+      assertQ("No operator in query with no explicit mm and q.op=AND => mm = 100%",
+          req("q", "oil stocks",
+              "qf", "text_sw",
+              "q.op", "AND",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=1]");
+      assertQ("No operator in query with no explicit mm and q.op=OR => mm = 0%",
+          req("q", "oil stocks",
+              "qf", "text_sw",
+              "q.op", "OR",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=4]");
+
+      assertQ("Explicit '-' operator in query with no explicit mm and no explicit q.op => mm = 0%",
+          req("q", "hair ties -barbie",
+              "qf", "text_sw",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=3]");
+      assertQ("Explicit NOT in query with no explicit mm and no explicit q.op => mm = 0%",
+          req("q", "hair ties NOT barbie",
+              "qf", "text_sw",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=3]");
+
+      assertQ("Explicit '-' operator in query with no explicit mm and q.op=OR => mm = 0%",
+          req("q", "hair ties -barbie",
+              "qf", "text_sw",
+              "q.op", "OR",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=3]");
+      assertQ("Explicit NOT in query with no explicit mm and q.op=OR => mm = 0%",
+          req("q", "hair ties NOT barbie",
+              "qf", "text_sw",
+              "q.op", "OR",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=3]");
+
+      assertQ("Explicit '-' operator in query with no explicit mm and q.op=OR => mm = 0%",
+          req("q", "hair AND ties -barbie",
+              "qf", "text_sw",
+              "q.op", "OR",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=1]");
+      assertQ("Explicit NOT in query with no explicit mm and q.op=OR => mm = 0%",
+          req("q", "hair AND ties -barbie",
+              "qf", "text_sw",
+              "q.op", "OR",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=1]");
+
+      assertQ("No explicit non-AND operator in query with no explicit mm and q.op=OR => mm = 0%",
+          req("q", "hair AND ties barbie",
+              "qf", "text_sw",
+              "q.op", "OR",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=2]");
+      assertQ("No explicit non-AND operator in query with no explicit mm and q.op=AND => mm = 100%",
+          req("q", "hair AND ties barbie",
+              "qf", "text_sw",
+              "q.op", "AND",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=1]");
+      assertQ("No explicit non-AND operator in query with no explicit mm and no explicit q.op => mm = 0%",
+          req("q", "hair AND ties barbie",
+              "qf", "text_sw",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=2]");
+      assertQ("No explicit non-AND operator in query with no explicit mm and no explicit q.op => mm = 0%",
+          req("q", "hair and ties barbie",
+              "qf", "text_sw",
+              "lowercaseOperators", "true",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=2]");
+
+      assertQ("Explicit '-' operator in query with no explicit mm and q.op=AND => mm = 100%",
+          req("q", "hair ties -barbie",
+              "qf", "text_sw",
+              "q.op", "AND",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=1]");
+      assertQ("Explicit NOT in query with no explicit mm and q.op=AND => mm = 100%",
+          req("q", "hair ties NOT barbie",
+              "qf", "text_sw",
+              "q.op", "AND",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=1]");
+
+      assertQ("Explicit OR in query with no explicit mm and q.op=AND => mm = 0%",
+          req("q", "hair OR ties barbie",
+              "qf", "text_sw",
+              "q.op", "AND",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=3]");
+      assertQ("Explicit OR in query with no explicit mm and q.op=OR => mm = 0%",
+          req("q", "hair OR ties barbie",
+              "qf", "text_sw",
+              "q.op", "OR",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=6]");
+      assertQ("Explicit OR in query with no explicit mm and no explicit q.op => mm = 0%",
+          req("q", "hair OR ties barbie",
+              "qf", "text_sw",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=6]");
+
+      assertQ("Explicit '+' operator in query with no explicit mm and q.op=AND => mm = 0%",
+          req("q", "hair ties +barbie",
+              "qf", "text_sw",
+              "q.op", "AND",
+              "sow", sow,
+              "defType", "edismax")
+          , "*[count(//doc)=1]");
+    }
   }
 
   public void testEdismaxSimpleExtension() throws SyntaxError {
@@ -1336,6 +1394,380 @@ public class TestExtendedDismaxParser extends SolrTestCaseJ4 {
     
   }
 
+  // LUCENE-7533
+  public void testSplitOnWhitespace_with_autoGeneratePhraseQueries() throws Exception {
+    assertTrue(((TextField)h.getCore().getLatestSchema().getField("text").getType()).getAutoGeneratePhraseQueries());
+
+    try (SolrQueryRequest req = req()) {
+      final QParser qparser = QParser.getParser("{!edismax sow=false fq=text}blah blah)", req);
+      expectThrows(IllegalArgumentException.class, qparser::getQuery);
+    }
+  }
+
+  @Test
+  public void testSplitOnWhitespace_Basic() throws Exception {
+    // The "text_sw" field has synonyms loaded from synonyms.txt
+
+    // retrieve the single document containing literal "wifi"
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wifi", "sow","true")
+        , "/response/numFound==1"
+        , "/response/docs/[0]/id=='72'"
+    );
+
+    // trigger the "wi fi => wifi" synonym
+    assertJQ(req("qf", "text_sw title", "defType","edismax", "q","wi fi", "sow","false")
+        , "/response/numFound==1"
+        , "/response/docs/[0]/id=='72'"
+    );
+    assertJQ(req("qf", "text_sw title", "defType","edismax", "q","wi fi", "sow","true")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi") // default sow=true
+        , "/response/numFound==0"
+    );
+
+    assertJQ(req("qf","text_sw title", "q","{!edismax sow=false}wi fi")
+        , "/response/numFound==1"
+        , "/response/docs/[0]/id=='72'"
+    );
+    assertJQ(req("df", "text_sw title", "q","{!edismax sow=true}wi fi")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "text_sw title", "q", "{!edismax}wi fi") // default sow=true
+        , "/response/numFound==0"
+    );
+
+    assertQ(req("qf", "name title", 
+                "q", "barking curds of stigma",
+                "defType", "edismax",
+                "sow", "false",
+                "debugQuery", "true"),
+        "//str[@name='parsedquery'][contains(.,'DisjunctionMaxQuery((name:barking | title:barking))')]",
+        "//str[@name='parsedquery'][contains(.,'DisjunctionMaxQuery((name:curds | title:curds))')]",
+        "//str[@name='parsedquery'][contains(.,'DisjunctionMaxQuery((name:of | title:of))')]",
+        "//str[@name='parsedquery'][contains(.,'DisjunctionMaxQuery((name:stigma | title:stigma))')]"
+    );
+    assertQ(req("qf", "name title",
+        "q", "barking curds of stigma",
+        "defType", "edismax",
+        "sow", "true",
+        "debugQuery", "true"),
+        "//str[@name='parsedquery'][contains(.,'DisjunctionMaxQuery((name:barking | title:barking))')]",
+        "//str[@name='parsedquery'][contains(.,'DisjunctionMaxQuery((name:curds | title:curds))')]",
+        "//str[@name='parsedquery'][contains(.,'DisjunctionMaxQuery((name:of | title:of))')]",
+        "//str[@name='parsedquery'][contains(.,'DisjunctionMaxQuery((name:stigma | title:stigma))')]"
+    );
+    assertQ(req("qf", "name title",
+        "q", "barking curds of stigma",
+        "defType", "edismax",
+        "debugQuery", "true"), // Default sow=true
+        "//str[@name='parsedquery'][contains(.,'DisjunctionMaxQuery((name:barking | title:barking))')]",
+        "//str[@name='parsedquery'][contains(.,'DisjunctionMaxQuery((name:curds | title:curds))')]",
+        "//str[@name='parsedquery'][contains(.,'DisjunctionMaxQuery((name:of | title:of))')]",
+        "//str[@name='parsedquery'][contains(.,'DisjunctionMaxQuery((name:stigma | title:stigma))')]"
+    );
+  }
+  
+  public void testSplitOnWhitespace_Different_Field_Analysis() throws Exception {
+    // When the *structure* of produced queries is different in each field, 
+    // sow=true produces boolean-of-dismax query structure,
+    // and sow=false produces dismax-of-boolean query structure.
+    assertQ(req("qf", "text_sw title",
+        "q", "olive the other",
+        "defType", "edismax",
+        "sow", "true",
+        "debugQuery", "true"),
+        "//str[@name='parsedquery'][contains(.,'DisjunctionMaxQuery((text_sw:oliv | title:olive))')]",
+        "//str[@name='parsedquery'][contains(.,'DisjunctionMaxQuery((title:the))')]",
+        "//str[@name='parsedquery'][contains(.,'DisjunctionMaxQuery((text_sw:other | title:other))')]"
+    );
+    assertQ(req("qf", "text_sw title",
+        "q", "olive the other",
+        "defType", "edismax",
+        "sow", "false",
+        "debugQuery", "true"),
+        "//str[@name='parsedquery'][contains(.,'+DisjunctionMaxQuery(((text_sw:oliv text_sw:other) | (title:olive title:the title:other)))')]"
+    );
+
+    // When field's analysis produce different query structures, mm processing is always done on the boolean query.
+    // sow=true produces (boolean-of-dismax)~<mm> query structure,
+    // and sow=false produces dismax-of-(boolean)~<mm> query structure.
+    assertQ(req("qf", "text_sw title",
+        "q", "olive the other",
+        "defType", "edismax",
+        "sow", "true",
+        "mm", "100%",
+        "debugQuery", "true"),
+        "//str[@name='parsedquery'][contains(.,'+(DisjunctionMaxQuery((text_sw:oliv | title:olive)) DisjunctionMaxQuery((title:the)) DisjunctionMaxQuery((text_sw:other | title:other)))~3')]"
+    );
+    assertQ(req("qf", "text_sw title",
+        "q", "olive the other",
+        "defType", "edismax",
+        "sow", "false",
+        "mm", "100%",
+        "debugQuery", "true"),
+        "//str[@name='parsedquery'][contains(.,'+DisjunctionMaxQuery((((text_sw:oliv text_sw:other)~2) | ((title:olive title:the title:other)~3)))')]"
+    );
+
+
+    // When the *structure* of produced queries is the same in each field, 
+    // sow=false/true produce the same boolean-of-dismax query structure 
+    for (String sow : Arrays.asList("true", "false")) {
+      assertQ(req("qf", "text_sw title",
+          "q", "olive blah other",
+          "defType", "edismax",
+          "sow", sow,
+          "debugQuery", "true"),
+          "//str[@name='parsedquery'][contains(.,'"
+              + "+(DisjunctionMaxQuery((text_sw:oliv | title:olive))"
+              + " DisjunctionMaxQuery((text_sw:blah | title:blah))"
+              + " DisjunctionMaxQuery((text_sw:other | title:other)))')]"
+      );
+    }
+  }
+
+  public void testOperatorsAndMultiWordSynonyms() throws Exception {
+    // The "text_sw" field has synonyms loaded from synonyms.txt
+
+    // retrieve the single document containing literal "wifi"
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wifi", "sow","true")
+        , "/response/numFound==1"
+        , "/response/docs/[0]/id=='72'"
+    );
+    // trigger the "wi fi => wifi" synonym
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi", "sow","false")
+        , "/response/numFound==1"
+        , "/response/docs/[0]/id=='72'"
+    );
+
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","+wi fi", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","-wi fi", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","!wi fi", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi* fi", "sow","false")
+        , "/response/numFound==2"    // matches because wi* matches "wifi" in one doc and "with" in another
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","w? fi", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi~1 fi", "sow","false")
+        , "/response/numFound==4"   // matches because wi~1 matches ti (stemmed "ties")
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi^2 fi", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi^=2 fi", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi +fi", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi -fi", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi !fi", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi*", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi?", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi~1", "sow","false")
+        , "/response/numFound==4"   // matches because fi~1 matches ti (stemmed "ties")
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi^2", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi^=2", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","text_sw:wi fi", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi text_sw:fi", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","NOT wi fi", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi NOT fi", "sow","false")
+        , "/response/numFound==0"
+    );
+
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi AND ATM", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","ATM AND wi fi", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi && ATM", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","ATM && wi fi", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","(wi fi) AND ATM", "sow","false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","ATM AND (wi fi)", "sow","false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","(wi fi) && ATM", "sow","false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","ATM && (wi fi)", "sow","false")
+        , "/response/numFound==1"
+    );
+
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi OR NotThereAtAll", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","NotThereAtAll OR wi fi", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi || NotThereAtAll", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","NotThereAtAll || wi fi", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","(wi fi) OR NotThereAtAll", "sow","false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","NotThereAtAll OR (wi fi)", "sow","false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","(wi fi) || NotThereAtAll", "sow","false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","NotThereAtAll || (wi fi)", "sow","false")
+        , "/response/numFound==1"
+    );
+
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","\"wi\" fi", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi \"fi\"", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","(wi) fi", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi (fi)", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","/wi/ fi", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi /fi/", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","(wi fi)", "sow","false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","+(wi fi)", "sow","false")
+        , "/response/numFound==1"
+    );
+
+    Map all = (Map)ObjectBuilder.fromJSON(h.query(req("q", "*:*", "rows", "0", "wt", "json")));
+    int totalDocs = Integer.parseInt(((Map)all.get("response")).get("numFound").toString());
+    int allDocsExceptOne = totalDocs - 1;
+
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","-(wi fi)", "sow","false")
+        , "/response/numFound==" + allDocsExceptOne  // one doc contains "wifi" in the text_sw field
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","!(wi fi)", "sow","false")
+        , "/response/numFound==" + allDocsExceptOne  // one doc contains "wifi" in the text_sw field
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","NOT (wi fi)", "sow","false")
+        , "/response/numFound==" + allDocsExceptOne  // one doc contains "wifi" in the text_sw field
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","(wi fi)^2", "sow","false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","(wi fi)^=2", "sow","false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","text_sw:(wi fi)", "sow","false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","+ATM wi fi", "sow","false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","-ATM wi fi", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","-NotThereAtAll wi fi", "sow","false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","!ATM wi fi", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","!NotThereAtAll wi fi", "sow","false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","NOT ATM wi fi", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","NOT NotThereAtAll wi fi", "sow","false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","AT* wi fi", "sow","false")
+        , "/response/numFound==2"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","AT? wi fi", "sow","false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","\"ATM\" wi fi", "sow","false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi +ATM", "sow","false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi -ATM", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi -NotThereAtAll", "sow","false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi !ATM", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi !NotThereAtAll", "sow","false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi NOT ATM", "sow","false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi NOT NotThereAtAll", "sow","false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi AT*", "sow","false")
+        , "/response/numFound==2"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi AT?", "sow","false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi \"ATM\"", "sow","false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","\"wi fi\"~2", "sow","false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("qf","text_sw title", "defType","edismax", "q","text_sw:\"wi fi\"", "sow","false")
+        , "/response/numFound==1"
+    );
+  }
+  
+  
+
   private boolean containsClause(Query query, String field, String value,
       int boost, boolean fuzzy) {
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d1b2fb33/solr/core/src/test/org/apache/solr/search/TestMultiWordSynonyms.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestMultiWordSynonyms.java b/solr/core/src/test/org/apache/solr/search/TestMultiWordSynonyms.java
new file mode 100644
index 0000000..ecc80c3
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/search/TestMultiWordSynonyms.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.search;
+
+import java.util.Arrays;
+
+import org.apache.solr.SolrTestCaseJ4;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestMultiWordSynonyms extends SolrTestCaseJ4 {
+
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    initCore("solrconfig.xml", "schema-multiword-synonyms.xml");
+    index();
+  }
+
+  private static void index() throws Exception {
+    assertU(adoc("id","1", "text","USA Today"));
+    assertU(adoc("id","2", "text","A dynamic US economy"));
+    assertU(adoc("id","3", "text","The United States of America's 50 states"));
+    assertU(adoc("id","4", "text","Party in the U.S.A."));
+    assertU(adoc("id","5", "text","These United States"));
+
+    assertU(adoc("id","6", "text","America United of States"));
+    assertU(adoc("id","7", "text","States United"));
+
+    assertU(commit());
+  }
+
+  @Test
+  public void testNonPhrase() throws Exception {
+    // Don't split on whitespace (sow=false)
+    for (String q : Arrays.asList("US", "U.S.", "USA", "U.S.A.", "United States", "United States of America")) {
+      for (String defType : Arrays.asList("lucene", "edismax")) {
+        assertJQ(req("q", q,
+            "defType", defType,
+            "df", "text",
+            "sow", "false")
+            , "/response/numFound==7"
+        );
+      }
+    }
+
+    // Split on whitespace (sow=true)
+    for (String q : Arrays.asList("US", "U.S.", "USA", "U.S.A.")) {
+      for (String defType : Arrays.asList("lucene", "edismax")) {
+        assertJQ(req("q", q,
+            "defType", defType,
+            "df", "text",
+            "sow", "true")
+            , "/response/numFound==7"
+        );
+      }
+    }
+    for (String q : Arrays.asList("United States", "United States of America")) {
+      for (String defType : Arrays.asList("lucene", "edismax")) {
+        assertJQ(req("q", q,
+            "defType", defType,
+            "df", "text",
+            "sow", "true")
+            , "/response/numFound==4"
+        );
+      }
+    }
+  }
+  
+  @Test
+  public void testPhrase() throws Exception {
+    for (String q : Arrays.asList
+        ("\"US\"", "\"U.S.\"", "\"USA\"", "\"U.S.A.\"", "\"United States\"", "\"United States of America\"")) {
+      for (String defType : Arrays.asList("lucene", "edismax")) {
+        for (String sow : Arrays.asList("true", "false")) {
+          assertJQ(req("q", q,
+              "defType", defType,
+              "df", "text",
+              "sow", sow)
+              , "/response/numFound==5"
+          );
+        }
+      }
+    }
+  }
+}
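
Note on what the new test assumes: schema-multiword-synonyms.xml (not included in this commit) is expected to map "US", "USA", "U.S.", "U.S.A." and "United States (of America)" onto one another for the "text" field. The sketch below is a minimal SolrJ client illustration of the same sow behavior against a hypothetical core configured that way; the core name, URL, and field setup are assumptions, not part of this change.

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;

public class MultiWordSynonymQueryExample {
  public static void main(String[] args) throws Exception {
    // Hypothetical core with a multi-word synonym setup like the one this test uses.
    try (SolrClient client = new HttpSolrClient.Builder(
        "http://localhost:8983/solr/multiword").build()) {
      SolrQuery q = new SolrQuery("United States");
      q.set("df", "text");
      q.set("defType", "edismax");
      q.set("sow", "false");   // keep the phrase intact so multi-word synonyms can match
      QueryResponse rsp = client.query(q);
      System.out.println("numFound=" + rsp.getResults().getNumFound());
    }
  }
}

With sow=false the whole query string is analyzed per field as one unit, so graph-aware synonym filters see "United States" together; with sow=true each whitespace-separated token is parsed on its own and only single-token synonyms can apply, which is what the differing numFound expectations above exercise.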

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d1b2fb33/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java b/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java
index 8195c05..92bd6c0 100644
--- a/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java
+++ b/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java
@@ -16,7 +16,12 @@
  */
 package org.apache.solr.search;
 
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
 import java.util.Locale;
+import java.util.Map;
 import java.util.Random;
 
 import org.apache.lucene.search.BooleanClause;
@@ -28,12 +33,15 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermInSetQuery;
 import org.apache.lucene.search.TermQuery;
 import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.common.params.MapSolrParams;
 import org.apache.solr.core.SolrInfoMBean;
 import org.apache.solr.parser.QueryParser;
 import org.apache.solr.query.FilterQuery;
 import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.schema.TextField;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.noggit.ObjectBuilder;
 
 
 public class TestSolrQueryParser extends SolrTestCaseJ4 {
@@ -57,6 +65,8 @@ public class TestSolrQueryParser extends SolrTestCaseJ4 {
     assertU(adoc("id", "12", "eee_s", "X"));
     assertU(adoc("id", "13", "eee_s", "'balance'", "rrr_s", "/leading_slash"));
 
+    assertU(adoc("id", "20", "syn", "wifi ATM"));
+
     assertU(commit());
   }
 
@@ -208,86 +218,105 @@ public class TestSolrQueryParser extends SolrTestCaseJ4 {
     QParser qParser;
     Query q,qq;
 
-    // relevance query should not be a filter
-    qParser = QParser.getParser("foo_s:(a b c)", req);
-    q = qParser.getQuery();
-    assertEquals(3, ((BooleanQuery)q).clauses().size());
-
-    // small filter query should still use BooleanQuery
-    if (QueryParser.TERMS_QUERY_THRESHOLD > 3) {
+    Map<String, String> sowFalseParamsMap = new HashMap<>();
+    sowFalseParamsMap.put("sow", "false");
+    Map<String, String> sowTrueParamsMap = new HashMap<>();
+    sowTrueParamsMap.put("sow", "true");
+    List<MapSolrParams> paramMaps = Arrays.asList
+        (new MapSolrParams(Collections.emptyMap()), // no sow param (i.e. the default sow value) 
+         new MapSolrParams(sowFalseParamsMap),
+         new MapSolrParams(sowTrueParamsMap));
+
+    for (MapSolrParams params : paramMaps) {
+      // relevance query should not be a filter
       qParser = QParser.getParser("foo_s:(a b c)", req);
-      qParser.setIsFilter(true); // this may change in the future
+      qParser.setParams(params);
       q = qParser.getQuery();
       assertEquals(3, ((BooleanQuery) q).clauses().size());
-    }
 
-    // large relevancy query should use BooleanQuery
-    // TODO: we may decide that string fields shouldn't have relevance in the future... change to a text field w/o a stop filter if so
-    qParser = QParser.getParser("foo_s:(a b c d e f g h i j k l m n o p q r s t u v w x y z)", req);
-    q = qParser.getQuery();
-    assertEquals(26, ((BooleanQuery)q).clauses().size());
+      // small filter query should still use BooleanQuery
+      if (QueryParser.TERMS_QUERY_THRESHOLD > 3) {
+        qParser = QParser.getParser("foo_s:(a b c)", req);
+        qParser.setParams(params);
+        qParser.setIsFilter(true); // this may change in the future
+        q = qParser.getQuery();
+        assertEquals(3, ((BooleanQuery) q).clauses().size());
+      }
 
-    // large filter query should use TermsQuery
-    qParser = QParser.getParser("foo_s:(a b c d e f g h i j k l m n o p q r s t u v w x y z)", req);
-    qParser.setIsFilter(true); // this may change in the future
-    q = qParser.getQuery();
-    assertEquals(26, ((TermInSetQuery)q).getTermData().size());
+      // large relevancy query should use BooleanQuery
+      // TODO: we may decide that string fields shouldn't have relevance in the future... change to a text field w/o a stop filter if so
+      qParser = QParser.getParser("foo_s:(a b c d e f g h i j k l m n o p q r s t u v w x y z)", req);
+      qParser.setParams(params);
+      q = qParser.getQuery();
+      assertEquals(26, ((BooleanQuery)q).clauses().size());
 
-    // large numeric filter query should use TermsQuery (for trie fields)
-    qParser = QParser.getParser("foo_ti:(1 2 3 4 5 6 7 8 9 10 20 19 18 17 16 15 14 13 12 11)", req);
-    qParser.setIsFilter(true); // this may change in the future
-    q = qParser.getQuery();
-    assertEquals(20, ((TermInSetQuery)q).getTermData().size());
-    
-    // for point fields large filter query should use PointInSetQuery
-    qParser = QParser.getParser("foo_pi:(1 2 3 4 5 6 7 8 9 10 20 19 18 17 16 15 14 13 12 11)", req);
-    qParser.setIsFilter(true); // this may change in the future
-    q = qParser.getQuery();
-    assertTrue(q instanceof PointInSetQuery);
-    assertEquals(20, ((PointInSetQuery)q).getPackedPoints().size());
+      // large filter query should use TermsQuery
+      qParser = QParser.getParser("foo_s:(a b c d e f g h i j k l m n o p q r s t u v w x y z)", req);
+      qParser.setIsFilter(true); // this may change in the future
+      qParser.setParams(params);
+      q = qParser.getQuery();
+      assertEquals(26, ((TermInSetQuery)q).getTermData().size());
 
-    // a filter() clause inside a relevancy query should be able to use a TermsQuery
-    qParser = QParser.getParser("foo_s:aaa filter(foo_s:(a b c d e f g h i j k l m n o p q r s t u v w x y z))", req);
-    q = qParser.getQuery();
-    assertEquals(2, ((BooleanQuery)q).clauses().size());
-    qq = ((BooleanQuery)q).clauses().get(0).getQuery();
-    if (qq instanceof TermQuery) {
-      qq = ((BooleanQuery)q).clauses().get(1).getQuery();
-    }
+      // large numeric filter query should use TermsQuery (for trie fields)
+      qParser = QParser.getParser("foo_ti:(1 2 3 4 5 6 7 8 9 10 20 19 18 17 16 15 14 13 12 11)", req);
+      qParser.setIsFilter(true); // this may change in the future
+      qParser.setParams(params);
+      q = qParser.getQuery();
+      assertEquals(20, ((TermInSetQuery)q).getTermData().size());
 
-    if (qq instanceof FilterQuery) {
-      qq = ((FilterQuery)qq).getQuery();
-    }
+      // for point fields large filter query should use PointInSetQuery
+      qParser = QParser.getParser("foo_pi:(1 2 3 4 5 6 7 8 9 10 20 19 18 17 16 15 14 13 12 11)", req);
+      qParser.setIsFilter(true); // this may change in the future
+      qParser.setParams(params);
+      q = qParser.getQuery();
+      assertTrue(q instanceof PointInSetQuery);
+      assertEquals(20, ((PointInSetQuery)q).getPackedPoints().size());
 
-    assertEquals(26, ((TermInSetQuery)qq).getTermData().size());
+      // a filter() clause inside a relevancy query should be able to use a TermsQuery
+      qParser = QParser.getParser("foo_s:aaa filter(foo_s:(a b c d e f g h i j k l m n o p q r s t u v w x y z))", req);
+      qParser.setParams(params);
+      q = qParser.getQuery();
+      assertEquals(2, ((BooleanQuery)q).clauses().size());
+      qq = ((BooleanQuery)q).clauses().get(0).getQuery();
+      if (qq instanceof TermQuery) {
+        qq = ((BooleanQuery)q).clauses().get(1).getQuery();
+      }
 
-    // test mixed boolean query, including quotes (which shouldn't matter)
-    qParser = QParser.getParser("foo_s:(a +aaa b -bbb c d e f bar_s:(qqq www) g h i j k l m n o p q r s t u v w x y z)", req);
-    qParser.setIsFilter(true); // this may change in the future
-    q = qParser.getQuery();
-    assertEquals(4, ((BooleanQuery)q).clauses().size());
-    qq = null;
-    for (BooleanClause clause : ((BooleanQuery)q).clauses()) {
-      qq = clause.getQuery();
-      if (qq instanceof TermInSetQuery) break;
-    }
-    assertEquals(26, ((TermInSetQuery)qq).getTermData().size());
+      if (qq instanceof FilterQuery) {
+        qq = ((FilterQuery)qq).getQuery();
+      }
 
-    // test terms queries of two different fields (LUCENE-7637 changed to require all terms be in the same field)
-    StringBuilder sb = new StringBuilder();
-    for (int i=0; i<17; i++) {
-      char letter = (char)('a'+i);
-      sb.append("foo_s:" + letter + " bar_s:" + letter + " ");
-    }
-    qParser = QParser.getParser(sb.toString(), req);
-    qParser.setIsFilter(true); // this may change in the future
-    q = qParser.getQuery();
-    assertEquals(2, ((BooleanQuery)q).clauses().size());
-    for (BooleanClause clause : ((BooleanQuery)q).clauses()) {
-      qq = clause.getQuery();
-      assertEquals(17, ((TermInSetQuery)qq).getTermData().size());
-    }
+      assertEquals(26, ((TermInSetQuery) qq).getTermData().size());
+
+      // test mixed boolean query, including quotes (which shouldn't matter)
+      qParser = QParser.getParser("foo_s:(a +aaa b -bbb c d e f bar_s:(qqq www) g h i j k l m n o p q r s t u v w x y z)", req);
+      qParser.setIsFilter(true); // this may change in the future
+      qParser.setParams(params);
+      q = qParser.getQuery();
+      assertEquals(4, ((BooleanQuery)q).clauses().size());
+      qq = null;
+      for (BooleanClause clause : ((BooleanQuery)q).clauses()) {
+        qq = clause.getQuery();
+        if (qq instanceof TermInSetQuery) break;
+      }
+      assertEquals(26, ((TermInSetQuery)qq).getTermData().size());
 
+      // test terms queries of two different fields (LUCENE-7637 changed to require all terms be in the same field)
+      StringBuilder sb = new StringBuilder();
+      for (int i=0; i<17; i++) {
+        char letter = (char)('a'+i);
+        sb.append("foo_s:" + letter + " bar_s:" + letter + " ");
+      }
+      qParser = QParser.getParser(sb.toString(), req);
+      qParser.setIsFilter(true); // this may change in the future
+      qParser.setParams(params);
+      q = qParser.getQuery();
+      assertEquals(2, ((BooleanQuery)q).clauses().size());
+      for (BooleanClause clause : ((BooleanQuery)q).clauses()) {
+        qq = clause.getQuery();
+        assertEquals(17, ((TermInSetQuery)qq).getTermData().size());
+      }
+    }
     req.close();
   }
 
@@ -306,6 +335,10 @@ public class TestSolrQueryParser extends SolrTestCaseJ4 {
     // This will still fail when used as the main query, but will pass in a filter query since TermsQuery can be used.
     assertJQ(req("q","*:*", "fq", q)
         ,"/response/numFound==6");
+    assertJQ(req("q","*:*", "fq", q, "sow", "false")
+        ,"/response/numFound==6");
+    assertJQ(req("q","*:*", "fq", q, "sow", "true")
+        ,"/response/numFound==6");
   }
 
   @Test
@@ -540,4 +573,400 @@ public class TestSolrQueryParser extends SolrTestCaseJ4 {
     req.close();
   }
 
+  // LUCENE-7533
+  public void testSplitOnWhitespace_with_autoGeneratePhraseQueries() throws Exception {
+    assertTrue(((TextField)h.getCore().getLatestSchema().getField("text").getType()).getAutoGeneratePhraseQueries());
+    
+    try (SolrQueryRequest req = req()) {
+      final QParser qparser = QParser.getParser("{!lucene sow=false qf=text}blah blah", req);
+      expectThrows(QueryParserConfigurationException.class, qparser::getQuery);
+    }
+  }
+
+  @Test
+  public void testSplitOnWhitespace_Basic() throws Exception {
+    // The "syn" field has synonyms loaded from synonyms.txt
+
+    assertJQ(req("df", "syn", "q", "wifi", "sow", "true") // retrieve the single document containing literal "wifi"
+        , "/response/numFound==1"
+        , "/response/docs/[0]/id=='20'"
+    );
+
+    assertJQ(req("df", "syn", "q", "wi fi", "sow", "false") // trigger the "wi fi => wifi" synonym
+        , "/response/numFound==1"
+        , "/response/docs/[0]/id=='20'"
+    );
+
+    assertJQ(req("df", "syn", "q", "wi fi", "sow", "true")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi fi") // default sow=true
+        , "/response/numFound==0"
+    );
+
+    assertJQ(req("df", "syn", "q", "{!lucene sow=false}wi fi")
+        , "/response/numFound==1"
+        , "/response/docs/[0]/id=='20'"
+    );
+    assertJQ(req("df", "syn", "q", "{!lucene sow=true}wi fi")
+        , "/response/numFound==0"
+    );
+
+    assertJQ(req("df", "syn", "q", "{!lucene}wi fi") // default sow=true
+        , "/response/numFound==0"
+    );
+  }
+
+  public void testSplitOnWhitespace_Comments() throws Exception {
+    // The "syn" field has synonyms loaded from synonyms.txt
+
+    assertJQ(req("df", "syn", "q", "wifi", "sow", "true") // retrieve the single document containing literal "wifi"
+        , "/response/numFound==1"
+        , "/response/docs/[0]/id=='20'"
+    );
+    assertJQ(req("df", "syn", "q", "wi fi", "sow", "false") // trigger the "wi fi => wifi" synonym
+        , "/response/numFound==1"
+        , "/response/docs/[0]/id=='20'"
+    );
+    assertJQ(req("df", "syn", "q", "wi /* foo */ fi", "sow", "false") // trigger the "wi fi => wifi" synonym
+        , "/response/numFound==1"
+        , "/response/docs/[0]/id=='20'"
+    );
+    assertJQ(req("df", "syn", "q", "wi /* foo */ /* bar */ fi", "sow", "false") // trigger the "wi fi => wifi" synonym
+        , "/response/numFound==1"
+        , "/response/docs/[0]/id=='20'"
+    );
+    assertJQ(req("df", "syn", "q", " /* foo */ wi fi /* bar */", "sow", "false") // trigger the "wi fi => wifi" synonym
+        , "/response/numFound==1"
+        , "/response/docs/[0]/id=='20'"
+    );
+    assertJQ(req("df", "syn", "q", " /* foo */ wi /* bar */ fi /* baz */", "sow", "false") // trigger the "wi fi => wifi" synonym
+        , "/response/numFound==1"
+        , "/response/docs/[0]/id=='20'"
+    );
+
+    assertJQ(req("df", "syn", "q", "wi fi", "sow", "true")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi /* foo */ fi", "sow", "true")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi /* foo */ /* bar */ fi", "sow", "true")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "/* foo */ wi fi /* bar */", "sow", "true")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "/* foo */ wi /* bar */ fi /* baz */", "sow", "true")
+        , "/response/numFound==0"
+    );
+
+    assertJQ(req("df", "syn", "q", "wi fi") // default sow=true
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi /* foo */ fi") // default sow=true
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi /* foo */ /* bar */ fi") // default sow=true
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "/* foo */ wi fi /* bar */") // default sow=true
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "/* foo */ wi /* bar */ fi /* baz */") // default sow=true
+        , "/response/numFound==0"
+    );
+
+
+    assertJQ(req("df", "syn", "q", "{!lucene sow=false}wi fi")
+        , "/response/numFound==1"
+        , "/response/docs/[0]/id=='20'"
+    );
+    assertJQ(req("df", "syn", "q", "{!lucene sow=false}wi /* foo */ fi")
+        , "/response/numFound==1"
+        , "/response/docs/[0]/id=='20'"
+    );
+    assertJQ(req("df", "syn", "q", "{!lucene sow=false}wi /* foo */ /* bar */ fi")
+        , "/response/numFound==1"
+        , "/response/docs/[0]/id=='20'"
+    );
+    assertJQ(req("df", "syn", "q", "{!lucene sow=false}/* foo */ wi fi /* bar */")
+        , "/response/numFound==1"
+        , "/response/docs/[0]/id=='20'"
+    );
+    assertJQ(req("df", "syn", "q", "{!lucene sow=false}/* foo */ wi /* bar */ fi /* baz */")
+        , "/response/numFound==1"
+        , "/response/docs/[0]/id=='20'"
+    );
+
+    assertJQ(req("df", "syn", "q", "{!lucene sow=true}wi fi")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "{!lucene sow=true}wi /* foo */ fi")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "{!lucene sow=true}wi /* foo */ /* bar */ fi")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "{!lucene sow=true}/* foo */ wi fi /* bar */")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "{!lucene sow=true}/* foo */ wi /* bar */ fi /* baz */")
+        , "/response/numFound==0"
+    );
+
+    assertJQ(req("df", "syn", "q", "{!lucene}wi fi") // default sow=true
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "{!lucene}wi /* foo */ fi") // default sow=true
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "{!lucene}wi /* foo */ /* bar */ fi") // default sow=true
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "{!lucene}/* foo */ wi fi /* bar */") // default sow=true
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "{!lucene}/* foo */ wi /* bar */ fi /* baz */") // default sow=true
+        , "/response/numFound==0"
+    );
+  }
+
+  public void testOperatorsAndMultiWordSynonyms() throws Exception {
+    // The "syn" field has synonyms loaded from synonyms.txt
+
+    assertJQ(req("df", "syn", "q", "wifi", "sow", "true") // retrieve the single document containing literal "wifi"
+        , "/response/numFound==1"
+        , "/response/docs/[0]/id=='20'"
+    );
+    assertJQ(req("df", "syn", "q", "wi fi", "sow", "false") // trigger the "wi fi => wifi" synonym
+        , "/response/numFound==1"
+        , "/response/docs/[0]/id=='20'"
+    );
+
+    assertJQ(req("df", "syn", "q", "+wi fi", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "-wi fi", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "!wi fi", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi* fi", "sow", "false")    // matches because wi* matches wifi
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "w? fi", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi~1 fi", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi^2 fi", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi^=2 fi", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi +fi", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi -fi", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi !fi", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi fi*", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi fi?", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi fi~1", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi fi^2", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi fi^=2", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "syn:wi fi", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi syn:fi", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "NOT wi fi", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi NOT fi", "sow", "false")
+        , "/response/numFound==0"
+    );
+
+    assertJQ(req("df", "syn", "q", "wi fi AND ATM", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "ATM AND wi fi", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi fi && ATM", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "ATM && wi fi", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "(wi fi) AND ATM", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "ATM AND (wi fi)", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "(wi fi) && ATM", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "ATM && (wi fi)", "sow", "false")
+        , "/response/numFound==1"
+    );
+
+    assertJQ(req("df", "syn", "q", "wi fi OR NotThereAtAll", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "NotThereAtAll OR wi fi", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi fi || NotThereAtAll", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "NotThereAtAll || wi fi", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "(wi fi) OR NotThereAtAll", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "NotThereAtAll OR (wi fi)", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "(wi fi) || NotThereAtAll", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "NotThereAtAll || (wi fi)", "sow", "false")
+        , "/response/numFound==1"
+    );
+
+    assertJQ(req("df", "syn", "q", "\"wi\" fi", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi \"fi\"", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "(wi) fi", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi (fi)", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "/wi/ fi", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi /fi/", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "(wi fi)", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "+(wi fi)", "sow", "false")
+        , "/response/numFound==1"
+    );
+
+    Map all = (Map)ObjectBuilder.fromJSON(h.query(req("q", "*:*", "rows", "0", "wt", "json")));
+    int totalDocs = Integer.parseInt(((Map)all.get("response")).get("numFound").toString());
+    int allDocsExceptOne = totalDocs - 1;
+
+    assertJQ(req("df", "syn", "q", "-(wi fi)", "sow", "false")
+        , "/response/numFound==" + allDocsExceptOne  // one doc contains "wifi" in the syn field
+    );
+    assertJQ(req("df", "syn", "q", "!(wi fi)", "sow", "false")
+        , "/response/numFound==" + allDocsExceptOne  // one doc contains "wifi" in the syn field
+    );
+    assertJQ(req("df", "syn", "q", "NOT (wi fi)", "sow", "false")
+        , "/response/numFound==" + allDocsExceptOne  // one doc contains "wifi" in the syn field
+    );
+    assertJQ(req("df", "syn", "q", "(wi fi)^2", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "(wi fi)^=2", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "syn:(wi fi)", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "+ATM wi fi", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "-ATM wi fi", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "-NotThereAtAll wi fi", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "!ATM wi fi", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "!NotThereAtAll wi fi", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "NOT ATM wi fi", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "NOT NotThereAtAll wi fi", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "AT* wi fi", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "AT? wi fi", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "\"ATM\" wi fi", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "wi fi +ATM", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "wi fi -ATM", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi fi -NotThereAtAll", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "wi fi !ATM", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi fi !NotThereAtAll", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "wi fi NOT ATM", "sow", "false")
+        , "/response/numFound==0"
+    );
+    assertJQ(req("df", "syn", "q", "wi fi NOT NotThereAtAll", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "wi fi AT*", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "wi fi AT?", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "wi fi \"ATM\"", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "\"wi fi\"~2", "sow", "false")
+        , "/response/numFound==1"
+    );
+    assertJQ(req("df", "syn", "q", "syn:\"wi fi\"", "sow", "false")
+        , "/response/numFound==1"
+    );
+  }
 }
\ No newline at end of file
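
Besides the global request parameter, the new tests also set sow via local params ({!lucene sow=false}...), which configures that one query clause at parse time. A rough SolrJ equivalent, again against a hypothetical core whose "syn" field carries the "wi fi => wifi" synonym rule assumed by these tests (none of which is defined in this commit):

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;

public class LocalParamsSowExample {
  public static void main(String[] args) throws Exception {
    try (SolrClient client = new HttpSolrClient.Builder(
        "http://localhost:8983/solr/techproducts").build()) {
      // sow=false inside the local params is expected to govern this clause,
      // letting the "wi fi => wifi" synonym fire for the phrase-like input.
      SolrQuery q = new SolrQuery("{!lucene sow=false df=syn}wi fi");
      long hits = client.query(q).getResults().getNumFound();
      System.out.println("hits=" + hits);
    }
  }
}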


[21/46] lucene-solr:jira/solr-9959: Add 6.6 version

Posted by ab...@apache.org.
Add 6.6 version


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d374193e
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d374193e
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d374193e

Branch: refs/heads/jira/solr-9959
Commit: d374193e73e8c6025aaac72719757a230f0c8596
Parents: 8fbd9f1
Author: Jim Ferenczi <ji...@apache.org>
Authored: Fri Mar 17 11:55:59 2017 +0100
Committer: Jim Ferenczi <ji...@apache.org>
Committed: Fri Mar 17 11:55:59 2017 +0100

----------------------------------------------------------------------
 lucene/CHANGES.txt                                 |  3 +++
 .../src/java/org/apache/lucene/util/Version.java   |  7 +++++++
 solr/CHANGES.txt                                   | 17 +++++++++++++++++
 3 files changed, 27 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d374193e/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index bd38f3f..b2ea412 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -80,6 +80,9 @@ Other
 
 * LUCENE-7681: MemoryIndex uses new DocValues API (Alan Woodward)
 
+======================= Lucene 6.6.0 =======================
+(No Changes)
+
 ======================= Lucene 6.5.0 =======================
 
 API Changes

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d374193e/lucene/core/src/java/org/apache/lucene/util/Version.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/util/Version.java b/lucene/core/src/java/org/apache/lucene/util/Version.java
index 895f169..da6d653 100644
--- a/lucene/core/src/java/org/apache/lucene/util/Version.java
+++ b/lucene/core/src/java/org/apache/lucene/util/Version.java
@@ -102,6 +102,13 @@ public final class Version {
   public static final Version LUCENE_6_5_0 = new Version(6, 5, 0);
 
   /**
+   * Match settings and bugs in Lucene's 6.6.0 release.
+   * @deprecated Use latest
+   */
+  @Deprecated
+  public static final Version LUCENE_6_6_0 = new Version(6, 6, 0);
+
+  /**
    * Match settings and bugs in Lucene's 7.0.0 release.
    *  <p>
    *  Use this to get the latest &amp; greatest settings, bug
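
For context, a minimal sketch (not part of this commit) of how the new constant participates in the usual Version parsing and comparison utilities; the printed values assume this branch, where Version.LATEST is the 7.0.0 snapshot:

import org.apache.lucene.util.Version;

public class VersionExample {
  public static void main(String[] args) throws Exception {
    Version v660 = Version.parse("6.6.0");                    // equal to LUCENE_6_6_0
    System.out.println(v660.onOrAfter(Version.LUCENE_6_5_0)); // true
    System.out.println(Version.LATEST.onOrAfter(v660));       // true while LATEST is 7.0.0
  }
}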

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d374193e/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index dfe8d93..1548410 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -88,6 +88,23 @@ Other Changes
 
 ----------------------
 
+==================  6.6.0 ==================
+
+Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
+
+Versions of Major Components
+---------------------
+Apache Tika 1.13
+Carrot2 3.15.0
+Velocity 1.7 and Velocity Tools 2.0
+Apache UIMA 2.3.1
+Apache ZooKeeper 3.4.6
+Jetty 9.3.14.v20161028
+
+
+(No Changes)
+
+
 ==================  6.5.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.