Posted to commits@lucene.apache.org by sa...@apache.org on 2016/07/21 13:37:23 UTC

[01/51] [abbrv] lucene-solr:apiv2: LUCENE-7368: Remove queryNorm.

Repository: lucene-solr
Updated Branches:
  refs/heads/apiv2 22f1be69f -> 49a092170


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/solr/core/src/test/org/apache/solr/search/mlt/CloudMLTQParserTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/mlt/CloudMLTQParserTest.java b/solr/core/src/test/org/apache/solr/search/mlt/CloudMLTQParserTest.java
index 7956853..ffcde2f 100644
--- a/solr/core/src/test/org/apache/solr/search/mlt/CloudMLTQParserTest.java
+++ b/solr/core/src/test/org/apache/solr/search/mlt/CloudMLTQParserTest.java
@@ -98,7 +98,7 @@ public class CloudMLTQParserTest extends SolrCloudTestCase {
     QueryResponse queryResponse = cluster.getSolrClient()
         .query(COLLECTION, new SolrQuery("{!mlt qf=lowerfilt_u}17").setShowDebugInfo(true));
     SolrDocumentList solrDocuments = queryResponse.getResults();
-    int[] expectedIds = new int[]{7, 13, 14, 15, 16, 20, 22, 24, 32, 9};
+    int[] expectedIds = new int[]{7, 9, 13, 14, 15, 16, 20, 22, 24, 32};
     int[] actualIds = new int[10];
     int i = 0;
     for (SolrDocument solrDocument : solrDocuments) {
@@ -113,7 +113,7 @@ public class CloudMLTQParserTest extends SolrCloudTestCase {
 
     QueryResponse queryResponse = cluster.getSolrClient().query(COLLECTION, new SolrQuery("{!mlt qf=lowerfilt_u boost=true}17"));
     SolrDocumentList solrDocuments = queryResponse.getResults();
-    int[] expectedIds = new int[]{7, 13, 14, 15, 16, 20, 22, 24, 32, 9};
+    int[] expectedIds = new int[]{7, 9, 13, 14, 15, 16, 20, 22, 24, 32};
     int[] actualIds = new int[solrDocuments.size()];
     int i = 0;
     for (SolrDocument solrDocument : solrDocuments) {
@@ -159,7 +159,7 @@ public class CloudMLTQParserTest extends SolrCloudTestCase {
     QueryResponse queryResponse = cluster.getSolrClient().query(COLLECTION,
         new SolrQuery("{!mlt qf=lowerfilt_u,lowerfilt1_u mindf=0 mintf=1}26"));
     SolrDocumentList solrDocuments = queryResponse.getResults();
-    int[] expectedIds = new int[]{27, 3, 29, 28};
+    int[] expectedIds = new int[]{3, 29, 27, 28};
     int[] actualIds = new int[solrDocuments.size()];
     int i = 0;
     for (SolrDocument solrDocument : solrDocuments) {
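
These CloudMLTQParserTest updates only reorder the expected document ids: with queryNorm gone (and the boost now applied when the weight is created), the absolute scores produced by the MLT query shift slightly, so documents whose scores were very close can come back in a different rank order; the expected arrays are updated to the new order. Purely as a hypothetical illustration (not what this commit does), a test that only cared about which documents are returned, and not their order, could compare sorted copies of the arrays:

    // Hypothetical, order-insensitive variant of the assertion; assumes the
    // "id" field holds a numeric string, as in the surrounding test.
    int[] expectedIds = new int[]{7, 9, 13, 14, 15, 16, 20, 22, 24, 32};
    int[] actualIds = new int[solrDocuments.size()];
    int i = 0;
    for (SolrDocument solrDocument : solrDocuments) {
      actualIds[i++] = Integer.parseInt(String.valueOf(solrDocument.getFieldValue("id")));
    }
    java.util.Arrays.sort(expectedIds);
    java.util.Arrays.sort(actualIds);
    org.junit.Assert.assertArrayEquals(expectedIds, actualIds);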

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheSortRandom.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheSortRandom.java b/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheSortRandom.java
index 6f2e17c..dedb449 100644
--- a/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheSortRandom.java
+++ b/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheSortRandom.java
@@ -268,8 +268,8 @@ public class TestFieldCacheSortRandom extends LuceneTestCase {
     }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-      return new ConstantScoreWeight(this) {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+      return new ConstantScoreWeight(this, boost) {
         @Override
         public Scorer scorer(LeafReaderContext context) throws IOException {
           Random random = new Random(seed ^ context.docBase);
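
The hunk above shows the mechanical change that repeats throughout this commit: Query.createWeight now receives the query boost as a third argument, and ConstantScoreWeight takes that boost in its constructor instead of having it applied later through normalize()/getValueForNormalization(). A minimal sketch of a custom constant-score query written against the new signature follows; the query class itself (MatchAllConstantQuery) is made up for illustration and is not part of this commit.

    import java.io.IOException;

    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.ConstantScoreScorer;
    import org.apache.lucene.search.ConstantScoreWeight;
    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.Weight;

    /** Hypothetical query that matches every document with a constant score. */
    public final class MatchAllConstantQuery extends Query {

      @Override
      public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
        // The boost arrives once, at weight creation time; there is no later
        // normalize()/getValueForNormalization() pass over the weight tree.
        return new ConstantScoreWeight(this, boost) {
          @Override
          public Scorer scorer(LeafReaderContext context) throws IOException {
            // score() returns the constant captured by ConstantScoreWeight(this, boost).
            return new ConstantScoreScorer(this, score(),
                DocIdSetIterator.all(context.reader().maxDoc()));
          }
        };
      }

      @Override
      public String toString(String field) {
        return "MatchAllConstantQuery";
      }

      @Override
      public boolean equals(Object other) {
        return other != null && getClass() == other.getClass();
      }

      @Override
      public int hashCode() {
        return getClass().hashCode();
      }
    }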


[02/51] [abbrv] lucene-solr:apiv2: LUCENE-7368: Remove queryNorm.

Posted by sa...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/fuzzyTestData.txt
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/fuzzyTestData.txt b/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/fuzzyTestData.txt
deleted file mode 100644
index b759da7..0000000
--- a/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/fuzzyTestData.txt
+++ /dev/null
@@ -1,3721 +0,0 @@
-3
-0,0,1,0.1
-1
-0,1.0
-1,0,1,0.1
-1
-1,1.0
-2,0,1,0.1
-1
-2,1.0
-3,0,1,0.1
-1
-3,1.0
-4,0,1,0.1
-1
-4,1.0
-5,0,1,0.1
-1
-5,1.0
-6,0,1,0.1
-1
-6,1.0
-7,0,1,0.1
-1
-7,1.0
-0,0,1,0.3
-1
-0,1.0
-1,0,1,0.3
-1
-1,1.0
-2,0,1,0.3
-1
-2,1.0
-3,0,1,0.3
-1
-3,1.0
-4,0,1,0.3
-1
-4,1.0
-5,0,1,0.3
-1
-5,1.0
-6,0,1,0.3
-1
-6,1.0
-7,0,1,0.3
-1
-7,1.0
-0,0,1,0.5
-1
-0,1.0
-1,0,1,0.5
-1
-1,1.0
-2,0,1,0.5
-1
-2,1.0
-3,0,1,0.5
-1
-3,1.0
-4,0,1,0.5
-1
-4,1.0
-5,0,1,0.5
-1
-5,1.0
-6,0,1,0.5
-1
-6,1.0
-7,0,1,0.5
-1
-7,1.0
-0,0,1,0.7
-1
-0,1.0
-1,0,1,0.7
-1
-1,1.0
-2,0,1,0.7
-1
-2,1.0
-3,0,1,0.7
-1
-3,1.0
-4,0,1,0.7
-1
-4,1.0
-5,0,1,0.7
-1
-5,1.0
-6,0,1,0.7
-1
-6,1.0
-7,0,1,0.7
-1
-7,1.0
-0,0,1,0.9
-1
-0,1.0
-1,0,1,0.9
-1
-1,1.0
-2,0,1,0.9
-1
-2,1.0
-3,0,1,0.9
-1
-3,1.0
-4,0,1,0.9
-1
-4,1.0
-5,0,1,0.9
-1
-5,1.0
-6,0,1,0.9
-1
-6,1.0
-7,0,1,0.9
-1
-7,1.0
-0,0,2,0.1
-1
-0,1.0
-1,0,2,0.1
-1
-1,1.0
-2,0,2,0.1
-2
-2,0.91381156
-4,0.4061385
-3,0,2,0.1
-2
-3,0.91381156
-2,0.4061385
-4,0,2,0.1
-2
-4,0.84623283
-5,0.53281325
-5,0,2,0.1
-2
-5,0.84623283
-4,0.53281325
-6,0,2,0.1
-2
-6,0.84623283
-4,0.53281325
-7,0,2,0.1
-2
-7,0.84623283
-5,0.53281325
-0,0,2,0.3
-1
-0,1.0
-1,0,2,0.3
-1
-1,1.0
-2,0,2,0.3
-2
-2,0.96152395
-4,0.27472112
-3,0,2,0.3
-2
-3,0.96152395
-2,0.27472112
-4,0,2,0.3
-2
-4,0.88583153
-5,0.4640069
-5,0,2,0.3
-2
-5,0.88583153
-4,0.4640069
-6,0,2,0.3
-2
-6,0.88583153
-4,0.4640069
-7,0,2,0.3
-2
-7,0.88583153
-5,0.4640069
-0,0,2,0.5
-1
-0,1.0
-1,0,2,0.5
-1
-1,1.0
-2,0,2,0.5
-1
-2,1.0
-3,0,2,0.5
-1
-3,1.0
-4,0,2,0.5
-2
-4,0.9486833
-5,0.3162277
-5,0,2,0.5
-2
-5,0.9486833
-4,0.3162277
-6,0,2,0.5
-2
-6,0.9486833
-4,0.3162277
-7,0,2,0.5
-2
-7,0.9486833
-5,0.3162277
-0,0,2,0.7
-1
-0,1.0
-1,0,2,0.7
-1
-1,1.0
-2,0,2,0.7
-1
-2,1.0
-3,0,2,0.7
-1
-3,1.0
-4,0,2,0.7
-1
-4,1.0
-5,0,2,0.7
-1
-5,1.0
-6,0,2,0.7
-1
-6,1.0
-7,0,2,0.7
-1
-7,1.0
-0,0,2,0.9
-1
-0,1.0
-1,0,2,0.9
-1
-1,1.0
-2,0,2,0.9
-1
-2,1.0
-3,0,2,0.9
-1
-3,1.0
-4,0,2,0.9
-1
-4,1.0
-5,0,2,0.9
-1
-5,1.0
-6,0,2,0.9
-1
-6,1.0
-7,0,2,0.9
-1
-7,1.0
-0,0,3,0.1
-1
-0,1.0
-1,0,3,0.1
-1
-1,1.0
-2,0,3,0.1
-3
-2,0.84664875
-4,0.37628835
-5,0.37628835
-3,0,3,0.1
-3
-3,0.84664875
-2,0.37628835
-5,0.37628835
-4,0,3,0.1
-3
-4,0.74683726
-5,0.47023085
-6,0.47023085
-5,0,3,0.1
-3
-5,0.74683726
-4,0.47023085
-7,0.47023085
-6,0,3,0.1
-3
-6,0.74683726
-4,0.47023085
-7,0.47023085
-7,0,3,0.1
-3
-7,0.74683726
-5,0.47023085
-6,0.47023085
-0,0,3,0.3
-1
-0,1.0
-1,0,3,0.3
-1
-1,1.0
-2,0,3,0.3
-3
-2,0.92717266
-4,0.26490647
-5,0.26490647
-3,0,3,0.3
-3
-3,0.92717266
-2,0.26490647
-5,0.26490647
-4,0,3,0.3
-3
-4,0.8035427
-5,0.42090324
-6,0.42090324
-5,0,3,0.3
-3
-5,0.80354273
-4,0.42090327
-7,0.42090327
-6,0,3,0.3
-3
-6,0.80354273
-4,0.42090327
-7,0.42090327
-7,0,3,0.3
-3
-7,0.8035427
-5,0.42090324
-6,0.42090324
-0,0,3,0.5
-1
-0,1.0
-1,0,3,0.5
-1
-1,1.0
-2,0,3,0.5
-1
-2,1.0
-3,0,3,0.5
-1
-3,1.0
-4,0,3,0.5
-3
-4,0.9045341
-5,0.3015113
-6,0.3015113
-5,0,3,0.5
-3
-5,0.9045341
-4,0.3015113
-7,0.3015113
-6,0,3,0.5
-3
-6,0.9045341
-4,0.3015113
-7,0.3015113
-7,0,3,0.5
-3
-7,0.9045341
-5,0.3015113
-6,0.3015113
-0,0,3,0.7
-1
-0,1.0
-1,0,3,0.7
-1
-1,1.0
-2,0,3,0.7
-1
-2,1.0
-3,0,3,0.7
-1
-3,1.0
-4,0,3,0.7
-1
-4,1.0
-5,0,3,0.7
-1
-5,1.0
-6,0,3,0.7
-1
-6,1.0
-7,0,3,0.7
-1
-7,1.0
-0,0,3,0.9
-1
-0,1.0
-1,0,3,0.9
-1
-1,1.0
-2,0,3,0.9
-1
-2,1.0
-3,0,3,0.9
-1
-3,1.0
-4,0,3,0.9
-1
-4,1.0
-5,0,3,0.9
-1
-5,1.0
-6,0,3,0.9
-1
-6,1.0
-7,0,3,0.9
-1
-7,1.0
-0,0,4,0.1
-1
-0,1.0
-1,0,4,0.1
-1
-1,1.0
-2,0,4,0.1
-4
-2,0.7924058
-3,0.35218036
-4,0.35218036
-5,0.35218036
-3,0,4,0.1
-4
-3,0.79240584
-2,0.3521804
-5,0.3521804
-6,0.3521804
-4,0,4,0.1
-4
-4,0.7088104
-5,0.44628802
-6,0.44628802
-2,0.31502685
-5,0,4,0.1
-4
-5,0.7088104
-4,0.44628802
-7,0.44628802
-2,0.31502685
-6,0,4,0.1
-4
-6,0.7088104
-4,0.44628802
-7,0.44628802
-2,0.31502685
-7,0,4,0.1
-4
-7,0.7088104
-5,0.44628802
-6,0.44628802
-3,0.31502685
-0,0,4,0.3
-1
-0,1.0
-1,0,4,0.3
-1
-1,1.0
-2,0,4,0.3
-4
-2,0.8962582
-3,0.25607374
-4,0.25607374
-5,0.25607374
-3,0,4,0.3
-4
-3,0.8962582
-2,0.25607374
-5,0.25607374
-6,0.25607374
-4,0,4,0.3
-4
-4,0.7831679
-5,0.41023073
-6,0.41023073
-2,0.22376224
-5,0,4,0.3
-4
-5,0.7831679
-4,0.41023073
-7,0.41023073
-2,0.22376224
-6,0,4,0.3
-4
-6,0.7831679
-4,0.41023073
-7,0.41023073
-2,0.22376224
-7,0,4,0.3
-4
-7,0.7831679
-5,0.41023073
-6,0.41023073
-3,0.22376224
-0,0,4,0.5
-1
-0,1.0
-1,0,4,0.5
-1
-1,1.0
-2,0,4,0.5
-1
-2,1.0
-3,0,4,0.5
-1
-3,1.0
-4,0,4,0.5
-3
-4,0.9045341
-5,0.3015113
-6,0.3015113
-5,0,4,0.5
-3
-5,0.9045341
-4,0.3015113
-7,0.3015113
-6,0,4,0.5
-3
-6,0.9045341
-4,0.3015113
-7,0.3015113
-7,0,4,0.5
-3
-7,0.9045341
-5,0.3015113
-6,0.3015113
-0,0,4,0.7
-1
-0,1.0
-1,0,4,0.7
-1
-1,1.0
-2,0,4,0.7
-1
-2,1.0
-3,0,4,0.7
-1
-3,1.0
-4,0,4,0.7
-1
-4,1.0
-5,0,4,0.7
-1
-5,1.0
-6,0,4,0.7
-1
-6,1.0
-7,0,4,0.7
-1
-7,1.0
-0,0,4,0.9
-1
-0,1.0
-1,0,4,0.9
-1
-1,1.0
-2,0,4,0.9
-1
-2,1.0
-3,0,4,0.9
-1
-3,1.0
-4,0,4,0.9
-1
-4,1.0
-5,0,4,0.9
-1
-5,1.0
-6,0,4,0.9
-1
-6,1.0
-7,0,4,0.9
-1
-7,1.0
-0,0,5,0.1
-1
-0,1.0
-1,0,5,0.1
-1
-1,1.0
-2,0,5,0.1
-5
-2,0.7474093
-3,0.33218193
-4,0.33218193
-5,0.33218193
-6,0.33218193
-3,0,5,0.1
-5
-3,0.74740934
-2,0.33218196
-5,0.33218196
-6,0.33218196
-7,0.33218196
-4,0,5,0.1
-5
-4,0.697137
-5,0.4389381
-6,0.4389381
-2,0.30983868
-7,0.18073922
-5,0,5,0.1
-5
-5,0.67605716
-4,0.42566562
-7,0.42566562
-2,0.30046988
-3,0.30046988
-6,0,5,0.1
-5
-6,0.67605716
-4,0.42566562
-7,0.42566562
-2,0.30046988
-3,0.30046988
-7,0,5,0.1
-5
-7,0.697137
-5,0.4389381
-6,0.4389381
-3,0.30983868
-4,0.18073922
-0,0,5,0.3
-1
-0,1.0
-1,0,5,0.3
-1
-1,1.0
-2,0,5,0.3
-5
-2,0.86824316
-3,0.24806947
-4,0.24806947
-5,0.24806947
-6,0.24806947
-3,0,5,0.3
-5
-3,0.8682432
-2,0.24806948
-5,0.24806948
-6,0.24806948
-7,0.24806948
-4,0,5,0.3
-5
-4,0.7826238
-5,0.40994576
-6,0.40994576
-2,0.2236068
-7,0.037267767
-5,0,5,0.3
-5
-5,0.7642683
-4,0.40033093
-7,0.40033093
-2,0.21836235
-3,0.21836235
-6,0,5,0.3
-5
-6,0.7642683
-4,0.40033093
-7,0.40033093
-2,0.21836235
-3,0.21836235
-7,0,5,0.3
-5
-7,0.7826238
-5,0.40994576
-6,0.40994576
-3,0.2236068
-4,0.037267767
-0,0,5,0.5
-1
-0,1.0
-1,0,5,0.5
-1
-1,1.0
-2,0,5,0.5
-1
-2,1.0
-3,0,5,0.5
-1
-3,1.0
-4,0,5,0.5
-3
-4,0.9045341
-5,0.3015113
-6,0.3015113
-5,0,5,0.5
-3
-5,0.9045341
-4,0.3015113
-7,0.3015113
-6,0,5,0.5
-3
-6,0.9045341
-4,0.3015113
-7,0.3015113
-7,0,5,0.5
-3
-7,0.9045341
-5,0.3015113
-6,0.3015113
-0,0,5,0.7
-1
-0,1.0
-1,0,5,0.7
-1
-1,1.0
-2,0,5,0.7
-1
-2,1.0
-3,0,5,0.7
-1
-3,1.0
-4,0,5,0.7
-1
-4,1.0
-5,0,5,0.7
-1
-5,1.0
-6,0,5,0.7
-1
-6,1.0
-7,0,5,0.7
-1
-7,1.0
-0,0,5,0.9
-1
-0,1.0
-1,0,5,0.9
-1
-1,1.0
-2,0,5,0.9
-1
-2,1.0
-3,0,5,0.9
-1
-3,1.0
-4,0,5,0.9
-1
-4,1.0
-5,0,5,0.9
-1
-5,1.0
-6,0,5,0.9
-1
-6,1.0
-7,0,5,0.9
-1
-7,1.0
-0,0,6,0.1
-1
-0,1.0
-1,0,6,0.1
-1
-1,1.0
-2,0,6,0.1
-5
-2,0.7474093
-3,0.33218193
-4,0.33218193
-5,0.33218193
-6,0.33218193
-3,0,6,0.1
-5
-3,0.74740934
-2,0.33218196
-5,0.33218196
-6,0.33218196
-7,0.33218196
-4,0,6,0.1
-5
-4,0.697137
-5,0.4389381
-6,0.4389381
-2,0.30983868
-7,0.18073922
-5,0,6,0.1
-6
-5,0.6659059
-4,0.41927406
-7,0.41927406
-2,0.2959582
-3,0.2959582
-6,0.17264226
-6,0,6,0.1
-6
-6,0.6659059
-4,0.41927406
-7,0.41927406
-2,0.2959582
-3,0.2959582
-5,0.17264226
-7,0,6,0.1
-5
-7,0.697137
-5,0.4389381
-6,0.4389381
-3,0.30983868
-4,0.18073922
-0,0,6,0.3
-1
-0,1.0
-1,0,6,0.3
-1
-1,1.0
-2,0,6,0.3
-5
-2,0.86824316
-3,0.24806947
-4,0.24806947
-5,0.24806947
-6,0.24806947
-3,0,6,0.3
-5
-3,0.8682432
-2,0.24806948
-5,0.24806948
-6,0.24806948
-7,0.24806948
-4,0,6,0.3
-5
-4,0.7826238
-5,0.40994576
-6,0.40994576
-2,0.2236068
-7,0.037267767
-5,0,6,0.3
-6
-5,0.76376265
-4,0.40006608
-7,0.40006608
-2,0.2182179
-3,0.2182179
-6,0.036369618
-6,0,6,0.3
-6
-6,0.76376265
-4,0.40006608
-7,0.40006608
-2,0.2182179
-3,0.2182179
-5,0.036369618
-7,0,6,0.3
-5
-7,0.7826238
-5,0.40994576
-6,0.40994576
-3,0.2236068
-4,0.037267767
-0,0,6,0.5
-1
-0,1.0
-1,0,6,0.5
-1
-1,1.0
-2,0,6,0.5
-1
-2,1.0
-3,0,6,0.5
-1
-3,1.0
-4,0,6,0.5
-3
-4,0.9045341
-5,0.3015113
-6,0.3015113
-5,0,6,0.5
-3
-5,0.9045341
-4,0.3015113
-7,0.3015113
-6,0,6,0.5
-3
-6,0.9045341
-4,0.3015113
-7,0.3015113
-7,0,6,0.5
-3
-7,0.9045341
-5,0.3015113
-6,0.3015113
-0,0,6,0.7
-1
-0,1.0
-1,0,6,0.7
-1
-1,1.0
-2,0,6,0.7
-1
-2,1.0
-3,0,6,0.7
-1
-3,1.0
-4,0,6,0.7
-1
-4,1.0
-5,0,6,0.7
-1
-5,1.0
-6,0,6,0.7
-1
-6,1.0
-7,0,6,0.7
-1
-7,1.0
-0,0,6,0.9
-1
-0,1.0
-1,0,6,0.9
-1
-1,1.0
-2,0,6,0.9
-1
-2,1.0
-3,0,6,0.9
-1
-3,1.0
-4,0,6,0.9
-1
-4,1.0
-5,0,6,0.9
-1
-5,1.0
-6,0,6,0.9
-1
-6,1.0
-7,0,6,0.9
-1
-7,1.0
-0,0,7,0.1
-1
-0,1.0
-1,0,7,0.1
-1
-1,1.0
-2,0,7,0.1
-5
-2,0.7474093
-3,0.33218193
-4,0.33218193
-5,0.33218193
-6,0.33218193
-3,0,7,0.1
-5
-3,0.74740934
-2,0.33218196
-5,0.33218196
-6,0.33218196
-7,0.33218196
-4,0,7,0.1
-5
-4,0.697137
-5,0.4389381
-6,0.4389381
-2,0.30983868
-7,0.18073922
-5,0,7,0.1
-6
-5,0.6659059
-4,0.41927406
-7,0.41927406
-2,0.2959582
-3,0.2959582
-6,0.17264226
-6,0,7,0.1
-6
-6,0.6659059
-4,0.41927406
-7,0.41927406
-2,0.2959582
-3,0.2959582
-5,0.17264226
-7,0,7,0.1
-5
-7,0.697137
-5,0.4389381
-6,0.4389381
-3,0.30983868
-4,0.18073922
-0,0,7,0.3
-1
-0,1.0
-1,0,7,0.3
-1
-1,1.0
-2,0,7,0.3
-5
-2,0.86824316
-3,0.24806947
-4,0.24806947
-5,0.24806947
-6,0.24806947
-3,0,7,0.3
-5
-3,0.8682432
-2,0.24806948
-5,0.24806948
-6,0.24806948
-7,0.24806948
-4,0,7,0.3
-5
-4,0.7826238
-5,0.40994576
-6,0.40994576
-2,0.2236068
-7,0.037267767
-5,0,7,0.3
-6
-5,0.76376265
-4,0.40006608
-7,0.40006608
-2,0.2182179
-3,0.2182179
-6,0.036369618
-6,0,7,0.3
-6
-6,0.76376265
-4,0.40006608
-7,0.40006608
-2,0.2182179
-3,0.2182179
-5,0.036369618
-7,0,7,0.3
-5
-7,0.7826238
-5,0.40994576
-6,0.40994576
-3,0.2236068
-4,0.037267767
-0,0,7,0.5
-1
-0,1.0
-1,0,7,0.5
-1
-1,1.0
-2,0,7,0.5
-1
-2,1.0
-3,0,7,0.5
-1
-3,1.0
-4,0,7,0.5
-3
-4,0.9045341
-5,0.3015113
-6,0.3015113
-5,0,7,0.5
-3
-5,0.9045341
-4,0.3015113
-7,0.3015113
-6,0,7,0.5
-3
-6,0.9045341
-4,0.3015113
-7,0.3015113
-7,0,7,0.5
-3
-7,0.9045341
-5,0.3015113
-6,0.3015113
-0,0,7,0.7
-1
-0,1.0
-1,0,7,0.7
-1
-1,1.0
-2,0,7,0.7
-1
-2,1.0
-3,0,7,0.7
-1
-3,1.0
-4,0,7,0.7
-1
-4,1.0
-5,0,7,0.7
-1
-5,1.0
-6,0,7,0.7
-1
-6,1.0
-7,0,7,0.7
-1
-7,1.0
-0,0,7,0.9
-1
-0,1.0
-1,0,7,0.9
-1
-1,1.0
-2,0,7,0.9
-1
-2,1.0
-3,0,7,0.9
-1
-3,1.0
-4,0,7,0.9
-1
-4,1.0
-5,0,7,0.9
-1
-5,1.0
-6,0,7,0.9
-1
-6,1.0
-7,0,7,0.9
-1
-7,1.0
-0,0,8,0.1
-1
-0,1.0
-1,0,8,0.1
-1
-1,1.0
-2,0,8,0.1
-5
-2,0.7474093
-3,0.33218193
-4,0.33218193
-5,0.33218193
-6,0.33218193
-3,0,8,0.1
-5
-3,0.74740934
-2,0.33218196
-5,0.33218196
-6,0.33218196
-7,0.33218196
-4,0,8,0.1
-5
-4,0.697137
-5,0.4389381
-6,0.4389381
-2,0.30983868
-7,0.18073922
-5,0,8,0.1
-6
-5,0.6659059
-4,0.41927406
-7,0.41927406
-2,0.2959582
-3,0.2959582
-6,0.17264226
-6,0,8,0.1
-6
-6,0.6659059
-4,0.41927406
-7,0.41927406
-2,0.2959582
-3,0.2959582
-5,0.17264226
-7,0,8,0.1
-5
-7,0.697137
-5,0.4389381
-6,0.4389381
-3,0.30983868
-4,0.18073922
-0,0,8,0.3
-1
-0,1.0
-1,0,8,0.3
-1
-1,1.0
-2,0,8,0.3
-5
-2,0.86824316
-3,0.24806947
-4,0.24806947
-5,0.24806947
-6,0.24806947
-3,0,8,0.3
-5
-3,0.8682432
-2,0.24806948
-5,0.24806948
-6,0.24806948
-7,0.24806948
-4,0,8,0.3
-5
-4,0.7826238
-5,0.40994576
-6,0.40994576
-2,0.2236068
-7,0.037267767
-5,0,8,0.3
-6
-5,0.76376265
-4,0.40006608
-7,0.40006608
-2,0.2182179
-3,0.2182179
-6,0.036369618
-6,0,8,0.3
-6
-6,0.76376265
-4,0.40006608
-7,0.40006608
-2,0.2182179
-3,0.2182179
-5,0.036369618
-7,0,8,0.3
-5
-7,0.7826238
-5,0.40994576
-6,0.40994576
-3,0.2236068
-4,0.037267767
-0,0,8,0.5
-1
-0,1.0
-1,0,8,0.5
-1
-1,1.0
-2,0,8,0.5
-1
-2,1.0
-3,0,8,0.5
-1
-3,1.0
-4,0,8,0.5
-3
-4,0.9045341
-5,0.3015113
-6,0.3015113
-5,0,8,0.5
-3
-5,0.9045341
-4,0.3015113
-7,0.3015113
-6,0,8,0.5
-3
-6,0.9045341
-4,0.3015113
-7,0.3015113
-7,0,8,0.5
-3
-7,0.9045341
-5,0.3015113
-6,0.3015113
-0,0,8,0.7
-1
-0,1.0
-1,0,8,0.7
-1
-1,1.0
-2,0,8,0.7
-1
-2,1.0
-3,0,8,0.7
-1
-3,1.0
-4,0,8,0.7
-1
-4,1.0
-5,0,8,0.7
-1
-5,1.0
-6,0,8,0.7
-1
-6,1.0
-7,0,8,0.7
-1
-7,1.0
-0,0,8,0.9
-1
-0,1.0
-1,0,8,0.9
-1
-1,1.0
-2,0,8,0.9
-1
-2,1.0
-3,0,8,0.9
-1
-3,1.0
-4,0,8,0.9
-1
-4,1.0
-5,0,8,0.9
-1
-5,1.0
-6,0,8,0.9
-1
-6,1.0
-7,0,8,0.9
-1
-7,1.0
-0,1,1,0.1
-1
-0,1.0
-1,1,1,0.1
-1
-1,1.0
-2,1,1,0.1
-1
-2,1.0
-3,1,1,0.1
-1
-3,1.0
-4,1,1,0.1
-1
-4,1.0
-5,1,1,0.1
-1
-5,1.0
-6,1,1,0.1
-1
-6,1.0
-7,1,1,0.1
-1
-7,1.0
-0,1,1,0.3
-1
-0,1.0
-1,1,1,0.3
-1
-1,1.0
-2,1,1,0.3
-1
-2,1.0
-3,1,1,0.3
-1
-3,1.0
-4,1,1,0.3
-1
-4,1.0
-5,1,1,0.3
-1
-5,1.0
-6,1,1,0.3
-1
-6,1.0
-7,1,1,0.3
-1
-7,1.0
-0,1,1,0.5
-1
-0,1.0
-1,1,1,0.5
-1
-1,1.0
-2,1,1,0.5
-1
-2,1.0
-3,1,1,0.5
-1
-3,1.0
-4,1,1,0.5
-1
-4,1.0
-5,1,1,0.5
-1
-5,1.0
-6,1,1,0.5
-1
-6,1.0
-7,1,1,0.5
-1
-7,1.0
-0,1,1,0.7
-1
-0,1.0
-1,1,1,0.7
-1
-1,1.0
-2,1,1,0.7
-1
-2,1.0
-3,1,1,0.7
-1
-3,1.0
-4,1,1,0.7
-1
-4,1.0
-5,1,1,0.7
-1
-5,1.0
-6,1,1,0.7
-1
-6,1.0
-7,1,1,0.7
-1
-7,1.0
-0,1,1,0.9
-1
-0,1.0
-1,1,1,0.9
-1
-1,1.0
-2,1,1,0.9
-1
-2,1.0
-3,1,1,0.9
-1
-3,1.0
-4,1,1,0.9
-1
-4,1.0
-5,1,1,0.9
-1
-5,1.0
-6,1,1,0.9
-1
-6,1.0
-7,1,1,0.9
-1
-7,1.0
-0,1,2,0.1
-1
-0,1.0
-1,1,2,0.1
-1
-1,1.0
-2,1,2,0.1
-2
-2,0.91381156
-4,0.4061385
-3,1,2,0.1
-2
-3,0.91381156
-2,0.4061385
-4,1,2,0.1
-2
-4,0.84623283
-5,0.53281325
-5,1,2,0.1
-2
-5,0.84623283
-4,0.53281325
-6,1,2,0.1
-2
-6,0.84623283
-4,0.53281325
-7,1,2,0.1
-2
-7,0.84623283
-5,0.53281325
-0,1,2,0.3
-1
-0,1.0
-1,1,2,0.3
-1
-1,1.0
-2,1,2,0.3
-2
-2,0.96152395
-4,0.27472112
-3,1,2,0.3
-2
-3,0.96152395
-2,0.27472112
-4,1,2,0.3
-2
-4,0.88583153
-5,0.4640069
-5,1,2,0.3
-2
-5,0.88583153
-4,0.4640069
-6,1,2,0.3
-2
-6,0.88583153
-4,0.4640069
-7,1,2,0.3
-2
-7,0.88583153
-5,0.4640069
-0,1,2,0.5
-1
-0,1.0
-1,1,2,0.5
-1
-1,1.0
-2,1,2,0.5
-1
-2,1.0
-3,1,2,0.5
-1
-3,1.0
-4,1,2,0.5
-2
-4,0.9486833
-5,0.3162277
-5,1,2,0.5
-2
-5,0.9486833
-4,0.3162277
-6,1,2,0.5
-2
-6,0.9486833
-4,0.3162277
-7,1,2,0.5
-2
-7,0.9486833
-5,0.3162277
-0,1,2,0.7
-1
-0,1.0
-1,1,2,0.7
-1
-1,1.0
-2,1,2,0.7
-1
-2,1.0
-3,1,2,0.7
-1
-3,1.0
-4,1,2,0.7
-1
-4,1.0
-5,1,2,0.7
-1
-5,1.0
-6,1,2,0.7
-1
-6,1.0
-7,1,2,0.7
-1
-7,1.0
-0,1,2,0.9
-1
-0,1.0
-1,1,2,0.9
-1
-1,1.0
-2,1,2,0.9
-1
-2,1.0
-3,1,2,0.9
-1
-3,1.0
-4,1,2,0.9
-1
-4,1.0
-5,1,2,0.9
-1
-5,1.0
-6,1,2,0.9
-1
-6,1.0
-7,1,2,0.9
-1
-7,1.0
-0,1,3,0.1
-1
-0,1.0
-1,1,3,0.1
-1
-1,1.0
-2,1,3,0.1
-3
-2,0.84664875
-4,0.37628835
-5,0.37628835
-3,1,3,0.1
-3
-3,0.84664875
-2,0.37628835
-5,0.37628835
-4,1,3,0.1
-3
-4,0.74683726
-5,0.47023085
-6,0.47023085
-5,1,3,0.1
-3
-5,0.74683726
-4,0.47023085
-7,0.47023085
-6,1,3,0.1
-3
-6,0.74683726
-4,0.47023085
-7,0.47023085
-7,1,3,0.1
-3
-7,0.74683726
-5,0.47023085
-6,0.47023085
-0,1,3,0.3
-1
-0,1.0
-1,1,3,0.3
-1
-1,1.0
-2,1,3,0.3
-3
-2,0.92717266
-4,0.26490647
-5,0.26490647
-3,1,3,0.3
-3
-3,0.92717266
-2,0.26490647
-5,0.26490647
-4,1,3,0.3
-3
-4,0.8035427
-5,0.42090324
-6,0.42090324
-5,1,3,0.3
-3
-5,0.80354273
-4,0.42090327
-7,0.42090327
-6,1,3,0.3
-3
-6,0.80354273
-4,0.42090327
-7,0.42090327
-7,1,3,0.3
-3
-7,0.8035427
-5,0.42090324
-6,0.42090324
-0,1,3,0.5
-1
-0,1.0
-1,1,3,0.5
-1
-1,1.0
-2,1,3,0.5
-1
-2,1.0
-3,1,3,0.5
-1
-3,1.0
-4,1,3,0.5
-3
-4,0.9045341
-5,0.3015113
-6,0.3015113
-5,1,3,0.5
-3
-5,0.9045341
-4,0.3015113
-7,0.3015113
-6,1,3,0.5
-3
-6,0.9045341
-4,0.3015113
-7,0.3015113
-7,1,3,0.5
-3
-7,0.9045341
-5,0.3015113
-6,0.3015113
-0,1,3,0.7
-1
-0,1.0
-1,1,3,0.7
-1
-1,1.0
-2,1,3,0.7
-1
-2,1.0
-3,1,3,0.7
-1
-3,1.0
-4,1,3,0.7
-1
-4,1.0
-5,1,3,0.7
-1
-5,1.0
-6,1,3,0.7
-1
-6,1.0
-7,1,3,0.7
-1
-7,1.0
-0,1,3,0.9
-1
-0,1.0
-1,1,3,0.9
-1
-1,1.0
-2,1,3,0.9
-1
-2,1.0
-3,1,3,0.9
-1
-3,1.0
-4,1,3,0.9
-1
-4,1.0
-5,1,3,0.9
-1
-5,1.0
-6,1,3,0.9
-1
-6,1.0
-7,1,3,0.9
-1
-7,1.0
-0,1,4,0.1
-1
-0,1.0
-1,1,4,0.1
-1
-1,1.0
-2,1,4,0.1
-4
-2,0.7924058
-3,0.35218036
-4,0.35218036
-5,0.35218036
-3,1,4,0.1
-4
-3,0.79240584
-2,0.3521804
-5,0.3521804
-6,0.3521804
-4,1,4,0.1
-4
-4,0.7088104
-5,0.44628802
-6,0.44628802
-2,0.31502685
-5,1,4,0.1
-4
-5,0.7088104
-4,0.44628802
-7,0.44628802
-2,0.31502685
-6,1,4,0.1
-4
-6,0.7088104
-4,0.44628802
-7,0.44628802
-2,0.31502685
-7,1,4,0.1
-4
-7,0.7088104
-5,0.44628802
-6,0.44628802
-3,0.31502685
-0,1,4,0.3
-1
-0,1.0
-1,1,4,0.3
-1
-1,1.0
-2,1,4,0.3
-4
-2,0.8962582
-3,0.25607374
-4,0.25607374
-5,0.25607374
-3,1,4,0.3
-4
-3,0.8962582
-2,0.25607374
-5,0.25607374
-6,0.25607374
-4,1,4,0.3
-4
-4,0.7831679
-5,0.41023073
-6,0.41023073
-2,0.22376224
-5,1,4,0.3
-4
-5,0.7831679
-4,0.41023073
-7,0.41023073
-2,0.22376224
-6,1,4,0.3
-4
-6,0.7831679
-4,0.41023073
-7,0.41023073
-2,0.22376224
-7,1,4,0.3
-4
-7,0.7831679
-5,0.41023073
-6,0.41023073
-3,0.22376224
-0,1,4,0.5
-1
-0,1.0
-1,1,4,0.5
-1
-1,1.0
-2,1,4,0.5
-1
-2,1.0
-3,1,4,0.5
-1
-3,1.0
-4,1,4,0.5
-3
-4,0.9045341
-5,0.3015113
-6,0.3015113
-5,1,4,0.5
-3
-5,0.9045341
-4,0.3015113
-7,0.3015113
-6,1,4,0.5
-3
-6,0.9045341
-4,0.3015113
-7,0.3015113
-7,1,4,0.5
-3
-7,0.9045341
-5,0.3015113
-6,0.3015113
-0,1,4,0.7
-1
-0,1.0
-1,1,4,0.7
-1
-1,1.0
-2,1,4,0.7
-1
-2,1.0
-3,1,4,0.7
-1
-3,1.0
-4,1,4,0.7
-1
-4,1.0
-5,1,4,0.7
-1
-5,1.0
-6,1,4,0.7
-1
-6,1.0
-7,1,4,0.7
-1
-7,1.0
-0,1,4,0.9
-1
-0,1.0
-1,1,4,0.9
-1
-1,1.0
-2,1,4,0.9
-1
-2,1.0
-3,1,4,0.9
-1
-3,1.0
-4,1,4,0.9
-1
-4,1.0
-5,1,4,0.9
-1
-5,1.0
-6,1,4,0.9
-1
-6,1.0
-7,1,4,0.9
-1
-7,1.0
-0,1,5,0.1
-1
-0,1.0
-1,1,5,0.1
-1
-1,1.0
-2,1,5,0.1
-5
-2,0.7474093
-3,0.33218193
-4,0.33218193
-5,0.33218193
-6,0.33218193
-3,1,5,0.1
-5
-3,0.74740934
-2,0.33218196
-5,0.33218196
-6,0.33218196
-7,0.33218196
-4,1,5,0.1
-5
-4,0.697137
-5,0.4389381
-6,0.4389381
-2,0.30983868
-7,0.18073922
-5,1,5,0.1
-5
-5,0.67605716
-4,0.42566562
-7,0.42566562
-2,0.30046988
-3,0.30046988
-6,1,5,0.1
-5
-6,0.67605716
-4,0.42566562
-7,0.42566562
-2,0.30046988
-3,0.30046988
-7,1,5,0.1
-5
-7,0.697137
-5,0.4389381
-6,0.4389381
-3,0.30983868
-4,0.18073922
-0,1,5,0.3
-1
-0,1.0
-1,1,5,0.3
-1
-1,1.0
-2,1,5,0.3
-5
-2,0.86824316
-3,0.24806947
-4,0.24806947
-5,0.24806947
-6,0.24806947
-3,1,5,0.3
-5
-3,0.8682432
-2,0.24806948
-5,0.24806948
-6,0.24806948
-7,0.24806948
-4,1,5,0.3
-5
-4,0.7826238
-5,0.40994576
-6,0.40994576
-2,0.2236068
-7,0.037267767
-5,1,5,0.3
-5
-5,0.7642683
-4,0.40033093
-7,0.40033093
-2,0.21836235
-3,0.21836235
-6,1,5,0.3
-5
-6,0.7642683
-4,0.40033093
-7,0.40033093
-2,0.21836235
-3,0.21836235
-7,1,5,0.3
-5
-7,0.7826238
-5,0.40994576
-6,0.40994576
-3,0.2236068
-4,0.037267767
-0,1,5,0.5
-1
-0,1.0
-1,1,5,0.5
-1
-1,1.0
-2,1,5,0.5
-1
-2,1.0
-3,1,5,0.5
-1
-3,1.0
-4,1,5,0.5
-3
-4,0.9045341
-5,0.3015113
-6,0.3015113
-5,1,5,0.5
-3
-5,0.9045341
-4,0.3015113
-7,0.3015113
-6,1,5,0.5
-3
-6,0.9045341
-4,0.3015113
-7,0.3015113
-7,1,5,0.5
-3
-7,0.9045341
-5,0.3015113
-6,0.3015113
-0,1,5,0.7
-1
-0,1.0
-1,1,5,0.7
-1
-1,1.0
-2,1,5,0.7
-1
-2,1.0
-3,1,5,0.7
-1
-3,1.0
-4,1,5,0.7
-1
-4,1.0
-5,1,5,0.7
-1
-5,1.0
-6,1,5,0.7
-1
-6,1.0
-7,1,5,0.7
-1
-7,1.0
-0,1,5,0.9
-1
-0,1.0
-1,1,5,0.9
-1
-1,1.0
-2,1,5,0.9
-1
-2,1.0
-3,1,5,0.9
-1
-3,1.0
-4,1,5,0.9
-1
-4,1.0
-5,1,5,0.9
-1
-5,1.0
-6,1,5,0.9
-1
-6,1.0
-7,1,5,0.9
-1
-7,1.0
-0,1,6,0.1
-1
-0,1.0
-1,1,6,0.1
-1
-1,1.0
-2,1,6,0.1
-5
-2,0.7474093
-3,0.33218193
-4,0.33218193
-5,0.33218193
-6,0.33218193
-3,1,6,0.1
-5
-3,0.74740934
-2,0.33218196
-5,0.33218196
-6,0.33218196
-7,0.33218196
-4,1,6,0.1
-5
-4,0.697137
-5,0.4389381
-6,0.4389381
-2,0.30983868
-7,0.18073922
-5,1,6,0.1
-6
-5,0.6659059
-4,0.41927406
-7,0.41927406
-2,0.2959582
-3,0.2959582
-6,0.17264226
-6,1,6,0.1
-6
-6,0.6659059
-4,0.41927406
-7,0.41927406
-2,0.2959582
-3,0.2959582
-5,0.17264226
-7,1,6,0.1
-5
-7,0.697137
-5,0.4389381
-6,0.4389381
-3,0.30983868
-4,0.18073922
-0,1,6,0.3
-1
-0,1.0
-1,1,6,0.3
-1
-1,1.0
-2,1,6,0.3
-5
-2,0.86824316
-3,0.24806947
-4,0.24806947
-5,0.24806947
-6,0.24806947
-3,1,6,0.3
-5
-3,0.8682432
-2,0.24806948
-5,0.24806948
-6,0.24806948
-7,0.24806948
-4,1,6,0.3
-5
-4,0.7826238
-5,0.40994576
-6,0.40994576
-2,0.2236068
-7,0.037267767
-5,1,6,0.3
-6
-5,0.76376265
-4,0.40006608
-7,0.40006608
-2,0.2182179
-3,0.2182179
-6,0.036369618
-6,1,6,0.3
-6
-6,0.76376265
-4,0.40006608
-7,0.40006608
-2,0.2182179
-3,0.2182179
-5,0.036369618
-7,1,6,0.3
-5
-7,0.7826238
-5,0.40994576
-6,0.40994576
-3,0.2236068
-4,0.037267767
-0,1,6,0.5
-1
-0,1.0
-1,1,6,0.5
-1
-1,1.0
-2,1,6,0.5
-1
-2,1.0
-3,1,6,0.5
-1
-3,1.0
-4,1,6,0.5
-3
-4,0.9045341
-5,0.3015113
-6,0.3015113
-5,1,6,0.5
-3
-5,0.9045341
-4,0.3015113
-7,0.3015113
-6,1,6,0.5
-3
-6,0.9045341
-4,0.3015113
-7,0.3015113
-7,1,6,0.5
-3
-7,0.9045341
-5,0.3015113
-6,0.3015113
-0,1,6,0.7
-1
-0,1.0
-1,1,6,0.7
-1
-1,1.0
-2,1,6,0.7
-1
-2,1.0
-3,1,6,0.7
-1
-3,1.0
-4,1,6,0.7
-1
-4,1.0
-5,1,6,0.7
-1
-5,1.0
-6,1,6,0.7
-1
-6,1.0
-7,1,6,0.7
-1
-7,1.0
-0,1,6,0.9
-1
-0,1.0
-1,1,6,0.9
-1
-1,1.0
-2,1,6,0.9
-1
-2,1.0
-3,1,6,0.9
-1
-3,1.0
-4,1,6,0.9
-1
-4,1.0
-5,1,6,0.9
-1
-5,1.0
-6,1,6,0.9
-1
-6,1.0
-7,1,6,0.9
-1
-7,1.0
-0,1,7,0.1
-1
-0,1.0
-1,1,7,0.1
-1
-1,1.0
-2,1,7,0.1
-5
-2,0.7474093
-3,0.33218193
-4,0.33218193
-5,0.33218193
-6,0.33218193
-3,1,7,0.1
-5
-3,0.74740934
-2,0.33218196
-5,0.33218196
-6,0.33218196
-7,0.33218196
-4,1,7,0.1
-5
-4,0.697137
-5,0.4389381
-6,0.4389381
-2,0.30983868
-7,0.18073922
-5,1,7,0.1
-6
-5,0.6659059
-4,0.41927406
-7,0.41927406
-2,0.2959582
-3,0.2959582
-6,0.17264226
-6,1,7,0.1
-6
-6,0.6659059
-4,0.41927406
-7,0.41927406
-2,0.2959582
-3,0.2959582
-5,0.17264226
-7,1,7,0.1
-5
-7,0.697137
-5,0.4389381
-6,0.4389381
-3,0.30983868
-4,0.18073922
-0,1,7,0.3
-1
-0,1.0
-1,1,7,0.3
-1
-1,1.0
-2,1,7,0.3
-5
-2,0.86824316
-3,0.24806947
-4,0.24806947
-5,0.24806947
-6,0.24806947
-3,1,7,0.3
-5
-3,0.8682432
-2,0.24806948
-5,0.24806948
-6,0.24806948
-7,0.24806948
-4,1,7,0.3
-5
-4,0.7826238
-5,0.40994576
-6,0.40994576
-2,0.2236068
-7,0.037267767
-5,1,7,0.3
-6
-5,0.76376265
-4,0.40006608
-7,0.40006608
-2,0.2182179
-3,0.2182179
-6,0.036369618
-6,1,7,0.3
-6
-6,0.76376265
-4,0.40006608
-7,0.40006608
-2,0.2182179
-3,0.2182179
-5,0.036369618
-7,1,7,0.3
-5
-7,0.7826238
-5,0.40994576
-6,0.40994576
-3,0.2236068
-4,0.037267767
-0,1,7,0.5
-1
-0,1.0
-1,1,7,0.5
-1
-1,1.0
-2,1,7,0.5
-1
-2,1.0
-3,1,7,0.5
-1
-3,1.0
-4,1,7,0.5
-3
-4,0.9045341
-5,0.3015113
-6,0.3015113
-5,1,7,0.5
-3
-5,0.9045341
-4,0.3015113
-7,0.3015113
-6,1,7,0.5
-3
-6,0.9045341
-4,0.3015113
-7,0.3015113
-7,1,7,0.5
-3
-7,0.9045341
-5,0.3015113
-6,0.3015113
-0,1,7,0.7
-1
-0,1.0
-1,1,7,0.7
-1
-1,1.0
-2,1,7,0.7
-1
-2,1.0
-3,1,7,0.7
-1
-3,1.0
-4,1,7,0.7
-1
-4,1.0
-5,1,7,0.7
-1
-5,1.0
-6,1,7,0.7
-1
-6,1.0
-7,1,7,0.7
-1
-7,1.0
-0,1,7,0.9
-1
-0,1.0
-1,1,7,0.9
-1
-1,1.0
-2,1,7,0.9
-1
-2,1.0
-3,1,7,0.9
-1
-3,1.0
-4,1,7,0.9
-1
-4,1.0
-5,1,7,0.9
-1
-5,1.0
-6,1,7,0.9
-1
-6,1.0
-7,1,7,0.9
-1
-7,1.0
-0,1,8,0.1
-1
-0,1.0
-1,1,8,0.1
-1
-1,1.0
-2,1,8,0.1
-5
-2,0.7474093
-3,0.33218193
-4,0.33218193
-5,0.33218193
-6,0.33218193
-3,1,8,0.1
-5
-3,0.74740934
-2,0.33218196
-5,0.33218196
-6,0.33218196
-7,0.33218196
-4,1,8,0.1
-5
-4,0.697137
-5,0.4389381
-6,0.4389381
-2,0.30983868
-7,0.18073922
-5,1,8,0.1
-6
-5,0.6659059
-4,0.41927406
-7,0.41927406
-2,0.2959582
-3,0.2959582
-6,0.17264226
-6,1,8,0.1
-6
-6,0.6659059
-4,0.41927406
-7,0.41927406
-2,0.2959582
-3,0.2959582
-5,0.17264226
-7,1,8,0.1
-5
-7,0.697137
-5,0.4389381
-6,0.4389381
-3,0.30983868
-4,0.18073922
-0,1,8,0.3
-1
-0,1.0
-1,1,8,0.3
-1
-1,1.0
-2,1,8,0.3
-5
-2,0.86824316
-3,0.24806947
-4,0.24806947
-5,0.24806947
-6,0.24806947
-3,1,8,0.3
-5
-3,0.8682432
-2,0.24806948
-5,0.24806948
-6,0.24806948
-7,0.24806948
-4,1,8,0.3
-5
-4,0.7826238
-5,0.40994576
-6,0.40994576
-2,0.2236068
-7,0.037267767
-5,1,8,0.3
-6
-5,0.76376265
-4,0.40006608
-7,0.40006608
-2,0.2182179
-3,0.2182179
-6,0.036369618
-6,1,8,0.3
-6
-6,0.76376265
-4,0.40006608
-7,0.40006608
-2,0.2182179
-3,0.2182179
-5,0.036369618
-7,1,8,0.3
-5
-7,0.7826238
-5,0.40994576
-6,0.40994576
-3,0.2236068
-4,0.037267767
-0,1,8,0.5
-1
-0,1.0
-1,1,8,0.5
-1
-1,1.0
-2,1,8,0.5
-1
-2,1.0
-3,1,8,0.5
-1
-3,1.0
-4,1,8,0.5
-3
-4,0.9045341
-5,0.3015113
-6,0.3015113
-5,1,8,0.5
-3
-5,0.9045341
-4,0.3015113
-7,0.3015113
-6,1,8,0.5
-3
-6,0.9045341
-4,0.3015113
-7,0.3015113
-7,1,8,0.5
-3
-7,0.9045341
-5,0.3015113
-6,0.3015113
-0,1,8,0.7
-1
-0,1.0
-1,1,8,0.7
-1
-1,1.0
-2,1,8,0.7
-1
-2,1.0
-3,1,8,0.7
-1
-3,1.0
-4,1,8,0.7
-1
-4,1.0
-5,1,8,0.7
-1
-5,1.0
-6,1,8,0.7
-1
-6,1.0
-7,1,8,0.7
-1
-7,1.0
-0,1,8,0.9
-1
-0,1.0
-1,1,8,0.9
-1
-1,1.0
-2,1,8,0.9
-1
-2,1.0
-3,1,8,0.9
-1
-3,1.0
-4,1,8,0.9
-1
-4,1.0
-5,1,8,0.9
-1
-5,1.0
-6,1,8,0.9
-1
-6,1.0
-7,1,8,0.9
-1
-7,1.0
-0,2,1,0.1
-1
-0,1.0
-1,2,1,0.1
-1
-1,1.0
-2,2,1,0.1
-1
-2,1.0
-3,2,1,0.1
-1
-3,1.0
-4,2,1,0.1
-1
-4,1.0
-5,2,1,0.1
-1
-5,1.0
-6,2,1,0.1
-1
-6,1.0
-7,2,1,0.1
-1
-7,1.0
-0,2,1,0.3
-1
-0,1.0
-1,2,1,0.3
-1
-1,1.0
-2,2,1,0.3
-1
-2,1.0
-3,2,1,0.3
-1
-3,1.0
-4,2,1,0.3
-1
-4,1.0
-5,2,1,0.3
-1
-5,1.0
-6,2,1,0.3
-1
-6,1.0
-7,2,1,0.3
-1
-7,1.0
-0,2,1,0.5
-1
-0,1.0
-1,2,1,0.5
-1
-1,1.0
-2,2,1,0.5
-1
-2,1.0
-3,2,1,0.5
-1
-3,1.0
-4,2,1,0.5
-1
-4,1.0
-5,2,1,0.5
-1
-5,1.0
-6,2,1,0.5
-1
-6,1.0
-7,2,1,0.5
-1
-7,1.0
-0,2,1,0.7
-1
-0,1.0
-1,2,1,0.7
-1
-1,1.0
-2,2,1,0.7
-1
-2,1.0
-3,2,1,0.7
-1
-3,1.0
-4,2,1,0.7
-1
-4,1.0
-5,2,1,0.7
-1
-5,1.0
-6,2,1,0.7
-1
-6,1.0
-7,2,1,0.7
-1
-7,1.0
-0,2,1,0.9
-1
-0,1.0
-1,2,1,0.9
-1
-1,1.0
-2,2,1,0.9
-1
-2,1.0
-3,2,1,0.9
-1
-3,1.0
-4,2,1,0.9
-1
-4,1.0
-5,2,1,0.9
-1
-5,1.0
-6,2,1,0.9
-1
-6,1.0
-7,2,1,0.9
-1
-7,1.0
-0,2,2,0.1
-1
-0,1.0
-1,2,2,0.1
-1
-1,1.0
-2,2,2,0.1
-2
-2,0.91381156
-4,0.4061385
-3,2,2,0.1
-2
-3,0.91381156
-6,0.4061385
-4,2,2,0.1
-2
-4,0.84623283
-5,0.53281325
-5,2,2,0.1
-2
-5,0.84623283
-4,0.53281325
-6,2,2,0.1
-2
-6,0.84623283
-7,0.53281325
-7,2,2,0.1
-2
-7,0.84623283
-6,0.53281325
-0,2,2,0.3
-1
-0,1.0
-1,2,2,0.3
-1
-1,1.0
-2,2,2,0.3
-2
-2,0.96152395
-4,0.27472112
-3,2,2,0.3
-2
-3,0.96152395
-6,0.27472112
-4,2,2,0.3
-2
-4,0.88583153
-5,0.4640069
-5,2,2,0.3
-2
-5,0.88583153
-4,0.4640069
-6,2,2,0.3
-2
-6,0.88583153
-7,0.4640069
-7,2,2,0.3
-2
-7,0.88583153
-6,0.4640069
-0,2,2,0.5
-1
-0,1.0
-1,2,2,0.5
-1
-1,1.0
-2,2,2,0.5
-1
-2,1.0
-3,2,2,0.5
-1
-3,1.0
-4,2,2,0.5
-2
-4,0.9486833
-5,0.3162277
-5,2,2,0.5
-2
-5,0.9486833
-4,0.3162277
-6,2,2,0.5
-2
-6,0.9486833
-7,0.3162277
-7,2,2,0.5
-2
-7,0.9486833
-6,0.3162277
-0,2,2,0.7
-1
-0,1.0
-1,2,2,0.7
-1
-1,1.0
-2,2,2,0.7
-1
-2,1.0
-3,2,2,0.7
-1
-3,1.0
-4,2,2,0.7
-1
-4,1.0
-5,2,2,0.7
-1
-5,1.0
-6,2,2,0.7
-1
-6,1.0
-7,2,2,0.7
-1
-7,1.0
-0,2,2,0.9
-1
-0,1.0
-1,2,2,0.9
-1
-1,1.0
-2,2,2,0.9
-1
-2,1.0
-3,2,2,0.9
-1
-3,1.0
-4,2,2,0.9
-1
-4,1.0
-5,2,2,0.9
-1
-5,1.0
-6,2,2,0.9
-1
-6,1.0
-7,2,2,0.9
-1
-7,1.0
-0,2,3,0.1
-1
-0,1.0
-1,2,3,0.1
-1
-1,1.0
-2,2,3,0.1
-3
-2,0.84664875
-4,0.37628835
-5,0.37628835
-3,2,3,0.1
-3
-3,0.84664875
-6,0.37628835
-7,0.37628835
-4,2,3,0.1
-3
-4,0.7920648
-5,0.49870744
-2,0.35202882
-5,2,3,0.1
-3
-5,0.7920648
-4,0.49870744
-2,0.35202882
-6,2,3,0.1
-3
-6,0.7920648
-7,0.49870744
-3,0.35202882
-7,2,3,0.1
-3
-7,0.7920648
-6,0.49870744
-3,0.35202882
-0,2,3,0.3
-1
-0,1.0
-1,2,3,0.3
-1
-1,1.0
-2,2,3,0.3
-3
-2,0.92717266
-4,0.26490647
-5,0.26490647
-3,2,3,0.3
-3
-3,0.92717266
-6,0.26490647
-7,0.26490647
-4,2,3,0.3
-3
-4,0.85875386
-5,0.44982338
-2,0.24535823
-5,2,3,0.3
-3
-5,0.85875386
-4,0.44982338
-2,0.24535823
-6,2,3,0.3
-3
-6,0.85875386
-7,0.44982338
-3,0.24535823
-7,2,3,0.3
-3
-7,0.85875386
-6,0.44982338
-3,0.24535823
-0,2,3,0.5
-1
-0,1.0
-1,2,3,0.5
-1
-1,1.0
-2,2,3,0.5
-1
-2,1.0
-3,2,3,0.5
-1
-3,1.0
-4,2,3,0.5
-2
-4,0.9486833
-5,0.3162277
-5,2,3,0.5
-2
-5,0.9486833
-4,0.3162277
-6,2,3,0.5
-2
-6,0.9486833
-7,0.3162277
-7,2,3,0.5
-2
-7,0.9486833
-6,0.3162277
-0,2,3,0.7
-1
-0,1.0
-1,2,3,0.7
-1
-1,1.0
-2,2,3,0.7
-1
-2,1.0
-3,2,3,0.7
-1
-3,1.0
-4,2,3,0.7
-1
-4,1.0
-5,2,3,0.7
-1
-5,1.0
-6,2,3,0.7
-1
-6,1.0
-7,2,3,0.7
-1
-7,1.0
-0,2,3,0.9
-1
-0,1.0
-1,2,3,0.9
-1
-1,1.0
-2,2,3,0.9
-1
-2,1.0
-3,2,3,0.9
-1
-3,1.0
-4,2,3,0.9
-1
-4,1.0
-5,2,3,0.9
-1
-5,1.0
-6,2,3,0.9
-1
-6,1.0
-7,2,3,0.9
-1
-7,1.0
-0,2,4,0.1
-1
-0,1.0
-1,2,4,0.1
-1
-1,1.0
-2,2,4,0.1
-3
-2,0.84664875
-4,0.37628835
-5,0.37628835
-3,2,4,0.1
-3
-3,0.84664875
-6,0.37628835
-7,0.37628835
-4,2,4,0.1
-3
-4,0.7920648
-5,0.49870744
-2,0.35202882
-5,2,4,0.1
-3
-5,0.7920648
-4,0.49870744
-2,0.35202882
-6,2,4,0.1
-3
-6,0.7920648
-7,0.49870744
-3,0.35202882
-7,2,4,0.1
-3
-7,0.7920648
-6,0.49870744
-3,0.35202882
-0,2,4,0.3
-1
-0,1.0
-1,2,4,0.3
-1
-1,1.0
-2,2,4,0.3
-3
-2,0.92717266
-4,0.26490647
-5,0.26490647
-3,2,4,0.3
-3
-3,0.92717266
-6,0.26490647
-7,0.26490647
-4,2,4,0.3
-3
-4,0.85875386
-5,0.44982338
-2,0.24535823
-5,2,4,0.3
-3
-5,0.85875386
-4,0.44982338
-2,0.24535823
-6,2,4,0.3
-3
-6,0.85875386
-7,0.44982338
-3,0.24535823
-7,2,4,0.3
-3
-7,0.85875386
-6,0.44982338
-3,0.24535823
-0,2,4,0.5
-1
-0,1.0
-1,2,4,0.5
-1
-1,1.0
-2,2,4,0.5
-1
-2,1.0
-3,2,4,0.5
-1
-3,1.0
-4,2,4,0.5
-2
-4,0.9486833
-5,0.3162277
-5,2,4,0.5
-2
-5,0.9486833
-4,0.3162277
-6,2,4,0.5
-2
-6,0.9486833
-7,0.3162277
-7,2,4,0.5
-2
-7,0.9486833
-6,0.3162277
-0,2,4,0.7
-1
-0,1.0
-1,2,4,0.7
-1
-1,1.0
-2,2,4,0.7
-1
-2,1.0
-3,2,4,0.7
-1
-3,1.0
-4,2,4,0.7
-1
-4,1.0
-5,2,4,0.7
-1
-5,1.0
-6,2,4,0.7
-1
-6,1.0
-7,2,4,0.7
-1
-7,1.0
-0,2,4,0.9
-1
-0,1.0
-1,2,4,0.9
-1
-1,1.0
-2,2,4,0.9
-1
-2,1.0
-3,2,4,0.9
-1
-3,1.0
-4,2,4,0.9
-1
-4,1.0
-5,2,4,0.9
-1
-5,1.0
-6,2,4,0.9
-1
-6,1.0
-7,2,4,0.9
-1
-7,1.0
-0,2,5,0.1
-1
-0,1.0
-1,2,5,0.1
-1
-1,1.0
-2,2,5,0.1
-3
-2,0.84664875
-4,0.37628835
-5,0.37628835
-3,2,5,0.1
-3
-3,0.84664875
-6,0.37628835
-7,0.37628835
-4,2,5,0.1
-3
-4,0.7920648
-5,0.49870744
-2,0.35202882
-5,2,5,0.1
-3
-5,0.7920648
-4,0.49870744
-2,0.35202882
-6,2,5,0.1
-3
-6,0.7920648
-7,0.49870744
-3,0.35202882
-7,2,5,0.1
-3
-7,0.7920648
-6,0.49870744
-3,0.35202882
-0,2,5,0.3
-1
-0,1.0
-1,2,5,0.3
-1
-1,1.0
-2,2,5,0.3
-3
-2,0.92717266
-4,0.26490647
-5,0.26490647
-3,2,5,0.3
-3
-3,0.92717266
-6,0.26490647
-7,0.26490647
-4,2,5,0.3
-3
-4,0.85875386
-5,0.44982338
-2,0.24535823
-5,2,5,0.3
-3
-5,0.85875386
-4,0.44982338
-2,0.24535823
-6,2,5,0.3
-3
-6,0.85875386
-7,0.44982338
-3,0.24535823
-7,2,5,0.3
-3
-7,0.85875386
-6,0.44982338
-3,0.24535823
-0,2,5,0.5
-1
-0,1.0
-1,2,5,0.5
-1
-1,1.0
-2,2,5,0.5
-1
-2,1.0
-3,2,5,0.5
-1
-3,1.0
-4,2,5,0.5
-2
-4,0.9486833
-5,0.3162277
-5,2,5,0.5
-2
-5,0.9486833
-4,0.3162277
-6,2,5,0.5
-2
-6,0.9486833
-7,0.3162277
-7,2,5,0.5
-2
-7,0.9486833
-6,0.3162277
-0,2,5,0.7
-1
-0,1.0
-1,2,5,0.7
-1
-1,1.0
-2,2,5,0.7
-1
-2,1.0
-3,2,5,0.7
-1
-3,1.0
-4,2,5,0.7
-1
-4,1.0
-5,2,5,0.7
-1
-5,1.0
-6,2,5,0.7
-1
-6,1.0
-7,2,5,0.7
-1
-7,1.0
-0,2,5,0.9
-1
-0,1.0
-1,2,5,0.9
-1
-1,1.0
-2,2,5,0.9
-1
-2,1.0
-3,2,5,0.9
-1
-3,1.0
-4,2,5,0.9
-1
-4,1.0
-5,2,5,0.9
-1
-5,1.0
-6,2,5,0.9
-1
-6,1.0
-7,2,5,0.9
-1
-7,1.0
-0,2,6,0.1
-1
-0,1.0
-1,2,6,0.1
-1
-1,1.0
-2,2,6,0.1
-3
-2,0.84664875
-4,0.37628835
-5,0.37628835
-3,2,6,0.1
-3
-3,0.84664875
-6,0.37628835
-7,0.37628835
-4,2,6,0.1
-3
-4,0.7920648
-5,0.49870744
-2,0.35202882
-5,2,6,0.1
-3
-5,0.7920648
-4,0.49870744
-2,0.35202882
-6,2,6,0.1
-3
-6,0.7920648
-7,0.49870744
-3,0.35202882
-7,2,6,0.1
-3
-7,0.7920648
-6,0.49870744
-3,0.35202882
-0,2,6,0.3
-1
-0,1.0
-1,2,6,0.3
-1
-1,1.0
-2,2,6,0.3
-3
-2,0.92717266
-4,0.26490647
-5,0.26490647
-3,2,6,0.3
-3
-3,0.92717266
-6,0.26490647
-7,0.26490647
-4,2,6,0.3
-3
-4,0.85875386
-5,0.44982338
-2,0.24535823
-5,2,6,0.3
-3
-5,0.85875386
-4,0.44982338
-2,0.24535823
-6,2,6,0.3
-3
-6,0.85875386
-7,0.44982338
-3,0.24535823
-7,2,6,0.3
-3
-7,0.85875386
-6,0.44982338
-3,0.24535823
-0,2,6,0.5
-1
-0,1.0
-1,2,6,0.5
-1
-1,1.0
-2,2,6,0.5
-1
-2,1.0
-3,2,6,0.5
-1
-3,1.0
-4,2,6,0.5
-2
-4,0.9486833
-5,0.3162277
-5,2,6,0.5
-2
-5,0.9486833
-4,0.3162277
-6,2,6,0.5
-2
-6,0.9486833
-7,0.3162277
-7,2,6,0.5
-2
-7,0.9486833
-6,0.3162277
-0,2,6,0.7
-1
-0,1.0
-1,2,6,0.7
-1
-1,1.0
-2,2,6,0.7
-1
-2,1.0
-3,2,6,0.7
-1
-3,1.0
-4,2,6,0.7
-1
-4,1.0
-5,2,6,0.7
-1
-5,1.0
-6,2,6,0.7
-1
-6,1.0
-7,2,6,0.7
-1
-7,1.0
-0,2,6,0.9
-1
-0,1.0
-1,2,6,0.9
-1
-1,1.0
-2,2,6,0.9
-1
-2,1.0
-3,2,6,0.9
-1
-3,1.0
-4,2,6,0.9
-1
-4,1.0
-5,2,6,0.9
-1
-5,1.0
-6,2,6,0.9
-1
-6,1.0
-7,2,6,0.9
-1
-7,1.0
-0,2,7,0.1
-1
-0,1.0
-1,2,7,0.1
-1
-1,1.0
-2,2,7,0.1
-3
-2,0.84664875
-4,0.37628835
-5,0.37628835
-3,2,7,0.1
-3
-3,0.84664875
-6,0.37628835
-7,0.37628835
-4,2,7,0.1
-3
-4,0.7920648
-5,0.49870744
-2,0.35202882
-5,2,7,0.1
-3
-5,0.7920648
-4,0.49870744
-2,0.35202882
-6,2,7,0.1
-3
-6,0.7920648
-7,0.49870744
-3,0.35202882
-7,2,7,0.1
-3
-7,0.7920648
-6,0.49870744
-3,0.35202882
-0,2,7,0.3
-1
-0,1.0
-1,2,7,0.3
-1
-1,1.0
-2,2,7,0.3
-3
-2,0.92717266
-4,0.26490647
-5,0.26490647
-3,2,7,0.3
-3
-3,0.92717266
-6,0.26490647
-7,0.26490647
-4,2,7,0.3
-3
-4,0.85875386
-5,0.44982338
-2,0.24535823
-5,2,7,0.3
-3
-5,0.85875386
-4,0.44982338
-2,0.24535823
-6,2,7,0.3
-3
-6,0.85875386
-7,0.44982338
-3,0.24535823
-7,2,7,0.3
-3
-7,0.85875386
-6,0.44982338
-3,0.24535823
-0,2,7,0.5
-1
-0,1.0
-1,2,7,0.5
-1
-1,1.0
-2,2,7,0.5
-1
-2,1.0
-3,2,7,0.5
-1
-3,1.0
-4,2,7,0.5
-2
-4,0.9486833
-5,0.3162277
-5,2,7,0.5
-2
-5,0.9486833
-4,0.3162277
-6,2,7,0.5
-2
-6,0.9486833
-7,0.3162277
-7,2,7,0.5
-2
-7,0.9486833
-6,0.3162277
-0,2,7,0.7
-1
-0,1.0
-1,2,7,0.7
-1
-1,1.0
-2,2,7,0.7
-1
-2,1.0
-3,2,7,0.7
-1
-3,1.0
-4,2,7,0.7
-1
-4,1.0
-5,2,7,0.7
-1
-5,1.0
-6,2,7,0.7
-1
-6,1.0
-7,2,7,0.7
-1
-7,1.0
-0,2,7,0.9
-1
-0,1.0
-1,2,7,0.9
-1
-1,1.0
-2,2,7,0.9
-1
-2,1.0
-3,2,7,0.9
-1
-3,1.0
-4,2,7,0.9
-1
-4,1.0
-5,2,7,0.9
-1
-5,1.0
-6,2,7,0.9
-1
-6,1.0
-7,2,7,0.9
-1
-7,1.0
-0,2,8,0.1
-1
-0,1.0
-1,2,8,0.1
-1
-1,1.0
-2,2,8,0.1
-3
-2,0.84664875
-4,0.37628835
-5,0.37628835
-3,2,8,0.1
-3
-3,0.84664875
-6,0.37628835
-7,0.37628835
-4,2,8,0.1
-3
-4,0.7920648
-5,0.49870744
-2,0.35202882
-5,2,8,0.1
-3
-5,0.7920648
-4,0.49870744
-2,0.35202882
-6,2,8,0.1
-3
-6,0.7920648
-7,0.49870744
-3,0.35202882
-7,2,8,0.1
-3
-7,0.7920648
-6,0.49870744
-3,0.35202882
-0,2,8,0.3
-1
-0,1.0
-1,2,8,0.3
-1
-1,1.0
-2,2,8,0.3
-3
-2,0.92717266
-4,0.26490647
-5,0.26490647
-3,2,8,0.3
-3
-3,0.92717266
-6,0.26490647
-7,0.26490647
-4,2,8,0.3
-3
-4,0.85875386
-5,0.44982338
-2,0.24535823
-5,2,8,0.3
-3
-5,0.85875386
-4,0.44982338
-2,0.24535823
-6,2,8,0.3
-3
-6,0.85875386
-7,0.44982338
-3,0.24535823
-7,2,8,0.3
-3
-7,0.85875386
-6,0.44982338
-3,0.24535823
-0,2,8,0.5
-1
-0,1.0
-1,2,8,0.5
-1
-1,1.0
-2,2,8,0.5
-1
-2,1.0
-3,2,8,0.5
-1
-3,1.0
-4,2,8,0.5
-2
-4,0.9486833
-5,0.3162277
-5,2,8,0.5
-2
-5,0.9486833
-4,0.3162277
-6,2,8,0.5
-2
-6,0.9486833
-7,0.3162277
-7,2,8,0.5
-2
-7,0.9486833
-6,0.3162277
-0,2,8,0.7
-1
-0,1.0
-1,2,8,0.7
-1
-1,1.0
-2,2,8,0.7
-1
-2,1.0
-3,2,8,0.7
-1
-3,1.0
-4,2,8,0.7
-1
-4,1.0
-5,2,8,0.7
-1
-5,1.0
-6,2,8,0.7
-1
-6,1.0
-7,2,8,0.7
-1
-7,1.0
-0,2,8,0.9
-1
-0,1.0
-1,2,8,0.9
-1
-1,1.0
-2,2,8,0.9
-1
-2,1.0
-3,2,8,0.9
-1
-3,1.0
-4,2,8,0.9
-1
-4,1.0
-5,2,8,0.9
-1
-5,1.0
-6,2,8,0.9
-1
-6,1.0
-7,2,8,0.9
-1
-7,1.0

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java b/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java
index 1cc86ba..2467e99 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java
@@ -633,8 +633,8 @@ public class TestTermAutomatonQuery extends LuceneTestCase {
     }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-      return new ConstantScoreWeight(this) {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+      return new ConstantScoreWeight(this, boost) {
         @Override
         public Scorer scorer(LeafReaderContext context) throws IOException {
           int maxDoc = context.reader().maxDoc();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/spatial-extras/src/java/org/apache/lucene/spatial/composite/CompositeVerifyQuery.java
----------------------------------------------------------------------
diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/composite/CompositeVerifyQuery.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/composite/CompositeVerifyQuery.java
index 823b9c2..d556efa 100644
--- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/composite/CompositeVerifyQuery.java
+++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/composite/CompositeVerifyQuery.java
@@ -82,11 +82,11 @@ public class CompositeVerifyQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    final Weight indexQueryWeight = indexQuery.createWeight(searcher, false);//scores aren't unsupported
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    final Weight indexQueryWeight = indexQuery.createWeight(searcher, false, boost);//scores aren't unsupported
     final Map valueSourceContext = ValueSource.newContext(searcher);
 
-    return new ConstantScoreWeight(this) {
+    return new ConstantScoreWeight(this, boost) {
 
       @Override
       public Scorer scorer(LeafReaderContext context) throws IOException {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/spatial-extras/src/java/org/apache/lucene/spatial/composite/IntersectsRPTVerifyQuery.java
----------------------------------------------------------------------
diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/composite/IntersectsRPTVerifyQuery.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/composite/IntersectsRPTVerifyQuery.java
index ad91514..ce8c207 100644
--- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/composite/IntersectsRPTVerifyQuery.java
+++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/composite/IntersectsRPTVerifyQuery.java
@@ -82,10 +82,10 @@ public class IntersectsRPTVerifyQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     final Map valueSourceContext = ValueSource.newContext(searcher);
 
-    return new ConstantScoreWeight(this) {
+    return new ConstantScoreWeight(this, boost) {
       @Override
       public Scorer scorer(LeafReaderContext context) throws IOException {
         // Compute approx & exact

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/AbstractPrefixTreeQuery.java
----------------------------------------------------------------------
diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/AbstractPrefixTreeQuery.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/AbstractPrefixTreeQuery.java
index a3f2f14..dbe643e 100644
--- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/AbstractPrefixTreeQuery.java
+++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/AbstractPrefixTreeQuery.java
@@ -76,8 +76,8 @@ public abstract class AbstractPrefixTreeQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new ConstantScoreWeight(this) {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new ConstantScoreWeight(this, boost) {
       @Override
       public Scorer scorer(LeafReaderContext context) throws IOException {
         DocIdSet docSet = getDocIdSet(context);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/spatial-extras/src/java/org/apache/lucene/spatial/serialized/SerializedDVStrategy.java
----------------------------------------------------------------------
diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/serialized/SerializedDVStrategy.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/serialized/SerializedDVStrategy.java
index 7e37aac..c2093f2 100644
--- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/serialized/SerializedDVStrategy.java
+++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/serialized/SerializedDVStrategy.java
@@ -135,8 +135,8 @@ public class SerializedDVStrategy extends SpatialStrategy {
     }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-      return new RandomAccessWeight(this) {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+      return new RandomAccessWeight(this, boost) {
         @Override
         protected Bits getMatchingDocs(LeafReaderContext context) throws IOException {
           final FunctionValues predFuncValues = predicateValueSource.getValues(null, context);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/spatial/src/java/org/apache/lucene/spatial/geopoint/search/GeoPointTermQueryConstantScoreWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/spatial/src/java/org/apache/lucene/spatial/geopoint/search/GeoPointTermQueryConstantScoreWrapper.java b/lucene/spatial/src/java/org/apache/lucene/spatial/geopoint/search/GeoPointTermQueryConstantScoreWrapper.java
index 739dec6..99a1f94 100644
--- a/lucene/spatial/src/java/org/apache/lucene/spatial/geopoint/search/GeoPointTermQueryConstantScoreWrapper.java
+++ b/lucene/spatial/src/java/org/apache/lucene/spatial/geopoint/search/GeoPointTermQueryConstantScoreWrapper.java
@@ -75,8 +75,8 @@ final class GeoPointTermQueryConstantScoreWrapper <Q extends GeoPointMultiTermQu
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new ConstantScoreWeight(this) {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new ConstantScoreWeight(this, boost) {
 
       @Override
       public Scorer scorer(LeafReaderContext context) throws IOException {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/PointInGeo3DShapeQuery.java
----------------------------------------------------------------------
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/PointInGeo3DShapeQuery.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/PointInGeo3DShapeQuery.java
index 34e4de9..105e5d8 100644
--- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/PointInGeo3DShapeQuery.java
+++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/PointInGeo3DShapeQuery.java
@@ -60,12 +60,12 @@ final class PointInGeo3DShapeQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
 
     // I don't use RandomAccessWeight here: it's no good to approximate with "match all docs"; this is an inverted structure and should be
     // used in the first pass:
 
-    return new ConstantScoreWeight(this) {
+    return new ConstantScoreWeight(this, boost) {
 
       @Override
       public Scorer scorer(LeafReaderContext context) throws IOException {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/CompletionWeight.java
----------------------------------------------------------------------
diff --git a/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/CompletionWeight.java b/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/CompletionWeight.java
index be18ea2..d74e56f 100644
--- a/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/CompletionWeight.java
+++ b/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/CompletionWeight.java
@@ -143,12 +143,4 @@ public class CompletionWeight extends Weight {
     return null;
   }
 
-  @Override
-  public float getValueForNormalization() throws IOException {
-    return 0;
-  }
-
-  @Override
-  public void normalize(float norm, float boost) {
-  }
 }
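
CompletionWeight previously had to provide empty implementations of getValueForNormalization() and normalize(float, float); with the normalization step removed from Weight, those overrides are simply deleted. For orientation, a Weight subclass after this change only needs the remaining abstract methods, roughly as in this hypothetical sketch (the class matches nothing and exists only to show the reduced surface):

    import java.io.IOException;
    import java.util.Set;

    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.Explanation;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.Weight;

    /** Hypothetical no-op weight illustrating the reduced Weight API. */
    class NoOpWeight extends Weight {

      NoOpWeight(Query query) {
        super(query);
      }

      @Override
      public void extractTerms(Set<Term> terms) {
        // nothing to extract
      }

      @Override
      public Explanation explain(LeafReaderContext context, int doc) throws IOException {
        return Explanation.noMatch("no-op weight matches nothing");
      }

      @Override
      public Scorer scorer(LeafReaderContext context) throws IOException {
        return null; // matches no documents
      }

      // Note: there is no getValueForNormalization() and no normalize(float, float)
      // to implement any more; both were removed from Weight by this change.
    }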

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/ContextQuery.java
----------------------------------------------------------------------
diff --git a/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/ContextQuery.java b/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/ContextQuery.java
index be9f208..7a5e3e0 100644
--- a/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/ContextQuery.java
+++ b/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/ContextQuery.java
@@ -164,8 +164,8 @@ public class ContextQuery extends CompletionQuery {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    final CompletionWeight innerWeight = ((CompletionWeight) innerQuery.createWeight(searcher, needsScores));
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    final CompletionWeight innerWeight = ((CompletionWeight) innerQuery.createWeight(searcher, needsScores, boost));
     // if separators are preserved the fst contains a SEP_LABEL
     // behind each gap. To have a matching automaton, we need to
     // include the SEP_LABEL in the query as well

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/FuzzyCompletionQuery.java
----------------------------------------------------------------------
diff --git a/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/FuzzyCompletionQuery.java b/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/FuzzyCompletionQuery.java
index 5945833..be3aa95 100644
--- a/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/FuzzyCompletionQuery.java
+++ b/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/FuzzyCompletionQuery.java
@@ -142,7 +142,7 @@ public class FuzzyCompletionQuery extends PrefixCompletionQuery {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     CompletionTokenStream stream = (CompletionTokenStream) analyzer.tokenStream(getField(), getTerm().text());
     Set<IntsRef> refs = new HashSet<>();
     Automaton automaton = toLevenshteinAutomata(stream.toAutomaton(unicodeAware), refs);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/PrefixCompletionQuery.java
----------------------------------------------------------------------
diff --git a/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/PrefixCompletionQuery.java b/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/PrefixCompletionQuery.java
index 91d494b..dbc9298 100644
--- a/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/PrefixCompletionQuery.java
+++ b/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/PrefixCompletionQuery.java
@@ -66,7 +66,7 @@ public class PrefixCompletionQuery extends CompletionQuery {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     CompletionTokenStream stream = (CompletionTokenStream) analyzer.tokenStream(getField(), getTerm().text());
     return new CompletionWeight(this, stream.toAutomaton());
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/RegexCompletionQuery.java
----------------------------------------------------------------------
diff --git a/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/RegexCompletionQuery.java b/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/RegexCompletionQuery.java
index 5e0c489..71b4b67 100644
--- a/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/RegexCompletionQuery.java
+++ b/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/RegexCompletionQuery.java
@@ -88,7 +88,7 @@ public class RegexCompletionQuery extends CompletionQuery {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     return new CompletionWeight(this, new RegExp(getTerm().text(), flags).toAutomaton(maxDeterminizedStates));
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/SuggestIndexSearcher.java
----------------------------------------------------------------------
diff --git a/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/SuggestIndexSearcher.java b/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/SuggestIndexSearcher.java
index 3407633..a64afed 100644
--- a/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/SuggestIndexSearcher.java
+++ b/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/SuggestIndexSearcher.java
@@ -67,7 +67,7 @@ public class SuggestIndexSearcher extends IndexSearcher {
     // TODO use IndexSearcher.rewrite instead
     // have to implement equals() and hashCode() in CompletionQuerys and co
     query = (CompletionQuery) query.rewrite(getIndexReader());
-    Weight weight = query.createWeight(this, collector.needsScores());
+    Weight weight = query.createWeight(this, collector.needsScores(), 1f);
     for (LeafReaderContext context : getIndexReader().leaves()) {
       BulkScorer scorer = weight.bulkScorer(context);
       if (scorer != null) {
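
On the calling side, code that builds a Weight directly now supplies the boost explicitly; passing 1f preserves the old un-boosted behaviour, as SuggestIndexSearcher does above. A short fragment (assuming query, searcher and collector are in scope, as in the surrounding method) sketches the two usual routes after this change:

    // Directly from the (already rewritten) query, as in the hunk above:
    Weight direct = query.createWeight(searcher, collector.needsScores(), 1f);

    // Or through the IndexSearcher, which can additionally apply its own
    // wrapping (e.g. the query cache) before returning the weight:
    Weight viaSearcher = searcher.createWeight(searcher.rewrite(query), collector.needsScores(), 1f);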

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/test-framework/src/java/org/apache/lucene/index/BaseNormsFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseNormsFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseNormsFormatTestCase.java
index b6207be..ee70fb6 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseNormsFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseNormsFormatTestCase.java
@@ -321,7 +321,7 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     }
 
     @Override
-    public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
+    public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
       throw new UnsupportedOperationException();
     }
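
The same boost threading reaches the Similarity API: computeWeight now receives the query boost as its first argument instead of having it pushed in through a later normalization round-trip. A minimal sketch of a delegating Similarity written against the new signature (the wrapper class is hypothetical; only the computeWeight signature mirrors this commit):

    import java.io.IOException;

    import org.apache.lucene.index.FieldInvertState;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.CollectionStatistics;
    import org.apache.lucene.search.TermStatistics;
    import org.apache.lucene.search.similarities.BM25Similarity;
    import org.apache.lucene.search.similarities.Similarity;

    /** Hypothetical pass-through Similarity showing the new computeWeight signature. */
    public class DelegatingSimilarity extends Similarity {

      private final Similarity delegate = new BM25Similarity();

      @Override
      public long computeNorm(FieldInvertState state) {
        return delegate.computeNorm(state);
      }

      @Override
      public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
        // The boost is handed in up front rather than via a later normalization step.
        return delegate.computeWeight(boost, collectionStats, termStats);
      }

      @Override
      public SimScorer simScorer(SimWeight weight, LeafReaderContext context) throws IOException {
        return delegate.simScorer(weight, context);
      }
    }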
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/test-framework/src/java/org/apache/lucene/search/AssertingIndexSearcher.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingIndexSearcher.java b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingIndexSearcher.java
index 7bc4b4a..a9a7321 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingIndexSearcher.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingIndexSearcher.java
@@ -50,30 +50,11 @@ public class AssertingIndexSearcher extends IndexSearcher {
     super(context, ex);
     this.random = new Random(random.nextLong());
   }
-  
-  /** Ensures, that the returned {@code Weight} is not normalized again, which may produce wrong scores. */
-  @Override
-  public Weight createNormalizedWeight(Query query, boolean needsScores) throws IOException {
-    final Weight w = super.createNormalizedWeight(query, needsScores);
-    return new AssertingWeight(random, w, needsScores) {
-
-      @Override
-      public void normalize(float norm, float boost) {
-        throw new IllegalStateException("Weight already normalized.");
-      }
-
-      @Override
-      public float getValueForNormalization() {
-        throw new IllegalStateException("Weight already normalized.");
-      }
-
-    };
-  }
 
   @Override
-  public Weight createWeight(Query query, boolean needsScores) throws IOException {
+  public Weight createWeight(Query query, boolean needsScores, float boost) throws IOException {
     // this adds assertions to the inner weights/scorers too
-    return new AssertingWeight(random, super.createWeight(query, needsScores), needsScores);
+    return new AssertingWeight(random, super.createWeight(query, needsScores, boost), needsScores);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/test-framework/src/java/org/apache/lucene/search/AssertingQuery.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingQuery.java b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingQuery.java
index 9110f5c..9280711 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingQuery.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingQuery.java
@@ -39,8 +39,8 @@ public final class AssertingQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new AssertingWeight(new Random(random.nextLong()), in.createWeight(searcher, needsScores), needsScores);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new AssertingWeight(new Random(random.nextLong()), in.createWeight(searcher, needsScores, boost), needsScores);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/test-framework/src/java/org/apache/lucene/search/AssertingWeight.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingWeight.java b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingWeight.java
index 29ca12e..9f24f22 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingWeight.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingWeight.java
@@ -47,16 +47,6 @@ class AssertingWeight extends Weight {
   }
 
   @Override
-  public float getValueForNormalization() throws IOException {
-    return in.getValueForNormalization();
-  }
-
-  @Override
-  public void normalize(float norm, float boost) {
-    in.normalize(norm, boost);
-  }
-
-  @Override
   public Scorer scorer(LeafReaderContext context) throws IOException {
     final Scorer inScorer = in.scorer(context);
     assert inScorer == null || inScorer.docID() == -1;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/test-framework/src/java/org/apache/lucene/search/RandomApproximationQuery.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/RandomApproximationQuery.java b/lucene/test-framework/src/java/org/apache/lucene/search/RandomApproximationQuery.java
index 7e822f1..2db46ff 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/RandomApproximationQuery.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/RandomApproximationQuery.java
@@ -65,8 +65,8 @@ public class RandomApproximationQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    final Weight weight = query.createWeight(searcher, needsScores);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    final Weight weight = query.createWeight(searcher, needsScores, boost);
     return new RandomApproximationWeight(weight, new Random(random.nextLong()));
   }
 
@@ -92,16 +92,6 @@ public class RandomApproximationQuery extends Query {
     }
 
     @Override
-    public float getValueForNormalization() throws IOException {
-      return weight.getValueForNormalization();
-    }
-
-    @Override
-    public void normalize(float norm, float boost) {
-      weight.normalize(norm, boost);
-    }
-
-    @Override
     public Scorer scorer(LeafReaderContext context) throws IOException {
       final Scorer scorer = weight.scorer(context);
       if (scorer == null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/test-framework/src/java/org/apache/lucene/search/similarities/RandomSimilarity.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/similarities/RandomSimilarity.java b/lucene/test-framework/src/java/org/apache/lucene/search/similarities/RandomSimilarity.java
index 5a55a99..136d7e5 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/similarities/RandomSimilarity.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/similarities/RandomSimilarity.java
@@ -45,15 +45,6 @@ public class RandomSimilarity extends PerFieldSimilarityWrapper {
   }
   
   @Override
-  public float queryNorm(float sumOfSquaredWeights) {
-    if (shouldQueryNorm) {
-      return defaultSim.queryNorm(sumOfSquaredWeights);
-    } else {
-      return 1.0f;
-    }
-  }
-  
-  @Override
   public synchronized Similarity get(String field) {
     assert field != null;
     Similarity sim = previousMappings.get(field);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/test-framework/src/java/org/apache/lucene/search/spans/AssertingSpanQuery.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/spans/AssertingSpanQuery.java b/lucene/test-framework/src/java/org/apache/lucene/search/spans/AssertingSpanQuery.java
index 544a53c..7de01e7 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/spans/AssertingSpanQuery.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/spans/AssertingSpanQuery.java
@@ -42,8 +42,8 @@ public class AssertingSpanQuery extends SpanQuery {
   }
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    SpanWeight weight = in.createWeight(searcher, needsScores);
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    SpanWeight weight = in.createWeight(searcher, needsScores, boost);
     return new AssertingSpanWeight(searcher, weight);
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/test-framework/src/java/org/apache/lucene/search/spans/AssertingSpanWeight.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/spans/AssertingSpanWeight.java b/lucene/test-framework/src/java/org/apache/lucene/search/spans/AssertingSpanWeight.java
index ae20d83..a0f4f7b 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/spans/AssertingSpanWeight.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/spans/AssertingSpanWeight.java
@@ -40,7 +40,7 @@ public class AssertingSpanWeight extends SpanWeight {
    * @throws IOException on error
    */
   public AssertingSpanWeight(IndexSearcher searcher, SpanWeight in) throws IOException {
-    super((SpanQuery) in.getQuery(), searcher, null);
+    super((SpanQuery) in.getQuery(), searcher, null, 1f);
     this.in = in;
   }
 
@@ -68,16 +68,6 @@ public class AssertingSpanWeight extends SpanWeight {
   }
 
   @Override
-  public float getValueForNormalization() throws IOException {
-    return in.getValueForNormalization();
-  }
-
-  @Override
-  public void normalize(float queryNorm, float boost) {
-    in.normalize(queryNorm, boost);
-  }
-
-  @Override
   public SpanScorer scorer(LeafReaderContext context) throws IOException {
     return in.scorer(context);
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/test-framework/src/test/org/apache/lucene/search/TestBaseExplanationTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/test/org/apache/lucene/search/TestBaseExplanationTestCase.java b/lucene/test-framework/src/test/org/apache/lucene/search/TestBaseExplanationTestCase.java
index 97ddd4a..0c342d9 100644
--- a/lucene/test-framework/src/test/org/apache/lucene/search/TestBaseExplanationTestCase.java
+++ b/lucene/test-framework/src/test/org/apache/lucene/search/TestBaseExplanationTestCase.java
@@ -73,8 +73,8 @@ public class TestBaseExplanationTestCase extends BaseExplanationTestCase {
       this.toggleExplainMatch = toggleExplainMatch;
       this.breakExplainScores = breakExplainScores;
     }
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-      return new BrokenExplainWeight(this, super.createWeight(searcher,needsScores));
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+      return new BrokenExplainWeight(this, super.createWeight(searcher,needsScores, boost));
     }
   }
   
@@ -107,12 +107,6 @@ public class TestBaseExplanationTestCase extends BaseExplanationTestCase {
     public void extractTerms(Set<Term> terms) {
       in.extractTerms(terms);
     }
-    public float getValueForNormalization() throws IOException {
-      return in.getValueForNormalization();
-    }
-    public void normalize(float norm, float boost) {
-      in.normalize(norm, boost);
-    }
     public Scorer scorer(LeafReaderContext context) throws IOException {
       return in.scorer(context);
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
index 1942232..78cebd3 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
@@ -257,7 +257,7 @@ public class RealTimeGetComponent extends SearchComponent
 
          if (rb.getFilters() != null) {
            for (Query q : rb.getFilters()) {
-             Scorer scorer = searcher.createWeight(q, false).scorer(ctx);
+             Scorer scorer = searcher.createWeight(q, false, 1f).scorer(ctx);
              if (scorer == null || segid != scorer.iterator().advance(segid)) {
                // filter doesn't match.
                docid = -1;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/solr/core/src/java/org/apache/solr/query/FilterQuery.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/query/FilterQuery.java b/solr/core/src/java/org/apache/solr/query/FilterQuery.java
index bd93bb6..785ab5c 100644
--- a/solr/core/src/java/org/apache/solr/query/FilterQuery.java
+++ b/solr/core/src/java/org/apache/solr/query/FilterQuery.java
@@ -73,18 +73,18 @@ public class FilterQuery extends ExtendedQueryBase {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needScores, float boost) throws IOException {
     // SolrRequestInfo reqInfo = SolrRequestInfo.getRequestInfo();
 
     if (!(searcher instanceof SolrIndexSearcher)) {
       // delete-by-query won't have SolrIndexSearcher
-      return new BoostQuery(new ConstantScoreQuery(q), 0).createWeight(searcher, needScores);
+      return new BoostQuery(new ConstantScoreQuery(q), 0).createWeight(searcher, needScores, 1f);
     }
 
     SolrIndexSearcher solrSearcher = (SolrIndexSearcher)searcher;
     DocSet docs = solrSearcher.getDocSet(q);
     // reqInfo.addCloseHook(docs);  // needed for off-heap refcounting
 
-    return new BoostQuery(new SolrConstantScoreQuery(docs.getTopFilter()), 0).createWeight(searcher, needScores);
+    return new BoostQuery(new SolrConstantScoreQuery(docs.getTopFilter()), 0).createWeight(searcher, needScores, 1f);
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/solr/core/src/java/org/apache/solr/query/SolrRangeQuery.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/query/SolrRangeQuery.java b/solr/core/src/java/org/apache/solr/query/SolrRangeQuery.java
index 80d407a..bed0ad5 100644
--- a/solr/core/src/java/org/apache/solr/query/SolrRangeQuery.java
+++ b/solr/core/src/java/org/apache/solr/query/SolrRangeQuery.java
@@ -138,8 +138,8 @@ public final class SolrRangeQuery extends ExtendedQueryBase implements DocSetPro
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needScores) throws IOException {
-    return new ConstWeight(searcher, needScores);
+  public Weight createWeight(IndexSearcher searcher, boolean needScores, float boost) throws IOException {
+    return new ConstWeight(searcher, needScores, boost);
     /*
     DocSet docs = createDocSet(searcher.getIndexReader().leaves(), searcher.getIndexReader().maxDoc());
     SolrConstantScoreQuery csq = new SolrConstantScoreQuery( docs.getTopFilter() );
@@ -324,8 +324,8 @@ public final class SolrRangeQuery extends ExtendedQueryBase implements DocSetPro
     final SegState[] segStates;
 
 
-    protected ConstWeight(IndexSearcher searcher, boolean needScores) {
-      super( SolrRangeQuery.this );
+    protected ConstWeight(IndexSearcher searcher, boolean needScores, float boost) {
+      super( SolrRangeQuery.this, boost );
       this.searcher = searcher;
       this.segStates = new SegState[ searcher.getIndexReader().leaves().size() ];
       this.needScores = needScores;
@@ -398,8 +398,7 @@ public final class SolrRangeQuery extends ExtendedQueryBase implements DocSetPro
           bq.add(new TermQuery(new Term( SolrRangeQuery.this.getField(), t.term), termContext), BooleanClause.Occur.SHOULD);
         }
         Query q = new ConstantScoreQuery(bq.build());
-        final Weight weight = searcher.rewrite(q).createWeight(searcher, needScores);
-        weight.normalize(1f, score());
+        final Weight weight = searcher.rewrite(q).createWeight(searcher, needScores, score());
         return segStates[context.ord] = new SegState(weight);
       }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/solr/core/src/java/org/apache/solr/schema/LatLonType.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/LatLonType.java b/solr/core/src/java/org/apache/solr/schema/LatLonType.java
index b50b35f..b6d1b52 100644
--- a/solr/core/src/java/org/apache/solr/schema/LatLonType.java
+++ b/solr/core/src/java/org/apache/solr/schema/LatLonType.java
@@ -309,8 +309,8 @@ class SpatialDistanceQuery extends ExtendedQueryBase implements PostFilter {
     protected Map latContext;
     protected Map lonContext;
 
-    public SpatialWeight(IndexSearcher searcher) throws IOException {
-      super(SpatialDistanceQuery.this);
+    public SpatialWeight(IndexSearcher searcher, float boost) throws IOException {
+      super(SpatialDistanceQuery.this, boost);
       this.searcher = searcher;
       this.latContext = ValueSource.newContext(searcher);
       this.lonContext = ValueSource.newContext(searcher);
@@ -491,7 +491,7 @@ class SpatialDistanceQuery extends ExtendedQueryBase implements PostFilter {
   @Override
   public DelegatingCollector getFilterCollector(IndexSearcher searcher) {
     try {
-      return new SpatialCollector(new SpatialWeight(searcher));
+      return new SpatialCollector(new SpatialWeight(searcher, 1f));
     } catch (IOException e) {
       throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
     }
@@ -523,10 +523,10 @@ class SpatialDistanceQuery extends ExtendedQueryBase implements PostFilter {
 
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     // if we were supposed to use bboxQuery, then we should have been rewritten using that query
     assert bboxQuery == null;
-    return new SpatialWeight(searcher);
+    return new SpatialWeight(searcher, boost);
   }
 
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/solr/core/src/java/org/apache/solr/search/ExportQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/ExportQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/ExportQParserPlugin.java
index 8156f24..38bb74f 100644
--- a/solr/core/src/java/org/apache/solr/search/ExportQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/ExportQParserPlugin.java
@@ -71,8 +71,8 @@ public class ExportQParserPlugin extends QParserPlugin {
       return null;
     }
 
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException{
-      return mainQuery.createWeight(searcher, true);
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException{
+      return mainQuery.createWeight(searcher, true, boost);
     }
 
     public Query rewrite(IndexReader reader) throws IOException {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/solr/core/src/java/org/apache/solr/search/Filter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/Filter.java b/solr/core/src/java/org/apache/solr/search/Filter.java
index 0aa9417..f4a148c 100644
--- a/solr/core/src/java/org/apache/solr/search/Filter.java
+++ b/solr/core/src/java/org/apache/solr/search/Filter.java
@@ -88,21 +88,13 @@ public abstract class Filter extends Query {
   //
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     return new Weight(this) {
 
       @Override
       public void extractTerms(Set<Term> terms) {}
 
       @Override
-      public float getValueForNormalization() throws IOException {
-        return 0f;
-      }
-
-      @Override
-      public void normalize(float norm, float boost) {}
-
-      @Override
       public Explanation explain(LeafReaderContext context, int doc) throws IOException {
         final Scorer scorer = scorer(context);
         final boolean match = (scorer != null && scorer.iterator().advance(doc) == doc);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/solr/core/src/java/org/apache/solr/search/GraphTermsQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/GraphTermsQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/GraphTermsQParserPlugin.java
index dfe411a..a8a86cb 100644
--- a/solr/core/src/java/org/apache/solr/search/GraphTermsQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/GraphTermsQParserPlugin.java
@@ -193,7 +193,7 @@ public class GraphTermsQParserPlugin extends QParserPlugin {
     }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
 
       List<TermContext> finalContexts = new ArrayList();
       List<Term> finalTerms = new ArrayList();
@@ -208,7 +208,7 @@ public class GraphTermsQParserPlugin extends QParserPlugin {
         }
       }
 
-      return new ConstantScoreWeight(this) {
+      return new ConstantScoreWeight(this, boost) {
 
         @Override
         public void extractTerms(Set<Term> terms) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/solr/core/src/java/org/apache/solr/search/HashQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/HashQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/HashQParserPlugin.java
index 2112c71..33a446a 100644
--- a/solr/core/src/java/org/apache/solr/search/HashQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/HashQParserPlugin.java
@@ -112,7 +112,7 @@ public class HashQParserPlugin extends QParserPlugin {
       this.worker = worker;
     }
 
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
 
       String[] keys = keysParam.split(",");
       SolrIndexSearcher solrIndexSearcher = (SolrIndexSearcher)searcher;
@@ -132,7 +132,7 @@ public class HashQParserPlugin extends QParserPlugin {
       }
 
       ConstantScoreQuery constantScoreQuery = new ConstantScoreQuery(new BitsFilter(fixedBitSets));
-      return searcher.rewrite(constantScoreQuery).createWeight(searcher, false);
+      return searcher.rewrite(constantScoreQuery).createWeight(searcher, false, boost);
     }
 
     public class BitsFilter extends Filter {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/solr/core/src/java/org/apache/solr/search/JoinQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/JoinQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/JoinQParserPlugin.java
index 2adec48..de96c96 100644
--- a/solr/core/src/java/org/apache/solr/search/JoinQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/JoinQParserPlugin.java
@@ -146,8 +146,8 @@ class JoinQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new JoinQueryWeight((SolrIndexSearcher)searcher);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new JoinQueryWeight((SolrIndexSearcher)searcher, boost);
   }
 
   private class JoinQueryWeight extends ConstantScoreWeight {
@@ -157,8 +157,8 @@ class JoinQuery extends Query {
     private Similarity similarity;
     ResponseBuilder rb;
 
-    public JoinQueryWeight(SolrIndexSearcher searcher) {
-      super(JoinQuery.this);
+    public JoinQueryWeight(SolrIndexSearcher searcher, float boost) {
+      super(JoinQuery.this, boost);
       this.fromSearcher = searcher;
       SolrRequestInfo info = SolrRequestInfo.getRequestInfo();
       if (info != null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java
index 06974f2..03a1d33 100644
--- a/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java
@@ -174,8 +174,8 @@ public class ReRankQParserPlugin extends QParserPlugin {
       return super.rewrite(reader);
     }
 
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException{
-      return new ReRankWeight(mainQuery, reRankQuery, reRankWeight, searcher, needsScores);
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException{
+      return new ReRankWeight(mainQuery, reRankQuery, reRankWeight, searcher, needsScores, boost);
     }
   }
 
@@ -185,12 +185,12 @@ public class ReRankQParserPlugin extends QParserPlugin {
     private Weight mainWeight;
     private double reRankWeight;
 
-    public ReRankWeight(Query mainQuery, Query reRankQuery, double reRankWeight, IndexSearcher searcher, boolean needsScores) throws IOException {
+    public ReRankWeight(Query mainQuery, Query reRankQuery, double reRankWeight, IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
       super(mainQuery);
       this.reRankQuery = reRankQuery;
       this.searcher = searcher;
       this.reRankWeight = reRankWeight;
-      this.mainWeight = mainQuery.createWeight(searcher, needsScores);
+      this.mainWeight = mainQuery.createWeight(searcher, needsScores, boost);
     }
 
     @Override
@@ -199,18 +199,11 @@ public class ReRankQParserPlugin extends QParserPlugin {
 
     }
 
-    public float getValueForNormalization() throws IOException {
-      return mainWeight.getValueForNormalization();
-    }
-
+    @Override
     public Scorer scorer(LeafReaderContext context) throws IOException {
       return mainWeight.scorer(context);
     }
 
-    public void normalize(float norm, float topLevelBoost) {
-      mainWeight.normalize(norm, topLevelBoost);
-    }
-
     public Explanation explain(LeafReaderContext context, int doc) throws IOException {
       Explanation mainExplain = mainWeight.explain(context, doc);
       return new QueryRescorer(reRankQuery) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/solr/core/src/java/org/apache/solr/search/SolrConstantScoreQuery.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/SolrConstantScoreQuery.java b/solr/core/src/java/org/apache/solr/search/SolrConstantScoreQuery.java
index 6e55ad9..15d15f6 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrConstantScoreQuery.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrConstantScoreQuery.java
@@ -84,8 +84,8 @@ public class SolrConstantScoreQuery extends Query implements ExtendedQuery {
   protected class ConstantWeight extends ConstantScoreWeight {
     private Map context;
 
-    public ConstantWeight(IndexSearcher searcher) throws IOException {
-      super(SolrConstantScoreQuery.this);
+    public ConstantWeight(IndexSearcher searcher, float boost) throws IOException {
+      super(SolrConstantScoreQuery.this, boost);
       this.context = ValueSource.newContext(searcher);
       if (filter instanceof SolrFilter)
         ((SolrFilter)filter).createWeight(context, searcher);
@@ -107,8 +107,8 @@ public class SolrConstantScoreQuery extends Query implements ExtendedQuery {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new SolrConstantScoreQuery.ConstantWeight(searcher);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new SolrConstantScoreQuery.ConstantWeight(searcher, boost);
   }
 
   /** Prints a user-readable version of this query. */

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/solr/core/src/java/org/apache/solr/search/WrappedQuery.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/WrappedQuery.java b/solr/core/src/java/org/apache/solr/search/WrappedQuery.java
index 974c6e5..b3bde2a 100644
--- a/solr/core/src/java/org/apache/solr/search/WrappedQuery.java
+++ b/solr/core/src/java/org/apache/solr/search/WrappedQuery.java
@@ -40,8 +40,8 @@ public final class WrappedQuery extends ExtendedQueryBase {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return q.createWeight(searcher, needsScores);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return q.createWeight(searcher, needsScores, boost);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/solr/core/src/java/org/apache/solr/search/join/GraphQuery.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/join/GraphQuery.java b/solr/core/src/java/org/apache/solr/search/join/GraphQuery.java
index 3fca05a..8cfcf79 100644
--- a/solr/core/src/java/org/apache/solr/search/join/GraphQuery.java
+++ b/solr/core/src/java/org/apache/solr/search/join/GraphQuery.java
@@ -108,8 +108,8 @@ public class GraphQuery extends Query {
   }
   
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    Weight graphWeight = new GraphQueryWeight((SolrIndexSearcher)searcher);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    Weight graphWeight = new GraphQueryWeight((SolrIndexSearcher)searcher, boost);
     return graphWeight;
   }
   
@@ -129,18 +129,18 @@ public class GraphQuery extends Query {
   
   protected class GraphQueryWeight extends Weight {
     
-    SolrIndexSearcher fromSearcher;
-    private float queryNorm = 1.0F;
-    private float queryWeight = 1.0F; 
+    final SolrIndexSearcher fromSearcher;
+    private final float boost;
     private int frontierSize = 0;
     private int currentDepth = -1;
     private Filter filter;
     private DocSet resultSet;
     
-    public GraphQueryWeight(SolrIndexSearcher searcher) {
+    public GraphQueryWeight(SolrIndexSearcher searcher, float boost) {
       // Grab the searcher so we can run additional searches.
       super(null);
       this.fromSearcher = searcher;
+      this.boost = boost;
     }
     
     @Override
@@ -157,16 +157,6 @@ public class GraphQuery extends Query {
       }
     }
     
-    @Override
-    public float getValueForNormalization() throws IOException {
-      return 1F;
-    }
-    
-    @Override
-    public void normalize(float norm, float topLevelBoost) {
-      this.queryWeight = norm * topLevelBoost;
-    }
-    
     /**
      * This computes the matching doc set for a given graph query
      * 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/solr/core/src/java/org/apache/solr/search/similarities/SchemaSimilarityFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/similarities/SchemaSimilarityFactory.java b/solr/core/src/java/org/apache/solr/search/similarities/SchemaSimilarityFactory.java
index 355767a..e648481 100644
--- a/solr/core/src/java/org/apache/solr/search/similarities/SchemaSimilarityFactory.java
+++ b/solr/core/src/java/org/apache/solr/search/similarities/SchemaSimilarityFactory.java
@@ -73,9 +73,7 @@ import org.apache.solr.util.plugin.SolrCoreAware;
  * <code>Similarity</code> for some or all fields in a Query, the behavior can be inconsistent 
  * with the behavior of explicitly configuring that same <code>Similarity</code> globally, because 
  * of differences in how some multi-field / multi-clause behavior is defined in 
- * <code>PerFieldSimilarityWrapper</code>.  In particular please consider carefully the documentation 
- * &amp; implementation of {@link Similarity#queryNorm} in 
- * {@link ClassicSimilarity} compared to {@link PerFieldSimilarityWrapper}
+ * <code>PerFieldSimilarityWrapper</code>.
  * </p>
  *
  * @see FieldType#getSimilarity

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/solr/core/src/java/org/apache/solr/update/DeleteByQueryWrapper.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/DeleteByQueryWrapper.java b/solr/core/src/java/org/apache/solr/update/DeleteByQueryWrapper.java
index 330e767..4630b7f 100644
--- a/solr/core/src/java/org/apache/solr/update/DeleteByQueryWrapper.java
+++ b/solr/core/src/java/org/apache/solr/update/DeleteByQueryWrapper.java
@@ -65,10 +65,10 @@ final class DeleteByQueryWrapper extends Query {
   }
   
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     final LeafReader wrapped = wrap((LeafReader) searcher.getIndexReader());
     final IndexSearcher privateContext = new IndexSearcher(wrapped);
-    final Weight inner = in.createWeight(privateContext, needsScores);
+    final Weight inner = in.createWeight(privateContext, needsScores, boost);
     return new Weight(DeleteByQueryWrapper.this) {
       @Override
       public void extractTerms(Set<Term> terms) {
@@ -79,12 +79,6 @@ final class DeleteByQueryWrapper extends Query {
       public Explanation explain(LeafReaderContext context, int doc) throws IOException { throw new UnsupportedOperationException(); }
 
       @Override
-      public float getValueForNormalization() throws IOException { return inner.getValueForNormalization(); }
-
-      @Override
-      public void normalize(float norm, float topLevelBoost) { inner.normalize(norm, topLevelBoost); }
-
-      @Override
       public Scorer scorer(LeafReaderContext context) throws IOException {
         return inner.scorer(privateContext.getIndexReader().leaves().get(0));
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/solr/core/src/test/org/apache/solr/DisMaxRequestHandlerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/DisMaxRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/DisMaxRequestHandlerTest.java
index 8d1b758..a8070fa 100644
--- a/solr/core/src/test/org/apache/solr/DisMaxRequestHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/DisMaxRequestHandlerTest.java
@@ -86,8 +86,8 @@ public class DisMaxRequestHandlerTest extends SolrTestCaseJ4 {
             req("cool stuff")
             ,"//*[@numFound='3']"
             ,"//result/doc[1]/int[@name='id'][.='42']"
-            ,"//result/doc[2]/int[@name='id'][.='666']"
-            ,"//result/doc[3]/int[@name='id'][.='8675309']"
+            ,"//result/doc[2]/int[@name='id'][.='8675309']"
+            ,"//result/doc[3]/int[@name='id'][.='666']"
             );
 
     assertQ("multi qf",

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/solr/core/src/test/org/apache/solr/cloud/DistribJoinFromCollectionTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/DistribJoinFromCollectionTest.java b/solr/core/src/test/org/apache/solr/cloud/DistribJoinFromCollectionTest.java
index ae17ca9..d0d79f6 100644
--- a/solr/core/src/test/org/apache/solr/cloud/DistribJoinFromCollectionTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/DistribJoinFromCollectionTest.java
@@ -146,7 +146,7 @@ public class DistribJoinFromCollectionTest extends SolrCloudTestCase{
   private void testJoins(String toColl, String fromColl, Integer toDocId, boolean isScoresTest)
       throws SolrServerException, IOException {
     // verify the join with fromIndex works
-    final String fromQ = "match_s:c match_s:not_1_0_score_after_weight_normalization";
+    final String fromQ = "match_s:c^2";
     CloudSolrClient client = cluster.getSolrClient();
     {
     final String joinQ = "{!join " + anyScoreMode(isScoresTest)

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/solr/core/src/test/org/apache/solr/search/TestRankQueryPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestRankQueryPlugin.java b/solr/core/src/test/org/apache/solr/search/TestRankQueryPlugin.java
index 6b36375..d895697 100644
--- a/solr/core/src/test/org/apache/solr/search/TestRankQueryPlugin.java
+++ b/solr/core/src/test/org/apache/solr/search/TestRankQueryPlugin.java
@@ -109,8 +109,8 @@ public class TestRankQueryPlugin extends QParserPlugin {
       return false;
     }
 
-    public Weight createWeight(IndexSearcher indexSearcher, boolean needsScores) throws IOException{
-      return q.createWeight(indexSearcher, needsScores);
+    public Weight createWeight(IndexSearcher indexSearcher, boolean needsScores, float boost) throws IOException{
+      return q.createWeight(indexSearcher, needsScores, boost);
     }
 
     @Override
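
All of the hunks above follow the same mechanical change: Weight.getValueForNormalization() and Weight.normalize(norm, boost) are gone, and the boost is instead passed straight into Query.createWeight(searcher, needsScores, boost). For third-party code the adaptation is usually just threading the extra argument through. Below is a minimal, hypothetical sketch (not part of any commit in this thread) of a delegating query doing exactly that; MyDelegatingQuery and its wrapped query are made-up names, and the shape simply mirrors WrappedQuery and AssertingQuery above.

  import java.io.IOException;

  import org.apache.lucene.search.IndexSearcher;
  import org.apache.lucene.search.Query;
  import org.apache.lucene.search.Weight;

  public final class MyDelegatingQuery extends Query {
    private final Query in;

    public MyDelegatingQuery(Query in) {
      this.in = in;
    }

    @Override
    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
      // The boost arrives as an argument now; there is no later normalize(norm, boost)
      // pass to forward, so delegation is a single call.
      return in.createWeight(searcher, needsScores, boost);
    }

    @Override
    public String toString(String field) {
      return "my(" + in.toString(field) + ")";
    }

    @Override
    public boolean equals(Object other) {
      return other != null && getClass() == other.getClass()
          && in.equals(((MyDelegatingQuery) other).in);
    }

    @Override
    public int hashCode() {
      return 31 * getClass().hashCode() + in.hashCode();
    }
  }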


[40/51] [abbrv] lucene-solr:apiv2: SOLR-7280: refactored to incorporate Mike's suggestions. Default thread count for cloud is limited to 8 now. In our internal testing, 8 has given us the best stability during restarts

Posted by sa...@apache.org.
SOLR-7280: refactored to incorporate Mike's suggestions. Default thread count for cloud is limited to 8 now. In our internal testing, 8 has given us the best stability during restarts
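
Note that the new 8-thread figure is only a default; an explicit coreLoadThreads value in solr.xml still wins. The fragment below is a hypothetical excerpt (the surrounding solr.xml content is assumed, and it is not part of this commit) showing how a node could keep the old cloud value of 24 if that works better for a given install:

  <solr>
    <!-- Explicit override of core-load parallelism. When this element is absent,
         the defaults apply: 3 threads standalone, 8 threads in SolrCloud mode. -->
    <int name="coreLoadThreads">24</int>
  </solr>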


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/2d1496c8
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/2d1496c8
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/2d1496c8

Branch: refs/heads/apiv2
Commit: 2d1496c83d83bb6582af39af6cf272828d83c9e3
Parents: 5c4b717
Author: Noble Paul <no...@apache.org>
Authored: Tue Jul 19 18:21:23 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Tue Jul 19 18:21:23 2016 +0530

----------------------------------------------------------------------
 .../org/apache/solr/core/CoreContainer.java     | 29 ++++++++---------
 .../java/org/apache/solr/core/NodeConfig.java   | 12 ++++---
 .../org/apache/solr/core/CoreSorterTest.java    | 34 +++++++++-----------
 .../test/org/apache/solr/core/TestSolrXml.java  |  2 +-
 4 files changed, 36 insertions(+), 41 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2d1496c8/solr/core/src/java/org/apache/solr/core/CoreContainer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index aa65f54..1bdf3e3 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -16,19 +16,6 @@
  */
 package org.apache.solr.core;
 
-import static com.google.common.base.Preconditions.checkNotNull;
-import static java.util.Collections.EMPTY_MAP;
-import static org.apache.solr.common.params.CommonParams.AUTHC_PATH;
-import static org.apache.solr.common.params.CommonParams.AUTHZ_PATH;
-import static org.apache.solr.common.params.CommonParams.COLLECTIONS_HANDLER_PATH;
-import static org.apache.solr.common.params.CommonParams.CONFIGSETS_HANDLER_PATH;
-import static org.apache.solr.common.params.CommonParams.CORES_HANDLER_PATH;
-import static org.apache.solr.common.params.CommonParams.INFO_HANDLER_PATH;
-import static org.apache.solr.common.params.CommonParams.ZK_PATH;
-import static org.apache.solr.core.NodeConfig.NodeConfigBuilder.DEFAULT_CORE_LOAD_THREADS;
-import static org.apache.solr.core.NodeConfig.NodeConfigBuilder.DEFAULT_CORE_LOAD_THREADS_IN_CLOUD;
-import static org.apache.solr.security.AuthenticationPlugin.AUTHENTICATION_PLUGIN_PROP;
-
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.nio.file.Path;
@@ -46,6 +33,8 @@ import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Future;
 
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Maps;
 import org.apache.http.auth.AuthSchemeProvider;
 import org.apache.http.client.CredentialsProvider;
 import org.apache.http.config.Lookup;
@@ -88,8 +77,16 @@ import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Maps;
+import static com.google.common.base.Preconditions.checkNotNull;
+import static java.util.Collections.EMPTY_MAP;
+import static org.apache.solr.common.params.CommonParams.AUTHC_PATH;
+import static org.apache.solr.common.params.CommonParams.AUTHZ_PATH;
+import static org.apache.solr.common.params.CommonParams.COLLECTIONS_HANDLER_PATH;
+import static org.apache.solr.common.params.CommonParams.CONFIGSETS_HANDLER_PATH;
+import static org.apache.solr.common.params.CommonParams.CORES_HANDLER_PATH;
+import static org.apache.solr.common.params.CommonParams.INFO_HANDLER_PATH;
+import static org.apache.solr.common.params.CommonParams.ZK_PATH;
+import static org.apache.solr.security.AuthenticationPlugin.AUTHENTICATION_PLUGIN_PROP;
 
 
 /**
@@ -489,7 +486,7 @@ public class CoreContainer {
 
     // setup executor to load cores in parallel
     ExecutorService coreLoadExecutor = ExecutorUtil.newMDCAwareFixedThreadPool(
-        cfg.getCoreLoadThreadCount(isZooKeeperAware() ? DEFAULT_CORE_LOAD_THREADS_IN_CLOUD : DEFAULT_CORE_LOAD_THREADS),
+        cfg.getCoreLoadThreadCount(isZooKeeperAware()),
         new DefaultSolrThreadFactory("coreLoadExecutor") );
     final List<Future<SolrCore>> futures = new ArrayList<>();
     try {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2d1496c8/solr/core/src/java/org/apache/solr/core/NodeConfig.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/NodeConfig.java b/solr/core/src/java/org/apache/solr/core/NodeConfig.java
index 3db453b..ea451a9 100644
--- a/solr/core/src/java/org/apache/solr/core/NodeConfig.java
+++ b/solr/core/src/java/org/apache/solr/core/NodeConfig.java
@@ -87,7 +87,7 @@ public class NodeConfig {
     this.solrProperties = solrProperties;
     this.backupRepositoryPlugins = backupRepositoryPlugins;
 
-    if (this.cloudConfig != null && this.getCoreLoadThreadCount(NodeConfigBuilder.DEFAULT_CORE_LOAD_THREADS) < 2) {
+    if (this.cloudConfig != null && this.getCoreLoadThreadCount(false) < 2) {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
           "SolrCloud requires a value of at least 2 for coreLoadThreads (configured value = " + this.coreLoadThreads + ")");
     }
@@ -109,8 +109,10 @@ public class NodeConfig {
     return updateShardHandlerConfig;
   }
 
-  public int getCoreLoadThreadCount(int def) {
-    return coreLoadThreads == null ? def : coreLoadThreads;
+  public int getCoreLoadThreadCount(boolean zkAware) {
+    return coreLoadThreads == null ?
+        (zkAware ? NodeConfigBuilder.DEFAULT_CORE_LOAD_THREADS_IN_CLOUD : NodeConfigBuilder.DEFAULT_CORE_LOAD_THREADS)
+        : coreLoadThreads;
   }
 
   public String getSharedLibDirectory() {
@@ -196,8 +198,8 @@ public class NodeConfig {
     private final String nodeName;
 
     public static final int DEFAULT_CORE_LOAD_THREADS = 3;
-    //No:of core load threads in cloud mode is set to a default of 24
-    public static final int DEFAULT_CORE_LOAD_THREADS_IN_CLOUD = 24;
+    //No:of core load threads in cloud mode is set to a default of 8
+    public static final int DEFAULT_CORE_LOAD_THREADS_IN_CLOUD = 8;
 
     private static final int DEFAULT_TRANSIENT_CACHE_SIZE = Integer.MAX_VALUE;
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2d1496c8/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java b/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java
index 3e5af20..dda437a 100644
--- a/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java
+++ b/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java
@@ -39,7 +39,7 @@ import org.apache.solr.util.MockCoreContainer;
 import static java.util.stream.Collectors.toList;
 import static org.apache.solr.core.CoreSorter.getShardName;
 import static org.easymock.EasyMock.createMock;
-import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.EasyMock.expect;
 import static org.easymock.EasyMock.replay;
 import static org.easymock.EasyMock.reset;
 
@@ -49,16 +49,16 @@ public class CoreSorterTest extends SolrTestCaseJ4 {
 
   public void testComparator() {
     List<CountsForEachShard> l = new ArrayList<>();
-    //                           DN LIV  MY
-    l.add(new CountsForEachShard(1, 3, 1));
-    l.add(new CountsForEachShard(0, 3, 2));
-    l.add(new CountsForEachShard(0, 3, 3));
-    l.add(new CountsForEachShard(0, 3, 4));
-    l.add(new CountsForEachShard(1, 0, 2));
-    l.add(new CountsForEachShard(1, 0, 1));
-    l.add(new CountsForEachShard(2, 5, 1));
-    l.add(new CountsForEachShard(2, 4, 2));
-    l.add(new CountsForEachShard(2, 3, 3));
+    //                           DOWN LIVE  MY
+    l.add(new CountsForEachShard(1,     3,  1));
+    l.add(new CountsForEachShard(0,     3,  2));
+    l.add(new CountsForEachShard(0,     3,  3));
+    l.add(new CountsForEachShard(0,     3,  4));
+    l.add(new CountsForEachShard(1,     0,  2));
+    l.add(new CountsForEachShard(1,     0,  1));
+    l.add(new CountsForEachShard(2,     5,  1));
+    l.add(new CountsForEachShard(2,     4,  2));
+    l.add(new CountsForEachShard(2,     3,  3));
 
     List<CountsForEachShard> expected = Arrays.asList(
         new CountsForEachShard(0, 3, 2),
@@ -108,14 +108,10 @@ public class CoreSorterTest extends SolrTestCaseJ4 {
     ZkController mockZKC = createMock(ZkController.class);
     ClusterState mockClusterState = createMock(ClusterState.class);
     reset(mockCC, mockZKC, mockClusterState);
-    mockCC.isZooKeeperAware();
-    expectLastCall().andAnswer(() -> Boolean.TRUE).anyTimes();
-    mockCC.getZkController();
-    expectLastCall().andAnswer(() -> mockZKC).anyTimes();
-    mockClusterState.getLiveNodes();
-    expectLastCall().andAnswer(() -> liveNodes).anyTimes();
-    mockZKC.getClusterState();
-    expectLastCall().andAnswer(() -> mockClusterState).anyTimes();
+    expect(mockCC.isZooKeeperAware()).andReturn(Boolean.TRUE).anyTimes();
+    expect(mockCC.getZkController()).andReturn(mockZKC).anyTimes();
+    expect(mockClusterState.getLiveNodes()).andReturn(liveNodes).anyTimes();
+    expect(mockZKC.getClusterState()).andReturn(mockClusterState).anyTimes();
     replay(mockCC, mockZKC, mockClusterState);
     return mockCC;
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2d1496c8/solr/core/src/test/org/apache/solr/core/TestSolrXml.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrXml.java b/solr/core/src/test/org/apache/solr/core/TestSolrXml.java
index e005c9f..9224c4d 100644
--- a/solr/core/src/test/org/apache/solr/core/TestSolrXml.java
+++ b/solr/core/src/test/org/apache/solr/core/TestSolrXml.java
@@ -73,7 +73,7 @@ public class TestSolrXml extends SolrTestCaseJ4 {
     assertEquals("collection handler class", "testCollectionsHandler", cfg.getCollectionsHandlerClass());
     assertEquals("info handler class", "testInfoHandler", cfg.getInfoHandlerClass());
     assertEquals("config set handler class", "testConfigSetsHandler", cfg.getConfigSetsHandlerClass());
-    assertEquals("core load threads", 11, cfg.getCoreLoadThreadCount(0));
+    assertEquals("core load threads", 11, cfg.getCoreLoadThreadCount(false));
     assertThat("core root dir", cfg.getCoreRootDirectory().toString(), containsString("testCoreRootDirectory"));
     assertEquals("distrib conn timeout", 22, cfg.getUpdateShardHandlerConfig().getDistributedConnectionTimeout());
     assertEquals("distrib socket timeout", 33, cfg.getUpdateShardHandlerConfig().getDistributedSocketTimeout());


[44/51] [abbrv] lucene-solr:apiv2: SOLR-9288: Fix [docid] transformer to return -1 when used in RTG with uncommitted doc

Posted by sa...@apache.org.
SOLR-9288: Fix [docid] transformer to return -1 when used in RTG with uncommitted doc
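
Concretely, a real-time get may now always request the [docid] pseudo-field; -1 marks a document that was served from the transaction log rather than the index. The request/response sketch below is illustrative only (host, collection name and doc id are hypothetical, and the XML is trimmed to the relevant element):

  http://localhost:8983/solr/mycollection/get?id=42&wt=xml&fl=id,[docid]

  <doc>
    <str name="id">42</str>
    <!-- -1: the doc came from the update log; after a commit the same request
         returns the document's real internal Lucene doc id (>= 0) -->
    <int name="[docid]">-1</int>
  </doc>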


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/08019f42
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/08019f42
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/08019f42

Branch: refs/heads/apiv2
Commit: 08019f42889a537764384429c4184515d233a2cb
Parents: 180f956
Author: Chris Hostetter <ho...@apache.org>
Authored: Tue Jul 19 10:50:02 2016 -0700
Committer: Chris Hostetter <ho...@apache.org>
Committed: Tue Jul 19 10:50:45 2016 -0700

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  2 +
 .../transform/DocIdAugmenterFactory.java        | 10 ++--
 .../apache/solr/cloud/TestRandomFlRTGCloud.java | 61 +++++++++++++++++++-
 .../solr/search/TestPseudoReturnFields.java     | 60 ++++++++-----------
 4 files changed, 90 insertions(+), 43 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/08019f42/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index bff2909..0ccccee 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -157,6 +157,8 @@ Bug Fixes
 
 * SOLR-9285: Fixed AIOOBE when using ValueSourceAugmenter in single node RTG (hossman)
 
+* SOLR-9288: Fix [docid] transformer to return -1 when used in RTG with uncommitted doc (hossman)
+
 Optimizations
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/08019f42/solr/core/src/java/org/apache/solr/response/transform/DocIdAugmenterFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/response/transform/DocIdAugmenterFactory.java b/solr/core/src/java/org/apache/solr/response/transform/DocIdAugmenterFactory.java
index 2f037c9..e95ac1e 100644
--- a/solr/core/src/java/org/apache/solr/response/transform/DocIdAugmenterFactory.java
+++ b/solr/core/src/java/org/apache/solr/response/transform/DocIdAugmenterFactory.java
@@ -21,7 +21,10 @@ import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.request.SolrQueryRequest;
 
 /**
- *
+ * Augments the document with a <code>[docid]</code> integer containing its current
+ * (internal) id in the Lucene index.  May be <code>-1</code> if this document did not come from the
+ * index (i.e. a RealTimeGet from the transaction log)
+ * 
  * @since solr 4.0
  */
 public class DocIdAugmenterFactory extends TransformerFactory
@@ -49,9 +52,8 @@ class DocIdAugmenter extends DocTransformer
 
   @Override
   public void transform(SolrDocument doc, int docid, float score) {
-    if( docid >= 0 ) {
-      doc.setField( name, docid );
-    }
+    assert -1 <= docid;
+    doc.setField( name, docid );
   }
 }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/08019f42/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java
index 8cf1129..682d6a0 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java
@@ -47,6 +47,7 @@ import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
+import org.apache.solr.response.transform.DocTransformer; // jdocs
 
 import org.apache.solr.util.RandomizeSSL;
 import org.apache.lucene.util.TestUtil;
@@ -90,8 +91,6 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
     (Arrays.<FlValidator>asList(
       // TODO: SOLR-9314: add more of these for other various transformers
       //
-      // TODO: add a [docid] validator (blocked by SOLR-9288 & SOLR-9289)
-      //
       new GlobValidator("*"),
       new GlobValidator("*_i"),
       new GlobValidator("*_s"),
@@ -119,6 +118,9 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
                             new RenameFieldValueValidator("id", "my_id_alias"),
                             new RenameFieldValueValidator("bbb_i", "my_int_field_alias"),
                             new RenameFieldValueValidator("ddd_s", "my_str_field_alias")));
+      // SOLR-9289...
+      FL_VALIDATORS.add(new DocIdValidator());
+      FL_VALIDATORS.add(new DocIdValidator("my_docid_alias"));
     } else {
       // No-Op
       // No known transformers that only work in distrib cloud but fail in singleCoreMode
@@ -428,7 +430,7 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
   }
 
   /** 
-   * abstraction for diff types of things that can be added to an 'fl' param that can validate
+   * Abstraction for diff types of things that can be added to an 'fl' param that can validate
    * the results are correct compared to an expected SolrInputDocument
    */
   private interface FlValidator {
@@ -441,6 +443,21 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
       }
       params.add(buildCommaSepParams(random(), "fl", fls));
     }
+
+    /**
+     * Indicates if this validator is for a transformer that returns true from 
+     * {@link DocTransformer#needsSolrIndexSearcher}.  Other validators for transformers that 
+     * do <em>not</em> require a re-opened searcher (but may have slightly diff behavior depending 
+     * on whether a doc comes from the index or from the update log) may use this information to
+     * decide whether they wish to enforce stricter assertions on the resulting document.
+     *
+     * The default implementation always returns <code>false</code>
+     *
+     * @see DocIdValidator
+     */
+    public default boolean requiresRealtimeSearcherReOpen() {
+      return false;
+    }
     
     /** 
      * Must return a non null String that can be used in an fl param -- either by itself, 
@@ -496,6 +513,42 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
     public String getFlParam() { return actualFieldName + ":" + expectedFieldName; }
   }
 
+  /** 
+   * enforces that a valid <code>[docid]</code> is present in the response, possibly using a 
+   * resultKey alias.  By default the only validation of docId values is that they are an integer 
+   * greater than or equal to <code>-1</code> -- but if any other validator in use returns true 
+   * from {@link #requiresRealtimeSearcherReOpen} then the constraint is tightened and values must 
+   * be greater than or equal to <code>0</code> 
+   */
+  private static class DocIdValidator implements FlValidator {
+    private final String resultKey;
+    public DocIdValidator(final String resultKey) {
+      this.resultKey = resultKey;
+    }
+    public DocIdValidator() {
+      this("[docid]");
+    }
+    public String getFlParam() { return "[docid]".equals(resultKey) ? resultKey : resultKey+":[docid]"; }
+    public Collection<String> assertRTGResults(final Collection<FlValidator> validators,
+                                               final SolrInputDocument expected,
+                                               final SolrDocument actual) {
+      final Object value =  actual.getFirstValue(resultKey);
+      assertNotNull(getFlParam() + " => no value in actual doc", value);
+      assertTrue("[docid] must be an Integer: " + value, value instanceof Integer);
+
+      int minValidDocId = -1; // if it comes from update log
+      for (FlValidator other : validators) {
+        if (other.requiresRealtimeSearcherReOpen()) {
+          minValidDocId = 0;
+          break;
+        }
+      }
+      assertTrue("[docid] must be >= " + minValidDocId + ": " + value,
+                 minValidDocId <= ((Integer)value).intValue());
+      return Collections.<String>singleton(resultKey);
+    }
+  }
+  
   /** Trivial validator of a ValueSourceAugmenter */
   private static class FunctionValidator implements FlValidator {
     private static String func(String fieldName) {
@@ -515,6 +568,8 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
       this.resultKey = resultKey;
       this.fieldName = fieldName;
     }
+    /** always returns true */
+    public boolean requiresRealtimeSearcherReOpen() { return true; }
     public String getFlParam() { return fl; }
     public Collection<String> assertRTGResults(final Collection<FlValidator> validators,
                                                final SolrInputDocument expected,

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/08019f42/solr/core/src/test/org/apache/solr/search/TestPseudoReturnFields.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestPseudoReturnFields.java b/solr/core/src/test/org/apache/solr/search/TestPseudoReturnFields.java
index 68f0773..87f3d89 100644
--- a/solr/core/src/test/org/apache/solr/search/TestPseudoReturnFields.java
+++ b/solr/core/src/test/org/apache/solr/search/TestPseudoReturnFields.java
@@ -531,21 +531,13 @@ public class TestPseudoReturnFields extends SolrTestCaseJ4 {
     }
   }
 
-  @AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-9288")
   public void testDocIdAugmenterRTG() throws Exception {
-    // NOTE: once this test is fixed to pass, testAugmentersRTG should also be updated to test [docid]
-
-    // TODO: behavior of fl=[docid] should be consistent regardless of wether doc is committed
-    // what should behavior be?
-    // right now, for an uncommited doc, [docid] is silently ignored and no value included in result
-    // perhaps it should be "null" or "-1" ?
-    
-    // behavior shouldn't matter if we are committed or uncommitted
+    // for an uncommitted doc, we should get -1
     for (String id : Arrays.asList("42","99")) {
       assertQ(id + ": fl=[docid]",
               req("qt","/get","id",id, "wt","xml", "fl","[docid]")
               ,"count(//doc)=1"
-              ,"//doc/int[@name='[docid]']"
+              ,"//doc/int[@name='[docid]'][.>=-1]"
               ,"//doc[count(*)=1]"
               );
     }
@@ -554,22 +546,21 @@ public class TestPseudoReturnFields extends SolrTestCaseJ4 {
   public void testAugmentersRTG() throws Exception {
     // behavior shouldn't matter if we are committed or uncommitted
     for (String id : Arrays.asList("42","99")) {
-      // NOTE: once testDocIdAugmenterRTG can pass, [docid] should be tested here as well.
       for (SolrParams p : Arrays.asList
-             (params("fl","[shard],[explain],x_alias:[value v=10 t=int],abs(val_i)"),
-              params("fl","[shard],abs(val_i)","fl","[explain],x_alias:[value v=10 t=int]"),
-              params("fl","[shard]","fl","[explain],x_alias:[value v=10 t=int]","fl","abs(val_i)"),
-              params("fl","[shard]","fl","[explain]","fl","x_alias:[value v=10 t=int]","fl","abs(val_i)"))) {
+             (params("fl","[docid],[shard],[explain],x_alias:[value v=10 t=int],abs(val_i)"),
+              params("fl","[docid],[shard],abs(val_i)","fl","[explain],x_alias:[value v=10 t=int]"),
+              params("fl","[docid],[shard]","fl","[explain],x_alias:[value v=10 t=int]","fl","abs(val_i)"),
+              params("fl","[docid]","fl","[shard]","fl","[explain]","fl","x_alias:[value v=10 t=int]","fl","abs(val_i)"))) {
         assertQ(id + ": " + p,
                 req(p, "qt","/get","id",id, "wt","xml")
                 ,"count(//doc)=1"
-                // ,"//doc/int[@name='[docid]']" // TODO
+                ,"//doc/int[@name='[docid]'][.>=-1]"
                 ,"//doc/float[@name='abs(val_i)'][.='1.0']"
                 ,"//doc/str[@name='[shard]'][.='[not a shard request]']"
                 // RTG: [explain] should be missing (ignored)
                 ,"//doc/int[@name='x_alias'][.=10]"
                 
-                ,"//doc[count(*)=3]"
+                ,"//doc[count(*)=4]"
                 );
       }
     }
@@ -595,21 +586,20 @@ public class TestPseudoReturnFields extends SolrTestCaseJ4 {
   public void testAugmentersAndExplicitRTG() throws Exception {
     // behavior shouldn't matter if we are committed or uncommitted
     for (String id : Arrays.asList("42","99")) {
-      // NOTE: once testDocIdAugmenterRTG can pass, [docid] should be tested here as well.
       for (SolrParams p : Arrays.asList
-             (params("fl","id,[explain],x_alias:[value v=10 t=int],abs(val_i)"),
-              params("fl","id,abs(val_i)","fl","[explain],x_alias:[value v=10 t=int]"),
-              params("fl","id","fl","[explain]","fl","x_alias:[value v=10 t=int]","fl","abs(val_i)"))) {
+             (params("fl","id,[docid],[explain],x_alias:[value v=10 t=int],abs(val_i)"),
+              params("fl","id,[docid],abs(val_i)","fl","[explain],x_alias:[value v=10 t=int]"),
+              params("fl","id","fl","[docid]","fl","[explain]","fl","x_alias:[value v=10 t=int]","fl","abs(val_i)"))) {
         assertQ(id + ": " + p,
                 req(p, "qt","/get","id",id, "wt","xml")
                 ,"count(//doc)=1"
                 ,"//doc/str[@name='id']"
-                // ,"//doc/int[@name='[docid]']" // TODO
+                ,"//doc/int[@name='[docid]'][.>=-1]"
                 ,"//doc/float[@name='abs(val_i)'][.='1.0']"
                 // RTG: [explain] should be missing (ignored)
                 ,"//doc/int[@name='x_alias'][.=10]"
                 
-                ,"//doc[count(*)=3]"
+                ,"//doc[count(*)=4]"
               );
       }
     }
@@ -646,29 +636,28 @@ public class TestPseudoReturnFields extends SolrTestCaseJ4 {
   public void testAugmentersAndScoreRTG() throws Exception {
     // if we use RTG (committed or otherwise) score should be ignored
     for (String id : Arrays.asList("42","99")) {
-      // NOTE: once testDocIdAugmenterRTG can pass, [docid] should be tested here as well.
       assertQ(id,
               req("qt","/get","id",id, "wt","xml",
-                  "fl","x_alias:[value v=10 t=int],score,abs(val_i)")
-              // ,"//doc/int[@name='[docid]']" // TODO
+                  "fl","x_alias:[value v=10 t=int],score,abs(val_i),[docid]")
+              ,"//doc/int[@name='[docid]'][.>=-1]"
               ,"//doc/float[@name='abs(val_i)'][.='1.0']"
               ,"//doc/int[@name='x_alias'][.=10]"
               
-              ,"//doc[count(*)=2]"
+              ,"//doc[count(*)=3]"
               );
-      for (SolrParams p : Arrays.asList(params("fl","x_alias:[value v=10 t=int],[explain],score,abs(val_i)"),
-                                        params("fl","x_alias:[value v=10 t=int],[explain]","fl","score,abs(val_i)"),
-                                        params("fl","x_alias:[value v=10 t=int]","fl","[explain]","fl","score","fl","abs(val_i)"))) {
+      for (SolrParams p : Arrays.asList(params("fl","[docid],x_alias:[value v=10 t=int],[explain],score,abs(val_i)"),
+                                        params("fl","x_alias:[value v=10 t=int],[explain]","fl","[docid],score,abs(val_i)"),
+                                        params("fl","[docid]","fl","x_alias:[value v=10 t=int]","fl","[explain]","fl","score","fl","abs(val_i)"))) {
         
         assertQ(p.toString(),
                 req(p, "qt","/get","id",id, "wt","xml")
                 
-                // ,"//doc/int[@name='[docid]']" // TODO
+                ,"//doc/int[@name='[docid]']" // TODO
                 ,"//doc/float[@name='abs(val_i)'][.='1.0']"
                 ,"//doc/int[@name='x_alias'][.=10]"
                 // RTG: [explain] and score should be missing (ignored)
                 
-                ,"//doc[count(*)=2]"
+                ,"//doc[count(*)=3]"
                 );
       }
     }
@@ -713,8 +702,7 @@ public class TestPseudoReturnFields extends SolrTestCaseJ4 {
 
     // NOTE: 'ssto' is the missing one
     final List<String> fl = Arrays.asList
-      // NOTE: once testDocIdAugmenterRTG can pass, [docid] should be tested here as well.
-      ("id","[explain]","score","val_*","subj*","abs(val_i)");
+      ("id","[explain]","score","val_*","subj*","abs(val_i)","[docid]");
     
     final int iters = atLeast(random, 10);
     for (int i = 0; i< iters; i++) {
@@ -734,12 +722,12 @@ public class TestPseudoReturnFields extends SolrTestCaseJ4 {
                   req(p, "qt","/get","id",id, "wt","xml")
                   ,"count(//doc)=1"
                   ,"//doc/str[@name='id']"
-                  // ,"//doc/int[@name='[docid]']" // TODO
+                  ,"//doc/int[@name='[docid]'][.>=-1]"
                   ,"//doc/float[@name='abs(val_i)'][.='1.0']"
                   // RTG: [explain] and score should be missing (ignored)
                   ,"//doc/int[@name='val_i'][.=1]"
                   ,"//doc/str[@name='subject']"
-                  ,"//doc[count(*)=4]"
+                  ,"//doc[count(*)=5]"
                   );
         }
       }
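
The net effect of the changes above is that fl=[docid] is no longer silently dropped for an
uncommitted document in real-time get: the augmenter now always returns an int, with -1 meaning
the document was served straight from the update log rather than an open searcher.  A minimal
SolrJ sketch of that contract (illustrative only -- it assumes a running core named "collection1"
with the test schema and an existing SolrClient named client):

    SolrQuery q = new SolrQuery();
    q.setRequestHandler("/get");
    q.set("id", "42");
    q.set("fl", "[docid]");
    QueryResponse rsp = client.query("collection1", q);
    // the /get handler returns the single matching document under the "doc" key
    SolrDocument doc = (SolrDocument) rsp.getResponse().get("doc");
    int docid = (Integer) doc.getFirstValue("[docid]");
    // -1 => uncommitted doc read from the update log; >= 0 => doc id from an open searcher
    assert docid >= -1;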


[03/51] [abbrv] lucene-solr:apiv2: LUCENE-7368: Remove queryNorm.

Posted by sa...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreQuery.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreQuery.java
index 289f833..8781d1e 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreQuery.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreQuery.java
@@ -61,8 +61,8 @@ final class GlobalOrdinalsWithScoreQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new W(this, toQuery.createWeight(searcher, false));
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new W(this, toQuery.createWeight(searcher, false, 1f));
   }
 
   @Override
@@ -142,17 +142,6 @@ final class GlobalOrdinalsWithScoreQuery extends Query {
     }
 
     @Override
-    public float getValueForNormalization() throws IOException {
-      return 1f;
-    }
-
-    @Override
-    public void normalize(float norm, float boost) {
-      // no normalization, we ignore the normalization process
-      // and produce scores based on the join
-    }
-
-    @Override
     public Scorer scorer(LeafReaderContext context) throws IOException {
       SortedDocValues values = DocValues.getSorted(context.reader(), joinField);
       if (values == null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/join/src/java/org/apache/lucene/search/join/PointInSetIncludingScoreQuery.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/PointInSetIncludingScoreQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/PointInSetIncludingScoreQuery.java
index d83bc8f..5018e97 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/PointInSetIncludingScoreQuery.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/PointInSetIncludingScoreQuery.java
@@ -118,8 +118,7 @@ abstract class PointInSetIncludingScoreQuery extends Query {
   }
 
   @Override
-  public final Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    final Weight originalWeight = originalQuery.createWeight(searcher, needsScores);
+  public final Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     return new Weight(this) {
 
       @Override
@@ -139,16 +138,6 @@ abstract class PointInSetIncludingScoreQuery extends Query {
       }
 
       @Override
-      public float getValueForNormalization() throws IOException {
-        return originalWeight.getValueForNormalization();
-      }
-
-      @Override
-      public void normalize(float norm, float boost) {
-        originalWeight.normalize(norm, boost);
-      }
-
-      @Override
       public Scorer scorer(LeafReaderContext context) throws IOException {
         LeafReader reader = context.reader();
         PointValues values = reader.getPointValues();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/join/src/java/org/apache/lucene/search/join/TermsIncludingScoreQuery.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/TermsIncludingScoreQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/TermsIncludingScoreQuery.java
index 94df35b..3b03bd3 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/TermsIncludingScoreQuery.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/TermsIncludingScoreQuery.java
@@ -106,8 +106,7 @@ class TermsIncludingScoreQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    final Weight originalWeight = originalQuery.createWeight(searcher, needsScores);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     return new Weight(TermsIncludingScoreQuery.this) {
 
       @Override
@@ -134,16 +133,6 @@ class TermsIncludingScoreQuery extends Query {
       }
 
       @Override
-      public float getValueForNormalization() throws IOException {
-        return originalWeight.getValueForNormalization();
-      }
-
-      @Override
-      public void normalize(float norm, float boost) {
-        originalWeight.normalize(norm, boost);
-      }
-
-      @Override
       public Scorer scorer(LeafReaderContext context) throws IOException {
         Terms terms = context.reader().terms(field);
         if (terms == null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/join/src/java/org/apache/lucene/search/join/ToChildBlockJoinQuery.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/ToChildBlockJoinQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/ToChildBlockJoinQuery.java
index f3d487f..b925b31 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/ToChildBlockJoinQuery.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/ToChildBlockJoinQuery.java
@@ -81,8 +81,8 @@ public class ToChildBlockJoinQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new ToChildBlockJoinWeight(this, parentQuery.createWeight(searcher, needsScores), parentsFilter, needsScores);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new ToChildBlockJoinWeight(this, parentQuery.createWeight(searcher, needsScores, boost), parentsFilter, needsScores);
   }
 
   /** Return our parent query. */
@@ -107,16 +107,6 @@ public class ToChildBlockJoinQuery extends Query {
       parentWeight.extractTerms(terms);
     }
 
-    @Override
-    public float getValueForNormalization() throws IOException {
-      return parentWeight.getValueForNormalization();
-    }
-
-    @Override
-    public void normalize(float norm, float boost) {
-      parentWeight.normalize(norm, boost);
-    }
-
     // NOTE: acceptDocs applies (and is checked) only in the
     // child document space
     @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java
index e4e099f..d5745d9 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java
@@ -115,8 +115,8 @@ public class ToParentBlockJoinQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new BlockJoinWeight(this, childQuery.createWeight(searcher, needsScores), parentsFilter, needsScores ? scoreMode : ScoreMode.None);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new BlockJoinWeight(this, childQuery.createWeight(searcher, needsScores, boost), parentsFilter, needsScores ? scoreMode : ScoreMode.None);
   }
   
   /** Return our child query. */
@@ -141,16 +141,6 @@ public class ToParentBlockJoinQuery extends Query {
       childWeight.extractTerms(terms);
     }
 
-    @Override
-    public float getValueForNormalization() throws IOException {
-      return childWeight.getValueForNormalization();
-    }
-
-    @Override
-    public void normalize(float norm, float boost) {
-      childWeight.normalize(norm, boost);
-    }
-
     // NOTE: acceptDocs applies (and is checked) only in the
     // parent document space
     @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java
index 3fc6485..af9ff5c 100644
--- a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java
+++ b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java
@@ -118,13 +118,13 @@ public class TestBlockJoin extends LuceneTestCase {
     IndexReader indexReader = DirectoryReader.open(directory);
     IndexSearcher indexSearcher = new IndexSearcher(indexReader);
 
-    Weight weight = toParentBlockJoinQuery.createWeight(indexSearcher, false);
+    Weight weight = toParentBlockJoinQuery.createWeight(indexSearcher, false, 1f);
     Set<Term> terms = new HashSet<>();
     weight.extractTerms(terms);
     Term[] termArr =terms.toArray(new Term[0]);
     assertEquals(1, termArr.length);
 
-    weight = toChildBlockJoinQuery.createWeight(indexSearcher, false);
+    weight = toChildBlockJoinQuery.createWeight(indexSearcher, false, 1f);
     terms = new HashSet<>();
     weight.extractTerms(terms);
     termArr =terms.toArray(new Term[0]);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java b/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java
index 5591d5d..b29e9ff 100644
--- a/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java
+++ b/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java
@@ -464,8 +464,8 @@ public class TestJoinUtil extends LuceneTestCase {
       private final Query fieldQuery = new FieldValueQuery(priceField);
 
       @Override
-      public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-        Weight fieldWeight = fieldQuery.createWeight(searcher, false);
+      public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+        Weight fieldWeight = fieldQuery.createWeight(searcher, false, boost);
         return new Weight(this) {
 
           @Override
@@ -478,15 +478,6 @@ public class TestJoinUtil extends LuceneTestCase {
           }
 
           @Override
-          public float getValueForNormalization() throws IOException {
-            return 0;
-          }
-
-          @Override
-          public void normalize(float norm, float topLevelBoost) {
-          }
-
-          @Override
           public Scorer scorer(LeafReaderContext context) throws IOException {
             Scorer fieldScorer = fieldWeight.scorer(context);
             if (fieldScorer == null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java b/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java
index 2f2d4f7..da9fdc5 100644
--- a/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java
+++ b/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java
@@ -419,9 +419,9 @@ public class TestDiversifiedTopDocsCollector extends LuceneTestCase {
     }
 
     @Override
-    public SimWeight computeWeight(
+    public SimWeight computeWeight(float boost,
         CollectionStatistics collectionStats, TermStatistics... termStats) {
-      return sim.computeWeight(collectionStats, termStats);
+      return sim.computeWeight(boost, collectionStats, termStats);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/queries/src/java/org/apache/lucene/queries/BoostingQuery.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/BoostingQuery.java b/lucene/queries/src/java/org/apache/lucene/queries/BoostingQuery.java
index 713014c..3bbbf1b 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/BoostingQuery.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/BoostingQuery.java
@@ -63,12 +63,12 @@ public class BoostingQuery extends Query {
     }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
       if (needsScores == false) {
-        return match.createWeight(searcher, needsScores);
+        return match.createWeight(searcher, needsScores, boost);
       }
-      final Weight matchWeight = searcher.createWeight(match, needsScores);
-      final Weight contextWeight = searcher.createWeight(context, false);
+      final Weight matchWeight = searcher.createWeight(match, needsScores, boost);
+      final Weight contextWeight = searcher.createWeight(context, false, boost);
       return new Weight(this) {
 
         @Override
@@ -92,16 +92,6 @@ public class BoostingQuery extends Query {
         }
 
         @Override
-        public float getValueForNormalization() throws IOException {
-          return matchWeight.getValueForNormalization();
-        }
-
-        @Override
-        public void normalize(float norm, float boost) {
-          matchWeight.normalize(norm, boost);
-        }
-
-        @Override
         public Scorer scorer(LeafReaderContext context) throws IOException {
           final Scorer matchScorer = matchWeight.scorer(context);
           if (matchScorer == null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/queries/src/java/org/apache/lucene/queries/CustomScoreQuery.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/CustomScoreQuery.java b/lucene/queries/src/java/org/apache/lucene/queries/CustomScoreQuery.java
index ab44fed..2ae6c01 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/CustomScoreQuery.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/CustomScoreQuery.java
@@ -26,7 +26,6 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.queries.function.FunctionQuery;
-import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.search.Explanation;
 import org.apache.lucene.search.FilterScorer;
 import org.apache.lucene.search.IndexSearcher;
@@ -48,7 +47,6 @@ public class CustomScoreQuery extends Query implements Cloneable {
 
   private Query subQuery;
   private Query[] scoringQueries; // never null (empty array if there are no valSrcQueries).
-  private boolean strict = false; // if true, valueSource part of query does not take part in weights normalization.
 
   /**
    * Create a CustomScoreQuery over input subQuery.
@@ -131,7 +129,6 @@ public class CustomScoreQuery extends Query implements Cloneable {
       sb.append(", ").append(scoringQuery.toString(field));
     }
     sb.append(")");
-    sb.append(strict?" STRICT" : "");
     return sb.toString();
   }
 
@@ -144,7 +141,6 @@ public class CustomScoreQuery extends Query implements Cloneable {
 
   private boolean equalsTo(CustomScoreQuery other) {
     return subQuery.equals(other.subQuery) &&
-           strict == other.strict &&
            scoringQueries.length == other.scoringQueries.length &&
            Arrays.equals(scoringQueries, other.scoringQueries);
   }
@@ -155,7 +151,7 @@ public class CustomScoreQuery extends Query implements Cloneable {
     // Didn't change this hashcode, but it looks suspicious.
     return (classHash() + 
         subQuery.hashCode() + 
-        Arrays.hashCode(scoringQueries)) ^ (strict ? 1234 : 4321);
+        Arrays.hashCode(scoringQueries));
   }
   
   /**
@@ -171,19 +167,23 @@ public class CustomScoreQuery extends Query implements Cloneable {
   //=========================== W E I G H T ============================
   
   private class CustomWeight extends Weight {
-    Weight subQueryWeight;
-    Weight[] valSrcWeights;
-    boolean qStrict;
-    float queryWeight;
+    final Weight subQueryWeight;
+    final Weight[] valSrcWeights;
+    final float queryWeight;
 
-    public CustomWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+    public CustomWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
       super(CustomScoreQuery.this);
-      this.subQueryWeight = subQuery.createWeight(searcher, needsScores);
+      // note we DONT incorporate our boost, nor pass down any boost 
+      // (e.g. from outer BQ), as there is no guarantee that the CustomScoreProvider's 
+      // function obeys the distributive law... it might call sqrt() on the subQuery score
+      // or some other arbitrary function other than multiplication.
+      // so, instead boosts are applied directly in score()
+      this.subQueryWeight = subQuery.createWeight(searcher, needsScores, 1f);
       this.valSrcWeights = new Weight[scoringQueries.length];
       for(int i = 0; i < scoringQueries.length; i++) {
-        this.valSrcWeights[i] = scoringQueries[i].createWeight(searcher, needsScores);
+        this.valSrcWeights[i] = scoringQueries[i].createWeight(searcher, needsScores, 1f);
       }
-      this.qStrict = strict;
+      this.queryWeight = boost;
     }
 
     @Override
@@ -195,36 +195,6 @@ public class CustomScoreQuery extends Query implements Cloneable {
     }
 
     @Override
-    public float getValueForNormalization() throws IOException {
-      float sum = subQueryWeight.getValueForNormalization();
-      for (Weight valSrcWeight : valSrcWeights) {
-        if (qStrict == false) { // otherwise do not include ValueSource part in the query normalization
-          sum += valSrcWeight.getValueForNormalization();
-        }
-      }
-      return sum;
-    }
-
-    /*(non-Javadoc) @see org.apache.lucene.search.Weight#normalize(float) */
-    @Override
-    public void normalize(float norm, float boost) {
-      // note we DONT incorporate our boost, nor pass down any boost 
-      // (e.g. from outer BQ), as there is no guarantee that the CustomScoreProvider's 
-      // function obeys the distributive law... it might call sqrt() on the subQuery score
-      // or some other arbitrary function other than multiplication.
-      // so, instead boosts are applied directly in score()
-      subQueryWeight.normalize(norm, 1f);
-      for (Weight valSrcWeight : valSrcWeights) {
-        if (qStrict) {
-          valSrcWeight.normalize(1, 1); // do not normalize the ValueSource part
-        } else {
-          valSrcWeight.normalize(norm, 1f);
-        }
-      }
-      queryWeight = boost;
-    }
-
-    @Override
     public Scorer scorer(LeafReaderContext context) throws IOException {
       Scorer subQueryScorer = subQueryWeight.scorer(context);
       if (subQueryScorer == null) {
@@ -311,30 +281,8 @@ public class CustomScoreQuery extends Query implements Cloneable {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new CustomWeight(searcher, needsScores);
-  }
-
-  /**
-   * Checks if this is strict custom scoring.
-   * In strict custom scoring, the {@link ValueSource} part does not participate in weight normalization.
-   * This may be useful when one wants full control over how scores are modified, and does 
-   * not care about normalizing by the {@link ValueSource} part.
-   * One particular case where this is useful if for testing this query.   
-   * <P>
-   * Note: only has effect when the {@link ValueSource} part is not null.
-   */
-  public boolean isStrict() {
-    return strict;
-  }
-
-  /**
-   * Set the strict mode of this query. 
-   * @param strict The strict mode to set.
-   * @see #isStrict()
-   */
-  public void setStrict(boolean strict) {
-    this.strict = strict;
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new CustomWeight(searcher, needsScores, boost);
   }
 
   /** The sub-query that CustomScoreQuery wraps, affecting both the score and which documents match. */
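
The constructor comment in CustomWeight above is the crux of this file's change: because a
CustomScoreProvider may apply an arbitrary, non-multiplicative function to the sub-query score,
the boost can no longer be folded into the wrapped weights; it is kept in queryWeight and applied
after the custom function runs.  A tiny worked example of why the order matters (the numbers are
made up, only to show that sqrt() does not distribute over the boost):

    float subScore = 4f;
    float boost = 2f;
    float foldedIn     = (float) Math.sqrt(boost * subScore); // ~2.83 -- not what a 2x boost should mean
    float appliedAfter = boost * (float) Math.sqrt(subScore); // 4.0  -- boost applied after the custom function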

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/queries/src/java/org/apache/lucene/queries/TermsQuery.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/TermsQuery.java b/lucene/queries/src/java/org/apache/lucene/queries/TermsQuery.java
index 5c03b2d..7b7f094 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/TermsQuery.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/TermsQuery.java
@@ -254,8 +254,8 @@ public class TermsQuery extends Query implements Accountable {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new ConstantScoreWeight(this) {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new ConstantScoreWeight(this, boost) {
 
       @Override
       public void extractTerms(Set<Term> terms) {
@@ -334,8 +334,7 @@ public class TermsQuery extends Query implements Accountable {
             bq.add(new TermQuery(new Term(t.field, t.term), termContext), Occur.SHOULD);
           }
           Query q = new ConstantScoreQuery(bq.build());
-          final Weight weight = searcher.rewrite(q).createWeight(searcher, needsScores);
-          weight.normalize(1f, score());
+          final Weight weight = searcher.rewrite(q).createWeight(searcher, needsScores, score());
           return new WeightOrDocIdSet(weight);
         } else {
           assert builder != null;
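
This hunk also shows the shape of the new API used throughout the commit: a Weight receives the
boost up front via createWeight(searcher, needsScores, boost) -- here forwarded as score() to the
rewritten inner query -- instead of having it pushed in later through
getValueForNormalization()/normalize().  A minimal sketch of a constant-scoring query under the
new signature (a hypothetical class, not part of the patch):

    import java.io.IOException;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.ConstantScoreScorer;
    import org.apache.lucene.search.ConstantScoreWeight;
    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.Weight;

    // Matches every document with a constant score equal to the boost.
    final class ExampleMatchAllQuery extends Query {

      @Override
      public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
        // the boost is handed to ConstantScoreWeight once; there is no later normalize() step
        return new ConstantScoreWeight(this, boost) {
          @Override
          public Scorer scorer(LeafReaderContext context) throws IOException {
            // score() returns the boost that was passed to the ConstantScoreWeight constructor
            return new ConstantScoreScorer(this, score(), DocIdSetIterator.all(context.reader().maxDoc()));
          }
        };
      }

      @Override
      public String toString(String field) {
        return "ExampleMatchAllQuery";
      }

      @Override
      public boolean equals(Object other) {
        return sameClassAs(other);
      }

      @Override
      public int hashCode() {
        return classHash();
      }
    }

Queries that previously overrode normalize() as a no-op (such as the join queries earlier in this
patch) now express the same intent by simply ignoring the boost argument they receive.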

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/queries/src/java/org/apache/lucene/queries/function/BoostedQuery.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/BoostedQuery.java b/lucene/queries/src/java/org/apache/lucene/queries/function/BoostedQuery.java
index 3d82723..c8b9423 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/BoostedQuery.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/BoostedQuery.java
@@ -59,17 +59,17 @@ public final class BoostedQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new BoostedQuery.BoostedWeight(searcher, needsScores);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new BoostedQuery.BoostedWeight(searcher, needsScores, boost);
   }
 
   private class BoostedWeight extends Weight {
     Weight qWeight;
     Map fcontext;
 
-    public BoostedWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+    public BoostedWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
       super(BoostedQuery.this);
-      this.qWeight = searcher.createWeight(q, needsScores);
+      this.qWeight = searcher.createWeight(q, needsScores, boost);
       this.fcontext = ValueSource.newContext(searcher);
       boostVal.createWeight(fcontext,searcher);
     }
@@ -80,16 +80,6 @@ public final class BoostedQuery extends Query {
     }
 
     @Override
-    public float getValueForNormalization() throws IOException {
-      return qWeight.getValueForNormalization();
-    }
-
-    @Override
-    public void normalize(float norm, float boost) {
-      qWeight.normalize(norm, boost);
-    }
-
-    @Override
     public Scorer scorer(LeafReaderContext context) throws IOException {
       Scorer subQueryScorer = qWeight.scorer(context);
       if (subQueryScorer == null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionQuery.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionQuery.java b/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionQuery.java
index e6152e8..4acb563 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionQuery.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionQuery.java
@@ -55,35 +55,23 @@ public class FunctionQuery extends Query {
 
   protected class FunctionWeight extends Weight {
     protected final IndexSearcher searcher;
-    protected float queryNorm, boost, queryWeight;
+    protected final float boost;
     protected final Map context;
 
-    public FunctionWeight(IndexSearcher searcher) throws IOException {
+    public FunctionWeight(IndexSearcher searcher, float boost) throws IOException {
       super(FunctionQuery.this);
       this.searcher = searcher;
       this.context = ValueSource.newContext(searcher);
       func.createWeight(context, searcher);
-      normalize(1f, 1f);;
+      this.boost = boost;
     }
 
     @Override
     public void extractTerms(Set<Term> terms) {}
 
     @Override
-    public float getValueForNormalization() throws IOException {
-      return queryWeight * queryWeight;
-    }
-
-    @Override
-    public void normalize(float norm, float boost) {
-      this.queryNorm = norm;
-      this.boost = boost;
-      this.queryWeight = norm * boost;
-    }
-
-    @Override
     public Scorer scorer(LeafReaderContext context) throws IOException {
-      return new AllScorer(context, this, queryWeight);
+      return new AllScorer(context, this, boost);
     }
 
     @Override
@@ -96,14 +84,14 @@ public class FunctionQuery extends Query {
     final IndexReader reader;
     final FunctionWeight weight;
     final int maxDoc;
-    final float qWeight;
+    final float boost;
     final DocIdSetIterator iterator;
     final FunctionValues vals;
 
-    public AllScorer(LeafReaderContext context, FunctionWeight w, float qWeight) throws IOException {
+    public AllScorer(LeafReaderContext context, FunctionWeight w, float boost) throws IOException {
       super(w);
       this.weight = w;
-      this.qWeight = qWeight;
+      this.boost = boost;
       this.reader = context.reader();
       this.maxDoc = reader.maxDoc();
       iterator = DocIdSetIterator.all(context.reader().maxDoc());
@@ -122,7 +110,7 @@ public class FunctionQuery extends Query {
 
     @Override
     public float score() throws IOException {
-      float score = qWeight * vals.floatVal(docID());
+      float score = boost * vals.floatVal(docID());
 
       // Current Lucene priority queues can't handle NaN and -Infinity, so
       // map to -Float.MAX_VALUE. This conditional handles both -infinity
@@ -136,20 +124,19 @@ public class FunctionQuery extends Query {
     }
 
     public Explanation explain(int doc) throws IOException {
-      float sc = qWeight * vals.floatVal(doc);
+      float sc = boost * vals.floatVal(doc);
 
       return Explanation.match(sc, "FunctionQuery(" + func + "), product of:",
           vals.explain(doc),
-          Explanation.match(weight.boost, "boost"),
-          Explanation.match(weight.queryNorm, "queryNorm"));
+          Explanation.match(weight.boost, "boost"));
     }
 
   }
 
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new FunctionQuery.FunctionWeight(searcher);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new FunctionQuery.FunctionWeight(searcher, boost);
   }
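
With queryNorm gone, FunctionQuery's score reduces to boost * functionValue, and the explanation
above now lists only the function value and the boost.  A self-contained sketch of that behaviour
(the "popularity" field and its value are made up; assumes the lucene-core, lucene-queries and
lucene-analyzers-common jars from this branch are on the classpath):

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.NumericDocValuesField;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.queries.function.FunctionQuery;
    import org.apache.lucene.queries.function.valuesource.LongFieldSource;
    import org.apache.lucene.search.BoostQuery;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.TopDocs;
    import org.apache.lucene.store.RAMDirectory;

    public class FunctionBoostDemo {
      public static void main(String[] args) throws Exception {
        RAMDirectory dir = new RAMDirectory();
        IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()));
        Document doc = new Document();
        // numeric doc values field read back by LongFieldSource below
        doc.add(new NumericDocValuesField("popularity", 7L));
        w.addDocument(doc);
        w.close();

        IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(dir));
        FunctionQuery fq = new FunctionQuery(new LongFieldSource("popularity"));
        TopDocs td = searcher.search(new BoostQuery(fq, 3f), 1);
        // expected: 21.0 = boost (3) * function value (7); no queryNorm factor is involved
        System.out.println(td.scoreDocs[0].score);
      }
    }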
 
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionRangeQuery.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionRangeQuery.java b/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionRangeQuery.java
index 60cfca3..41572a5 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionRangeQuery.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionRangeQuery.java
@@ -114,7 +114,7 @@ public class FunctionRangeQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     return new FunctionRangeWeight(searcher);
   }
 
@@ -133,17 +133,6 @@ public class FunctionRangeQuery extends Query {
       //none
     }
 
-    //Note: this uses the functionValue's floatVal() as the score; queryNorm/boost is ignored.
-    @Override
-    public float getValueForNormalization() throws IOException {
-      return 1f;
-    }
-
-    @Override
-    public void normalize(float norm, float topLevelBoost) {
-      //no-op
-    }
-
     @Override
     public Explanation explain(LeafReaderContext context, int doc) throws IOException {
       FunctionValues functionValues = valueSource.getValues(vsContext, context);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/queries/src/java/org/apache/lucene/queries/payloads/PayloadScoreQuery.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/payloads/PayloadScoreQuery.java b/lucene/queries/src/java/org/apache/lucene/queries/payloads/PayloadScoreQuery.java
index 2d483ba..5669406 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/payloads/PayloadScoreQuery.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/payloads/PayloadScoreQuery.java
@@ -86,11 +86,11 @@ public class PayloadScoreQuery extends SpanQuery {
   }
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    SpanWeight innerWeight = wrappedQuery.createWeight(searcher, needsScores);
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    SpanWeight innerWeight = wrappedQuery.createWeight(searcher, needsScores, boost);
     if (!needsScores)
       return innerWeight;
-    return new PayloadSpanWeight(searcher, innerWeight);
+    return new PayloadSpanWeight(searcher, innerWeight, boost);
   }
 
   @Override
@@ -116,8 +116,8 @@ public class PayloadScoreQuery extends SpanQuery {
 
     private final SpanWeight innerWeight;
 
-    public PayloadSpanWeight(IndexSearcher searcher, SpanWeight innerWeight) throws IOException {
-      super(PayloadScoreQuery.this, searcher, null);
+    public PayloadSpanWeight(IndexSearcher searcher, SpanWeight innerWeight, float boost) throws IOException {
+      super(PayloadScoreQuery.this, searcher, null, boost);
       this.innerWeight = innerWeight;
     }
 
@@ -147,16 +147,6 @@ public class PayloadScoreQuery extends SpanQuery {
     }
 
     @Override
-    public float getValueForNormalization() throws IOException {
-      return innerWeight.getValueForNormalization();
-    }
-
-    @Override
-    public void normalize(float queryNorm, float topLevelBoost) {
-      innerWeight.normalize(queryNorm, topLevelBoost);
-    }
-
-    @Override
     public Explanation explain(LeafReaderContext context, int doc) throws IOException {
       PayloadSpanScorer scorer = scorer(context);
       if (scorer == null || scorer.iterator().advance(doc) != doc)

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/queries/src/java/org/apache/lucene/queries/payloads/SpanPayloadCheckQuery.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/payloads/SpanPayloadCheckQuery.java b/lucene/queries/src/java/org/apache/lucene/queries/payloads/SpanPayloadCheckQuery.java
index 419a82a..57ba678 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/payloads/SpanPayloadCheckQuery.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/payloads/SpanPayloadCheckQuery.java
@@ -59,9 +59,9 @@ public class SpanPayloadCheckQuery extends SpanQuery {
   }
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    SpanWeight matchWeight = match.createWeight(searcher, false);
-    return new SpanPayloadCheckWeight(searcher, needsScores ? getTermContexts(matchWeight) : null, matchWeight);
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    SpanWeight matchWeight = match.createWeight(searcher, false, boost);
+    return new SpanPayloadCheckWeight(searcher, needsScores ? getTermContexts(matchWeight) : null, matchWeight, boost);
   }
 
   /**
@@ -71,8 +71,8 @@ public class SpanPayloadCheckQuery extends SpanQuery {
 
     final SpanWeight matchWeight;
 
-    public SpanPayloadCheckWeight(IndexSearcher searcher, Map<Term, TermContext> termContexts, SpanWeight matchWeight) throws IOException {
-      super(SpanPayloadCheckQuery.this, searcher, termContexts);
+    public SpanPayloadCheckWeight(IndexSearcher searcher, Map<Term, TermContext> termContexts, SpanWeight matchWeight, float boost) throws IOException {
+      super(SpanPayloadCheckQuery.this, searcher, termContexts, boost);
       this.matchWeight = matchWeight;
     }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/queries/src/test/org/apache/lucene/queries/TestCustomScoreExplanations.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/TestCustomScoreExplanations.java b/lucene/queries/src/test/org/apache/lucene/queries/TestCustomScoreExplanations.java
index c87a004..ab3cced 100644
--- a/lucene/queries/src/test/org/apache/lucene/queries/TestCustomScoreExplanations.java
+++ b/lucene/queries/src/test/org/apache/lucene/queries/TestCustomScoreExplanations.java
@@ -62,36 +62,30 @@ public class TestCustomScoreExplanations extends BaseExplanationTestCase {
     searcher.setSimilarity(new BM25Similarity());
 
     Explanation expl = searcher.explain(query, 0);
+    assertEquals(2, expl.getDetails().length);
     // function
     assertEquals(5f, expl.getDetails()[0].getValue(), 0f);
     // boost
     assertEquals("boost", expl.getDetails()[1].getDescription());
     assertEquals(1f, expl.getDetails()[1].getValue(), 0f);
-    // norm
-    assertEquals("queryNorm", expl.getDetails()[2].getDescription());
-    assertEquals(1f, expl.getDetails()[2].getValue(), 0f);
 
     query = new BoostQuery(query, 2);
     expl = searcher.explain(query, 0);
+    assertEquals(2, expl.getDetails().length);
     // function
     assertEquals(5f, expl.getDetails()[0].getValue(), 0f);
     // boost
     assertEquals("boost", expl.getDetails()[1].getDescription());
     assertEquals(2f, expl.getDetails()[1].getValue(), 0f);
-    // norm
-    assertEquals("queryNorm", expl.getDetails()[2].getDescription());
-    assertEquals(1f, expl.getDetails()[2].getValue(), 0f);
 
     searcher.setSimilarity(new ClassicSimilarity()); // in order to have a queryNorm != 1
     expl = searcher.explain(query, 0);
+    assertEquals(2, expl.getDetails().length);
     // function
     assertEquals(5f, expl.getDetails()[0].getValue(), 0f);
     // boost
     assertEquals("boost", expl.getDetails()[1].getDescription());
     assertEquals(2f, expl.getDetails()[1].getValue(), 0f);
-    // norm
-    assertEquals("queryNorm", expl.getDetails()[2].getDescription());
-    assertEquals(0.5f, expl.getDetails()[2].getValue(), 0f);
   }
 }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/queries/src/test/org/apache/lucene/queries/TestCustomScoreQuery.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/TestCustomScoreQuery.java b/lucene/queries/src/test/org/apache/lucene/queries/TestCustomScoreQuery.java
index c3ee47d..0caa4ce 100644
--- a/lucene/queries/src/test/org/apache/lucene/queries/TestCustomScoreQuery.java
+++ b/lucene/queries/src/test/org/apache/lucene/queries/TestCustomScoreQuery.java
@@ -246,7 +246,6 @@ public class TestCustomScoreQuery extends FunctionTestSetup {
     Query q3CustomMul;
     {
       CustomScoreQuery csq = new CustomScoreQuery(q1, functionQuery);
-      csq.setStrict(true);
       q3CustomMul = csq;
     }
     q3CustomMul = new BoostQuery(q3CustomMul, boost);
@@ -256,7 +255,6 @@ public class TestCustomScoreQuery extends FunctionTestSetup {
     Query q4CustomAdd;
     {
       CustomScoreQuery csq = new CustomAddQuery(q1, functionQuery);
-      csq.setStrict(true);
       q4CustomAdd = csq;
     }
     q4CustomAdd = new BoostQuery(q4CustomAdd, boost);
@@ -266,7 +264,6 @@ public class TestCustomScoreQuery extends FunctionTestSetup {
     Query q5CustomMulAdd;
     {
       CustomScoreQuery csq = new CustomMulAddQuery(q1, functionQuery, functionQuery);
-      csq.setStrict(true);
       q5CustomMulAdd = csq;
     }
     q5CustomMulAdd = new BoostQuery(q5CustomMulAdd, boost);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/queries/src/test/org/apache/lucene/queries/function/TestLongNormValueSource.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/function/TestLongNormValueSource.java b/lucene/queries/src/test/org/apache/lucene/queries/function/TestLongNormValueSource.java
index 4518699..cfebc47 100644
--- a/lucene/queries/src/test/org/apache/lucene/queries/function/TestLongNormValueSource.java
+++ b/lucene/queries/src/test/org/apache/lucene/queries/function/TestLongNormValueSource.java
@@ -124,12 +124,6 @@ class PreciseClassicSimilarity extends TFIDFSimilarity {
   /** Sole constructor: parameter-free */
   public PreciseClassicSimilarity() {}
 
-  /** Implemented as <code>1/sqrt(sumOfSquaredWeights)</code>. */
-  @Override
-  public float queryNorm(float sumOfSquaredWeights) {
-    return (float)(1.0 / Math.sqrt(sumOfSquaredWeights));
-  }
-
   /**
    * Encodes a normalization factor for storage in an index.
    * <p>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadScoreQuery.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadScoreQuery.java b/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadScoreQuery.java
index 7429797..188ef61 100644
--- a/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadScoreQuery.java
+++ b/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadScoreQuery.java
@@ -262,11 +262,6 @@ public class TestPayloadScoreQuery extends LuceneTestCase {
 
   static class BoostingSimilarity extends MultiplyingSimilarity {
 
-    @Override
-    public float queryNorm(float sumOfSquaredWeights) {
-      return 1.0f;
-    }
-
     //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
     //Make everything else 1 so we see the effect of the payload
     //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadSpans.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadSpans.java b/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadSpans.java
index 2c0204d..3c5b80f 100644
--- a/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadSpans.java
+++ b/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadSpans.java
@@ -74,12 +74,12 @@ public class TestPayloadSpans extends LuceneTestCase {
     Spans spans;
     stq = new SpanTermQuery(new Term(PayloadHelper.FIELD, "seventy"));
 
-    spans = stq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
+    spans = stq.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
     assertTrue("spans is null and it shouldn't be", spans != null);
     checkSpans(spans, 100, 1, 1, 1);
 
     stq = new SpanTermQuery(new Term(PayloadHelper.NO_PAYLOAD_FIELD, "seventy"));  
-    spans = stq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
+    spans = stq.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
     assertTrue("spans is null and it shouldn't be", spans != null);
     checkSpans(spans, 100, 0, 0, 0);
   }
@@ -90,7 +90,7 @@ public class TestPayloadSpans extends LuceneTestCase {
     SpanFirstQuery sfq;
     match = new SpanTermQuery(new Term(PayloadHelper.FIELD, "one"));
     sfq = new SpanFirstQuery(match, 2);
-    Spans spans = sfq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
+    Spans spans = sfq.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
     checkSpans(spans, 109, 1, 1, 1);
     //Test more complicated subclause
     SpanQuery[] clauses = new SpanQuery[2];
@@ -98,11 +98,11 @@ public class TestPayloadSpans extends LuceneTestCase {
     clauses[1] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "hundred"));
     match = new SpanNearQuery(clauses, 0, true);
     sfq = new SpanFirstQuery(match, 2);
-    checkSpans(sfq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS), 100, 2, 1, 1);
+    checkSpans(sfq.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS), 100, 2, 1, 1);
 
     match = new SpanNearQuery(clauses, 0, false);
     sfq = new SpanFirstQuery(match, 2);
-    checkSpans(sfq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS), 100, 2, 1, 1);
+    checkSpans(sfq.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS), 100, 2, 1, 1);
     
   }
   
@@ -125,7 +125,7 @@ public class TestPayloadSpans extends LuceneTestCase {
     IndexReader reader = getOnlyLeafReader(writer.getReader());
     writer.close();
 
-    checkSpans(snq.createWeight(newSearcher(reader, false), false).getSpans(reader.leaves().get(0), SpanWeight.Postings.PAYLOADS), 1, new int[]{2});
+    checkSpans(snq.createWeight(newSearcher(reader, false), false, 1f).getSpans(reader.leaves().get(0), SpanWeight.Postings.PAYLOADS), 1, new int[]{2});
     reader.close();
     directory.close();
   }
@@ -136,7 +136,7 @@ public class TestPayloadSpans extends LuceneTestCase {
     IndexSearcher searcher = getSearcher();
 
     stq = new SpanTermQuery(new Term(PayloadHelper.FIELD, "mark"));
-    spans = stq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
+    spans = stq.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
     assertNull(spans);
 
     SpanQuery[] clauses = new SpanQuery[3];
@@ -145,7 +145,7 @@ public class TestPayloadSpans extends LuceneTestCase {
     clauses[2] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "xx"));
     SpanNearQuery spanNearQuery = new SpanNearQuery(clauses, 12, false);
 
-    spans = spanNearQuery.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
+    spans = spanNearQuery.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
     assertTrue("spans is null and it shouldn't be", spans != null);
     checkSpans(spans, 2, new int[]{3,3});
 
@@ -156,7 +156,7 @@ public class TestPayloadSpans extends LuceneTestCase {
 
     spanNearQuery = new SpanNearQuery(clauses, 6, true);
    
-    spans = spanNearQuery.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
+    spans = spanNearQuery.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
 
     assertTrue("spans is null and it shouldn't be", spans != null);
     checkSpans(spans, 1, new int[]{3});
@@ -178,7 +178,7 @@ public class TestPayloadSpans extends LuceneTestCase {
     SpanNearQuery nestedSpanNearQuery = new SpanNearQuery(clauses2, 6, false);
     
     // yy within 6 of xx within 6 of rr
-    spans = nestedSpanNearQuery.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
+    spans = nestedSpanNearQuery.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
     assertTrue("spans is null and it shouldn't be", spans != null);
     checkSpans(spans, 2, new int[]{3,3});
     closeIndexReader.close();
@@ -209,7 +209,7 @@ public class TestPayloadSpans extends LuceneTestCase {
     clauses3[1] = snq;
 
     SpanNearQuery nestedSpanNearQuery = new SpanNearQuery(clauses3, 6, false);
-    spans = nestedSpanNearQuery.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
+    spans = nestedSpanNearQuery.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
 
     assertTrue("spans is null and it shouldn't be", spans != null);
     checkSpans(spans, 1, new int[]{3});
@@ -247,7 +247,7 @@ public class TestPayloadSpans extends LuceneTestCase {
      
     SpanNearQuery nestedSpanNearQuery = new SpanNearQuery(clauses3, 6, false);
 
-    spans = nestedSpanNearQuery.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
+    spans = nestedSpanNearQuery.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
     assertTrue("spans is null and it shouldn't be", spans != null);
     checkSpans(spans, 2, new int[]{8, 8});
     closeIndexReader.close();
@@ -272,7 +272,7 @@ public class TestPayloadSpans extends LuceneTestCase {
     SpanQuery[] sqs = { stq1, stq2 };
     SpanNearQuery snq = new SpanNearQuery(sqs, 1, true);
     VerifyingCollector collector = new VerifyingCollector();
-    Spans spans = snq.createWeight(is, false).getSpans(is.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
+    Spans spans = snq.createWeight(is, false, 1f).getSpans(is.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
 
     TopDocs topDocs = is.search(snq, 1);
     Set<String> payloadSet = new HashSet<>();
@@ -311,7 +311,7 @@ public class TestPayloadSpans extends LuceneTestCase {
     SpanQuery[] sqs = { stq1, stq2 };
     SpanNearQuery snq = new SpanNearQuery(sqs, 0, true);
     VerifyingCollector collector = new VerifyingCollector();
-    Spans spans = snq.createWeight(is, false).getSpans(is.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
+    Spans spans = snq.createWeight(is, false, 1f).getSpans(is.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
 
     TopDocs topDocs = is.search(snq, 1);
     Set<String> payloadSet = new HashSet<>();
@@ -349,7 +349,7 @@ public class TestPayloadSpans extends LuceneTestCase {
     SpanTermQuery stq2 = new SpanTermQuery(new Term("content", "k"));
     SpanQuery[] sqs = { stq1, stq2 };
     SpanNearQuery snq = new SpanNearQuery(sqs, 0, true);
-    Spans spans = snq.createWeight(is, false).getSpans(is.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
+    Spans spans = snq.createWeight(is, false, 1f).getSpans(is.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
 
     TopDocs topDocs = is.search(snq, 1);
     Set<String> payloadSet = new HashSet<>();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadTermQuery.java
----------------------------------------------------------------------
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadTermQuery.java b/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadTermQuery.java
index 8f65c1a..9cc7067 100644
--- a/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadTermQuery.java
+++ b/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadTermQuery.java
@@ -164,7 +164,7 @@ public class TestPayloadTermQuery extends LuceneTestCase {
       assertTrue(doc.score + " does not equal: " + 1, doc.score == 1);
     }
     CheckHits.checkExplanations(query, PayloadHelper.FIELD, searcher, true);
-    Spans spans = query.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = query.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertTrue("spans is null and it shouldn't be", spans != null);
     /*float score = hits.score(0);
     for (int i =1; i < hits.length(); i++)
@@ -215,7 +215,7 @@ public class TestPayloadTermQuery extends LuceneTestCase {
     }
     assertTrue(numTens + " does not equal: " + 10, numTens == 10);
     CheckHits.checkExplanations(query, "field", searcher, true);
-    Spans spans = query.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = query.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertTrue("spans is null and it shouldn't be", spans != null);
     //should be two matches per document
     int count = 0;
@@ -257,11 +257,6 @@ public class TestPayloadTermQuery extends LuceneTestCase {
 
   static class BoostingSimilarity extends ClassicSimilarity {
 
-    @Override
-    public float queryNorm(float sumOfSquaredWeights) {
-      return 1;
-    }
-
     // TODO: Remove warning after API has been finalized
     @Override
     public float scorePayload(int docId, int start, int end, BytesRef payload) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/sandbox/src/java/org/apache/lucene/document/LatLonPointDistanceQuery.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonPointDistanceQuery.java b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonPointDistanceQuery.java
index d479713..1b20d95 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonPointDistanceQuery.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonPointDistanceQuery.java
@@ -67,7 +67,7 @@ final class LatLonPointDistanceQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     Rectangle box = Rectangle.fromPointDistance(latitude, longitude, radiusMeters);
     // create bounding box(es) for the distance range
     // these are pre-encoded with LatLonPoint's encoding
@@ -100,7 +100,7 @@ final class LatLonPointDistanceQuery extends Query {
 
     final double axisLat = Rectangle.axisLat(latitude, radiusMeters);
 
-    return new ConstantScoreWeight(this) {
+    return new ConstantScoreWeight(this, boost) {
 
       @Override
       public Scorer scorer(LeafReaderContext context) throws IOException {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/sandbox/src/java/org/apache/lucene/document/LatLonPointInPolygonQuery.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonPointInPolygonQuery.java b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonPointInPolygonQuery.java
index 8db8296..036fe2c 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonPointInPolygonQuery.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonPointInPolygonQuery.java
@@ -74,7 +74,7 @@ final class LatLonPointInPolygonQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
 
     // I don't use RandomAccessWeight here: it's no good to approximate with "match all docs"; this is an inverted structure and should be
     // used in the first pass:
@@ -93,7 +93,7 @@ final class LatLonPointInPolygonQuery extends Query {
 
     final Polygon2D tree = Polygon2D.create(polygons);
 
-    return new ConstantScoreWeight(this) {
+    return new ConstantScoreWeight(this, boost) {
 
       @Override
       public Scorer scorer(LeafReaderContext context) throws IOException {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/sandbox/src/java/org/apache/lucene/search/DocValuesNumbersQuery.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/java/org/apache/lucene/search/DocValuesNumbersQuery.java b/lucene/sandbox/src/java/org/apache/lucene/search/DocValuesNumbersQuery.java
index a588e88..655e9f3 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/search/DocValuesNumbersQuery.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/search/DocValuesNumbersQuery.java
@@ -95,8 +95,8 @@ public class DocValuesNumbersQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new RandomAccessWeight(this) {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new RandomAccessWeight(this, boost) {
 
       @Override
       protected Bits getMatchingDocs(LeafReaderContext context) throws IOException {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/sandbox/src/java/org/apache/lucene/search/DocValuesRangeQuery.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/java/org/apache/lucene/search/DocValuesRangeQuery.java b/lucene/sandbox/src/java/org/apache/lucene/search/DocValuesRangeQuery.java
index d603040..44f3f8c 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/search/DocValuesRangeQuery.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/search/DocValuesRangeQuery.java
@@ -139,11 +139,11 @@ public final class DocValuesRangeQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     if (lowerVal == null && upperVal == null) {
       throw new IllegalStateException("Both min and max values must not be null, call rewrite first");
     }
-    return new RandomAccessWeight(DocValuesRangeQuery.this) {
+    return new RandomAccessWeight(DocValuesRangeQuery.this, boost) {
       
       @Override
       protected Bits getMatchingDocs(LeafReaderContext context) throws IOException {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/sandbox/src/java/org/apache/lucene/search/DocValuesTermsQuery.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/java/org/apache/lucene/search/DocValuesTermsQuery.java b/lucene/sandbox/src/java/org/apache/lucene/search/DocValuesTermsQuery.java
index 7cb2ce1..4be4b18 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/search/DocValuesTermsQuery.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/search/DocValuesTermsQuery.java
@@ -148,8 +148,8 @@ public class DocValuesTermsQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new RandomAccessWeight(this) {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new RandomAccessWeight(this, boost) {
 
       @Override
       protected Bits getMatchingDocs(LeafReaderContext context) throws IOException {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/sandbox/src/java/org/apache/lucene/search/TermAutomatonQuery.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/java/org/apache/lucene/search/TermAutomatonQuery.java b/lucene/sandbox/src/java/org/apache/lucene/search/TermAutomatonQuery.java
index b3a7ba2..530a9b6 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/search/TermAutomatonQuery.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/search/TermAutomatonQuery.java
@@ -186,7 +186,7 @@ public class TermAutomatonQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     IndexReaderContext context = searcher.getTopReaderContext();
     Map<Integer,TermContext> termStates = new HashMap<>();
 
@@ -196,7 +196,7 @@ public class TermAutomatonQuery extends Query {
       }
     }
 
-    return new TermAutomatonWeight(det, searcher, termStates);
+    return new TermAutomatonWeight(det, searcher, termStates, boost);
   }
 
   @Override
@@ -332,7 +332,7 @@ public class TermAutomatonQuery extends Query {
     private final Similarity.SimWeight stats;
     private final Similarity similarity;
 
-    public TermAutomatonWeight(Automaton automaton, IndexSearcher searcher, Map<Integer,TermContext> termStates) throws IOException {
+    public TermAutomatonWeight(Automaton automaton, IndexSearcher searcher, Map<Integer,TermContext> termStates, float boost) throws IOException {
       super(TermAutomatonQuery.this);
       this.automaton = automaton;
       this.termStates = termStates;
@@ -345,7 +345,7 @@ public class TermAutomatonQuery extends Query {
         }
       }
 
-      stats = similarity.computeWeight(searcher.collectionStatistics(field),
+      stats = similarity.computeWeight(boost, searcher.collectionStatistics(field),
                                        allTermStats.toArray(new TermStatistics[allTermStats.size()]));
     }
 
@@ -364,16 +364,6 @@ public class TermAutomatonQuery extends Query {
     }
 
     @Override
-    public float getValueForNormalization() {
-      return stats.getValueForNormalization();
-    }
-
-    @Override
-    public void normalize(float queryNorm, float boost) {
-      stats.normalize(queryNorm, boost);
-    }
-
-    @Override
     public Scorer scorer(LeafReaderContext context) throws IOException {
 
       // Initialize the enums; null for a given slot means that term didn't appear in this reader

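The hunks above all follow one migration pattern: Query.createWeight now takes the boost as a third argument, constant-score queries forward it to the ConstantScoreWeight/RandomAccessWeight constructor, and scoring weights such as TermAutomatonWeight pass it to Similarity.computeWeight instead of implementing getValueForNormalization/normalize. Below is a minimal, hedged sketch of a custom constant-score query adapted to the new signature; the class name and its match-all behaviour are illustrative only and are not part of these commits.

import java.io.IOException;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.ConstantScoreWeight;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;

final class MatchAllFilterQuery extends Query {

  @Override
  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
    // the boost is handed straight to the weight; there is no later normalize()/queryNorm step
    return new ConstantScoreWeight(this, boost) {
      @Override
      public Scorer scorer(LeafReaderContext context) throws IOException {
        // score() returns the boost that was passed to the ConstantScoreWeight constructor
        return new ConstantScoreScorer(this, score(), DocIdSetIterator.all(context.reader().maxDoc()));
      }
    };
  }

  @Override
  public String toString(String field) {
    return "MatchAllFilterQuery";
  }

  @Override
  public boolean equals(Object other) {
    return sameClassAs(other);
  }

  @Override
  public int hashCode() {
    return classHash();
  }
}
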
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/FuzzyLikeThisQueryTest.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/FuzzyLikeThisQueryTest.java b/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/FuzzyLikeThisQueryTest.java
index b321494..a8c8b51 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/FuzzyLikeThisQueryTest.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/FuzzyLikeThisQueryTest.java
@@ -80,7 +80,7 @@ public class FuzzyLikeThisQueryTest extends LuceneTestCase {
     flt.addTerms("smith", "name", 0.3f, 1);
     Query q = flt.rewrite(searcher.getIndexReader());
     HashSet<Term> queryTerms = new HashSet<>();
-    searcher.createWeight(q, true).extractTerms(queryTerms);
+    searcher.createWeight(q, true, 1f).extractTerms(queryTerms);
     assertTrue("Should have variant smythe", queryTerms.contains(new Term("name", "smythe")));
     assertTrue("Should have variant smith", queryTerms.contains(new Term("name", "smith")));
     assertTrue("Should have variant smyth", queryTerms.contains(new Term("name", "smyth")));
@@ -97,7 +97,7 @@ public class FuzzyLikeThisQueryTest extends LuceneTestCase {
     flt.addTerms("jonathin smoth", "name", 0.3f, 1);
     Query q = flt.rewrite(searcher.getIndexReader());
     HashSet<Term> queryTerms = new HashSet<>();
-    searcher.createWeight(q, true).extractTerms(queryTerms);
+    searcher.createWeight(q, true, 1f).extractTerms(queryTerms);
     assertTrue("Should have variant jonathan", queryTerms.contains(new Term("name", "jonathan")));
     assertTrue("Should have variant smith", queryTerms.contains(new Term("name", "smith")));
     TopDocs topDocs = searcher.search(flt, 1);
@@ -115,7 +115,7 @@ public class FuzzyLikeThisQueryTest extends LuceneTestCase {
     // don't fail here just because the field doesn't exits
     Query q = flt.rewrite(searcher.getIndexReader());
     HashSet<Term> queryTerms = new HashSet<>();
-    searcher.createWeight(q, true).extractTerms(queryTerms);
+    searcher.createWeight(q, true, 1f).extractTerms(queryTerms);
     assertTrue("Should have variant jonathan", queryTerms.contains(new Term("name", "jonathan")));
     assertTrue("Should have variant smith", queryTerms.contains(new Term("name", "smith")));
     TopDocs topDocs = searcher.search(flt, 1);
@@ -132,7 +132,7 @@ public class FuzzyLikeThisQueryTest extends LuceneTestCase {
     flt.addTerms("fernando smith", "name", 0.3f, 1);
     Query q = flt.rewrite(searcher.getIndexReader());
     HashSet<Term> queryTerms = new HashSet<>();
-    searcher.createWeight(q, true).extractTerms(queryTerms);
+    searcher.createWeight(q, true, 1f).extractTerms(queryTerms);
     assertTrue("Should have variant smith", queryTerms.contains(new Term("name", "smith")));
     TopDocs topDocs = searcher.search(flt, 1);
     ScoreDoc[] sd = topDocs.scoreDocs;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/TestSlowFuzzyQuery2.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/TestSlowFuzzyQuery2.java b/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/TestSlowFuzzyQuery2.java
deleted file mode 100644
index 84145a1..0000000
--- a/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/TestSlowFuzzyQuery2.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.sandbox.queries;
-
-import java.io.BufferedReader;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.nio.charset.StandardCharsets;
-
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.analysis.MockTokenizer;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.MultiTermQuery;
-import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.search.similarities.ClassicSimilarity;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
-
-/** 
- * Tests the results of fuzzy against pre-recorded output 
- * The format of the file is the following:
- * 
- * Header Row: # of bits: generate 2^n sequential documents 
- * with a value of Integer.toBinaryString
- * 
- * Entries: an entry is a param spec line, a resultCount line, and
- * then 'resultCount' results lines. The results lines are in the
- * expected order.
- * 
- * param spec line: a comma-separated list of params to FuzzyQuery
- *   (query, prefixLen, pqSize, minScore)
- * query = query text as a number (expand with Integer.toBinaryString)
- * prefixLen = prefix length
- * pqSize = priority queue maximum size for TopTermsBoostOnlyBooleanQueryRewrite
- * minScore = minimum similarity
- * 
- * resultCount line: total number of expected hits.
- * 
- * results line: comma-separated docID, score pair
- **/
-public class TestSlowFuzzyQuery2 extends LuceneTestCase {
-  /** epsilon for score comparisons */
-  static final float epsilon = 0.00001f;
-
-  static int[][] mappings = new int[][] {
-    new int[] { 0x40, 0x41 },
-    new int[] { 0x40, 0x0195 },
-    new int[] { 0x40, 0x0906 },
-    new int[] { 0x40, 0x1040F },
-    new int[] { 0x0194, 0x0195 },
-    new int[] { 0x0194, 0x0906 },
-    new int[] { 0x0194, 0x1040F },
-    new int[] { 0x0905, 0x0906 },
-    new int[] { 0x0905, 0x1040F },
-    new int[] { 0x1040E, 0x1040F }
-  };
-  public void testFromTestData() throws Exception {
-    // TODO: randomize!
-    assertFromTestData(mappings[random().nextInt(mappings.length)]);
-  }
-
-  public void assertFromTestData(int codePointTable[]) throws Exception {
-    if (VERBOSE) {
-      System.out.println("TEST: codePointTable=" + codePointTable);
-    }
-    InputStream stream = getClass().getResourceAsStream("fuzzyTestData.txt");
-    BufferedReader reader = new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8));
-    
-    int bits = Integer.parseInt(reader.readLine());
-    int terms = (int) Math.pow(2, bits);
-    
-    Directory dir = newDirectory();
-    Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.KEYWORD, false);
-    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, newIndexWriterConfig(analyzer).setMergePolicy(newLogMergePolicy()));
-
-    Document doc = new Document();
-    Field field = newTextField("field", "", Field.Store.NO);
-    doc.add(field);
-    
-    for (int i = 0; i < terms; i++) {
-      field.setStringValue(mapInt(codePointTable, i));
-      writer.addDocument(doc);
-    }   
-    
-    IndexReader r = writer.getReader();
-    IndexSearcher searcher = newSearcher(r);
-    if (VERBOSE) {
-      System.out.println("TEST: searcher=" + searcher);
-    }
-    // even though this uses a boost-only rewrite, this test relies upon queryNorm being the default implementation,
-    // otherwise scores are different!
-    searcher.setSimilarity(new ClassicSimilarity());
-    
-    writer.close();
-    String line;
-    while ((line = reader.readLine()) != null) {
-      String params[] = line.split(",");
-      String query = mapInt(codePointTable, Integer.parseInt(params[0]));
-      int prefix = Integer.parseInt(params[1]);
-      int pqSize = Integer.parseInt(params[2]);
-      float minScore = Float.parseFloat(params[3]);
-      SlowFuzzyQuery q = new SlowFuzzyQuery(new Term("field", query), minScore, prefix);
-      q.setRewriteMethod(new MultiTermQuery.TopTermsBoostOnlyBooleanQueryRewrite(pqSize));
-      int expectedResults = Integer.parseInt(reader.readLine());
-      TopDocs docs = searcher.search(q, expectedResults);
-      assertEquals(expectedResults, docs.totalHits);
-      for (int i = 0; i < expectedResults; i++) {
-        String scoreDoc[] = reader.readLine().split(",");
-        assertEquals(Integer.parseInt(scoreDoc[0]), docs.scoreDocs[i].doc);
-        assertEquals(Float.parseFloat(scoreDoc[1]), docs.scoreDocs[i].score, epsilon);
-      }
-    }
-    IOUtils.close(r, dir, analyzer);
-  }
-  
-  /* map bits to unicode codepoints */
-  private static String mapInt(int codePointTable[], int i) {
-    StringBuilder sb = new StringBuilder();
-    String binary = Integer.toBinaryString(i);
-    for (int j = 0; j < binary.length(); j++)
-      sb.appendCodePoint(codePointTable[binary.charAt(j) - '0']);
-    return sb.toString();
-  }
-
-  /* Code to generate test data
-  public static void main(String args[]) throws Exception {
-    int bits = 3;
-    System.out.println(bits);
-    int terms = (int) Math.pow(2, bits);
-    
-    RAMDirectory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new KeywordAnalyzer(),
-        IndexWriter.MaxFieldLength.UNLIMITED);
-    
-    Document doc = new Document();
-    Field field = newField("field", "", Field.Store.NO, Field.Index.ANALYZED);
-    doc.add(field);
-
-    for (int i = 0; i < terms; i++) {
-      field.setValue(Integer.toBinaryString(i));
-      writer.addDocument(doc);
-    }
-    
-    writer.forceMerge(1);
-    writer.close();
-
-    IndexSearcher searcher = new IndexSearcher(dir);
-    for (int prefix = 0; prefix < bits; prefix++)
-      for (int pqsize = 1; pqsize <= terms; pqsize++)
-        for (float minscore = 0.1F; minscore < 1F; minscore += 0.2F)
-          for (int query = 0; query < terms; query++) {
-            FuzzyQuery q = new FuzzyQuery(
-                new Term("field", Integer.toBinaryString(query)), minscore, prefix);
-            q.setRewriteMethod(new MultiTermQuery.TopTermsBoostOnlyBooleanQueryRewrite(pqsize));
-            System.out.println(query + "," + prefix + "," + pqsize + "," + minscore);
-            TopDocs docs = searcher.search(q, terms);
-            System.out.println(docs.totalHits);
-            for (int i = 0; i < docs.totalHits; i++)
-              System.out.println(docs.scoreDocs[i].doc + "," + docs.scoreDocs[i].score);
-          }
-  }
-  */
-}


[47/51] [abbrv] lucene-solr:apiv2: LUCENE-7311: Cached term queries do not seek the terms dictionary anymore.

Posted by sa...@apache.org.
LUCENE-7311: Cached term queries do not seek the terms dictionary anymore.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/71541bcd
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/71541bcd
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/71541bcd

Branch: refs/heads/apiv2
Commit: 71541bcd6cfd1e279faa1f2402403ac74cc5362d
Parents: ee44da6
Author: Adrien Grand <jp...@gmail.com>
Authored: Wed Jul 20 17:30:30 2016 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Wed Jul 20 17:42:51 2016 +0200

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |   3 +
 .../org/apache/lucene/search/TermQuery.java     |  58 ++++---
 .../org/apache/lucene/search/TestTermQuery.java | 154 +++++++++++++++++++
 3 files changed, 196 insertions(+), 19 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71541bcd/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index ec395a3..432e1d2 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -137,6 +137,9 @@ Optimizations
 * LUCENE-7371: Point values are now better compressed using run-length
   encoding. (Adrien Grand)
 
+* LUCENE-7311: Cached term queries do not seek the terms dictionary anymore.
+  (Adrien Grand)
+
 Other
 
 * LUCENE-4787: Fixed some highlighting javadocs. (Michael Dodsworth via Adrien

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71541bcd/lucene/core/src/java/org/apache/lucene/search/TermQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/TermQuery.java b/lucene/core/src/java/org/apache/lucene/search/TermQuery.java
index 590c3b3..73170b9 100644
--- a/lucene/core/src/java/org/apache/lucene/search/TermQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/TermQuery.java
@@ -29,6 +29,7 @@ import org.apache.lucene.index.ReaderUtil;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermContext;
 import org.apache.lucene.index.TermState;
+import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.similarities.Similarity;
 import org.apache.lucene.search.similarities.Similarity.SimScorer;
@@ -51,8 +52,10 @@ public class TermQuery extends Query {
     public TermWeight(IndexSearcher searcher, boolean needsScores,
         float boost, TermContext termStates) throws IOException {
       super(TermQuery.this);
+      if (needsScores && termStates == null) {
+        throw new IllegalStateException("termStates are required when scores are needed");
+      }
       this.needsScores = needsScores;
-      assert termStates != null : "TermContext must not be null";
       this.termStates = termStates;
       this.similarity = searcher.getSimilarity(needsScores);
 
@@ -62,12 +65,10 @@ public class TermQuery extends Query {
         collectionStats = searcher.collectionStatistics(term.field());
         termStats = searcher.termStatistics(term, termStates);
       } else {
-        // do not bother computing actual stats, scores are not needed
+        // we do not need the actual stats, use fake stats with docFreq=maxDoc and ttf=-1
         final int maxDoc = searcher.getIndexReader().maxDoc();
-        final int docFreq = termStates.docFreq();
-        final long totalTermFreq = termStates.totalTermFreq();
         collectionStats = new CollectionStatistics(term.field(), maxDoc, -1, -1, -1);
-        termStats = new TermStatistics(term.bytes(), docFreq, totalTermFreq);
+        termStats = new TermStatistics(term.bytes(), maxDoc, -1);
       }
      
       this.stats = similarity.computeWeight(boost, collectionStats, termStats);
@@ -85,7 +86,7 @@ public class TermQuery extends Query {
 
     @Override
     public Scorer scorer(LeafReaderContext context) throws IOException {
-      assert termStates.topReaderContext == ReaderUtil.getTopLevelContext(context) : "The top-reader used to create Weight (" + termStates.topReaderContext + ") is not the same as the current reader's top-reader (" + ReaderUtil.getTopLevelContext(context);
+      assert termStates == null || termStates.topReaderContext == ReaderUtil.getTopLevelContext(context) : "The top-reader used to create Weight (" + termStates.topReaderContext + ") is not the same as the current reader's top-reader (" + ReaderUtil.getTopLevelContext(context);
       final TermsEnum termsEnum = getTermsEnum(context);
       if (termsEnum == null) {
         return null;
@@ -100,17 +101,30 @@ public class TermQuery extends Query {
      * the term does not exist in the given context
      */
     private TermsEnum getTermsEnum(LeafReaderContext context) throws IOException {
-      final TermState state = termStates.get(context.ord);
-      if (state == null) { // term is not present in that reader
-        assert termNotInReader(context.reader(), term) : "no termstate found but term exists in reader term=" + term;
-        return null;
+      if (termStates != null) {
+        // TermQuery either used as a Query or the term states have been provided at construction time
+        assert termStates.topReaderContext == ReaderUtil.getTopLevelContext(context) : "The top-reader used to create Weight (" + termStates.topReaderContext + ") is not the same as the current reader's top-reader (" + ReaderUtil.getTopLevelContext(context);
+        final TermState state = termStates.get(context.ord);
+        if (state == null) { // term is not present in that reader
+          assert termNotInReader(context.reader(), term) : "no termstate found but term exists in reader term=" + term;
+          return null;
+        }
+        final TermsEnum termsEnum = context.reader().terms(term.field()).iterator();
+        termsEnum.seekExact(term.bytes(), state);
+        return termsEnum;
+      } else {
+        // TermQuery used as a filter, so the term states have not been built up front
+        Terms terms = context.reader().terms(term.field());
+        if (terms == null) {
+          return null;
+        }
+        final TermsEnum termsEnum = terms.iterator();
+        if (termsEnum.seekExact(term.bytes())) {
+          return termsEnum;
+        } else {
+          return null;
+        }
       }
-      // System.out.println("LD=" + reader.getLiveDocs() + " set?=" +
-      // (reader.getLiveDocs() != null ? reader.getLiveDocs().get(0) : "null"));
-      final TermsEnum termsEnum = context.reader().terms(term.field())
-          .iterator();
-      termsEnum.seekExact(term.bytes(), state);
-      return termsEnum;
     }
 
     private boolean termNotInReader(LeafReader reader, Term term) throws IOException {
@@ -168,9 +182,15 @@ public class TermQuery extends Query {
     final TermContext termState;
     if (perReaderTermState == null
         || perReaderTermState.topReaderContext != context) {
-      // make TermQuery single-pass if we don't have a PRTS or if the context
-      // differs!
-      termState = TermContext.build(context, term);
+      if (needsScores) {
+        // make TermQuery single-pass if we don't have a PRTS or if the context
+        // differs!
+        termState = TermContext.build(context, term);
+      } else {
+        // do not compute the term state, this will help save seeks in the terms
+        // dict on segments that have a cache entry for this query
+        termState = null;
+      }
     } else {
       // PRTS was pre-build for this IS
       termState = this.perReaderTermState;

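A short usage sketch of what LUCENE-7311 means in practice (the 'reader' variable is assumed to come from surrounding test setup and is not part of the patch): when the weight is built with needsScores=false, as the query cache does for filter clauses, TermWeight keeps termStates null and performs no up-front terms-dictionary seek.

IndexSearcher searcher = new IndexSearcher(reader);            // 'reader' assumed to exist
Query filter = new TermQuery(new Term("foo", "bar"));
// needsScores=false: termStates stays null, so no eager seek into the terms dictionary
Weight weight = searcher.createNormalizedWeight(filter, false);
// a per-segment seek only happens lazily, if and when a Scorer is actually requested
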
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71541bcd/lucene/core/src/test/org/apache/lucene/search/TestTermQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTermQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestTermQuery.java
new file mode 100644
index 0000000..a994118
--- /dev/null
+++ b/lucene/core/src/test/org/apache/lucene/search/TestTermQuery.java
@@ -0,0 +1,154 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.search;
+
+import java.io.IOException;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.Fields;
+import org.apache.lucene.index.FilterDirectoryReader;
+import org.apache.lucene.index.FilterLeafReader;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.MultiReader;
+import org.apache.lucene.index.NoMergePolicy;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.index.TermContext;
+import org.apache.lucene.index.TermState;
+import org.apache.lucene.index.Terms;
+import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util.LuceneTestCase;
+
+public class TestTermQuery extends LuceneTestCase {
+
+  public void testEquals() throws IOException {
+    QueryUtils.checkEqual(
+        new TermQuery(new Term("foo", "bar")),
+        new TermQuery(new Term("foo", "bar")));
+    QueryUtils.checkUnequal(
+        new TermQuery(new Term("foo", "bar")),
+        new TermQuery(new Term("foo", "baz")));
+    QueryUtils.checkEqual(
+        new TermQuery(new Term("foo", "bar")),
+        new TermQuery(new Term("foo", "bar"), TermContext.build(new MultiReader().getContext(), new Term("foo", "bar"))));
+  }
+
+  public void testCreateWeightDoesNotSeekIfScoresAreNotNeeded() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE));
+    // segment that contains the term
+    Document doc = new Document();
+    doc.add(new StringField("foo", "bar", Store.NO));
+    w.addDocument(doc);
+    w.getReader().close();
+    // segment that does not contain the term
+    doc = new Document();
+    doc.add(new StringField("foo", "baz", Store.NO));
+    w.addDocument(doc);
+    w.getReader().close();
+    // segment that does not contain the field
+    w.addDocument(new Document());
+
+    DirectoryReader reader = w.getReader();
+    FilterDirectoryReader noSeekReader = new NoSeekDirectoryReader(reader);
+    IndexSearcher noSeekSearcher = new IndexSearcher(noSeekReader);
+    Query query = new TermQuery(new Term("foo", "bar"));
+    AssertionError e = expectThrows(AssertionError.class,
+        () -> noSeekSearcher.createNormalizedWeight(query, true));
+    assertEquals("no seek", e.getMessage());
+
+    noSeekSearcher.createNormalizedWeight(query, false); // no exception
+    IndexSearcher searcher = new IndexSearcher(reader);
+    // use a collector rather than searcher.count() which would just read the
+    // doc freq instead of creating a scorer
+    TotalHitCountCollector collector = new TotalHitCountCollector();
+    searcher.search(query, collector);
+    assertEquals(1, collector.getTotalHits());
+    TermQuery queryWithContext = new TermQuery(new Term("foo", "bar"),
+        TermContext.build(reader.getContext(), new Term("foo", "bar")));
+    collector = new TotalHitCountCollector();
+    searcher.search(queryWithContext, collector);
+    assertEquals(1, collector.getTotalHits());
+
+    IOUtils.close(reader, w, dir);
+  }
+
+  private static class NoSeekDirectoryReader extends FilterDirectoryReader {
+
+    public NoSeekDirectoryReader(DirectoryReader in) throws IOException {
+      super(in, new SubReaderWrapper() {
+        @Override
+        public LeafReader wrap(LeafReader reader) {
+          return new NoSeekLeafReader(reader);
+        }
+      });
+    }
+
+    @Override
+    protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
+      return new NoSeekDirectoryReader(in);
+    }
+    
+  }
+
+  private static class NoSeekLeafReader extends FilterLeafReader {
+
+    public NoSeekLeafReader(LeafReader in) {
+      super(in);
+    }
+
+    @Override
+    public Fields fields() throws IOException {
+      return new FilterFields(super.fields()) {
+        @Override
+        public Terms terms(String field) throws IOException {
+          return new FilterTerms(super.terms(field)) {
+            @Override
+            public TermsEnum iterator() throws IOException {
+              return new FilterTermsEnum(super.iterator()) {
+                @Override
+                public SeekStatus seekCeil(BytesRef text) throws IOException {
+                  throw new AssertionError("no seek");
+                }
+                @Override
+                public void seekExact(BytesRef term, TermState state) throws IOException {
+                  throw new AssertionError("no seek");
+                }
+                @Override
+                public boolean seekExact(BytesRef text) throws IOException {
+                  throw new AssertionError("no seek");
+                }
+                @Override
+                public void seekExact(long ord) throws IOException {
+                  throw new AssertionError("no seek");
+                }
+              };
+            }
+          };
+        }
+      };
+    }
+
+  };
+
+}


[36/51] [abbrv] lucene-solr:apiv2: SOLR-9285: Fixed AIOOBE when using ValueSourceAugmenter in single node RTG

Posted by sa...@apache.org.
SOLR-9285: Fixed AIOOBE when using ValueSourceAugmenter in single node RTG


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/4123b3bf
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/4123b3bf
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/4123b3bf

Branch: refs/heads/apiv2
Commit: 4123b3bf26156227174ef3c417b36309c2beeb9a
Parents: 6f76ac1
Author: Chris Hostetter <ho...@apache.org>
Authored: Mon Jul 18 10:21:08 2016 -0700
Committer: Chris Hostetter <ho...@apache.org>
Committed: Mon Jul 18 10:21:08 2016 -0700

----------------------------------------------------------------------
 solr/CHANGES.txt                                |   2 +
 .../handler/component/RealTimeGetComponent.java |  79 ++-
 .../solr/response/transform/DocTransformer.java |  26 +-
 .../response/transform/DocTransformers.java     |  12 +
 .../transform/ValueSourceAugmenter.java         |  13 +-
 .../solr/cloud/TestCloudPseudoReturnFields.java |   5 +-
 .../apache/solr/cloud/TestRandomFlRTGCloud.java | 625 +++++++++++++++++++
 .../solr/search/TestPseudoReturnFields.java     |  57 +-
 8 files changed, 764 insertions(+), 55 deletions(-)
----------------------------------------------------------------------


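For orientation before the diffs: ValueSourceAugmenter is the transformer behind function values in the fl parameter, so the AIOOBE shows up on real-time get requests that ask for such a value. A hypothetical SolrJ sketch of that kind of request follows; the collection, field names, id value and the solrClient variable are illustrative and not taken from this commit.

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.QueryResponse;

SolrQuery q = new SolrQuery();
q.setRequestHandler("/get");                  // real-time get handler
q.set("id", "42");                            // fetch a single doc from the update log / index
q.set("fl", "id,val_alias:add(aaa_i,1)");     // function in fl routes through ValueSourceAugmenter
QueryResponse rsp = solrClient.query("collection1", q);
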
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4123b3bf/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 4864925..bff2909 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -155,6 +155,8 @@ Bug Fixes
 * SOLR-7280: In cloud-mode sort the cores smartly before loading & limit threads to improve cluster stability
   (noble, Erick Erickson, shalin)
 
+* SOLR-9285: Fixed AIOOBE when using ValueSourceAugmenter in single node RTG (hossman)
+
 Optimizations
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4123b3bf/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
index 78cebd3..9865a11 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
@@ -23,6 +23,7 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -58,13 +59,13 @@ import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.request.SolrQueryRequest;
-import org.apache.solr.response.BasicResultContext;
 import org.apache.solr.response.ResultContext;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.response.transform.DocTransformer;
 import org.apache.solr.schema.FieldType;
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.schema.SchemaField;
+import org.apache.solr.search.DocList;
 import org.apache.solr.search.QParser;
 import org.apache.solr.search.ReturnFields;
 import org.apache.solr.search.SolrIndexSearcher;
@@ -192,12 +193,18 @@ public class RealTimeGetComponent extends SearchComponent
     UpdateLog ulog = core.getUpdateHandler().getUpdateLog();
 
     RefCounted<SolrIndexSearcher> searcherHolder = null;
+    
+    // this is initialized & set on the context *after* any searcher (re-)opening
+    ResultContext resultContext = null;
+    final DocTransformer transformer = rsp.getReturnFields().getTransformer();
+
+    // true in any situation where we have to use a realtime searcher rather than returning docs
+    // directly from the UpdateLog
+    final boolean mustUseRealtimeSearcher =
+      // if we have filters, we need to check those against the indexed form of the doc
+      (rb.getFilters() != null)
+      || ((null != transformer) && transformer.needsSolrIndexSearcher());
 
-    DocTransformer transformer = rsp.getReturnFields().getTransformer();
-    if (transformer != null) {
-      ResultContext context = new BasicResultContext(null, rsp.getReturnFields(), null, null, req);
-      transformer.setContext(context);
-    }
    try {
      SolrIndexSearcher searcher = null;
 
@@ -214,13 +221,13 @@ public class RealTimeGetComponent extends SearchComponent
            switch (oper) {
              case UpdateLog.ADD:
 
-               if (rb.getFilters() != null) {
-                 // we have filters, so we need to check those against the indexed form of the doc
+               if (mustUseRealtimeSearcher) {
                  if (searcherHolder != null) {
-                   // close handles to current searchers
+                   // close handles to current searchers & result context
                    searcher = null;
                    searcherHolder.decref();
                    searcherHolder = null;
+                   resultContext = null;
                  }
                  ulog.openRealtimeSearcher();  // force open a new realtime searcher
                  o = null;  // pretend we never found this record and fall through to use the searcher
@@ -228,7 +235,7 @@ public class RealTimeGetComponent extends SearchComponent
                }
 
                SolrDocument doc = toSolrDoc((SolrInputDocument)entry.get(entry.size()-1), core.getLatestSchema());
-               if(transformer!=null) {
+               if (transformer!=null) {
                  transformer.transform(doc, -1, 0); // unknown docID
                }
               docList.add(doc);
@@ -246,6 +253,7 @@ public class RealTimeGetComponent extends SearchComponent
        if (searcher == null) {
          searcherHolder = core.getRealtimeSearcher();
          searcher = searcherHolder.get();
+         // don't bother with ResultContext yet, we won't need it if doc doesn't match filters
        }
 
        int docid = -1;
@@ -267,12 +275,17 @@ public class RealTimeGetComponent extends SearchComponent
          }
        }
 
-
        if (docid < 0) continue;
+       
        Document luceneDocument = searcher.doc(docid, rsp.getReturnFields().getLuceneFieldNames());
        SolrDocument doc = toSolrDoc(luceneDocument,  core.getLatestSchema());
        searcher.decorateDocValueFields(doc, docid, searcher.getNonStoredDVs(true));
-       if( transformer != null ) {
+       if ( null != transformer) {
+         if (null == resultContext) {
+           // either first pass, or we've re-opened searcher - either way now we setContext
+           resultContext = new RTGResultContext(rsp.getReturnFields(), searcher, req);
+           transformer.setContext(resultContext);
+         }
          transformer.transform(doc, docid, 0);
        }
        docList.add(doc);
@@ -754,4 +767,46 @@ public class RealTimeGetComponent extends SearchComponent
     // TODO do we need to sort versions using PeerSync.absComparator?
     return new ArrayList<>(versionsToRet);
   }
+
+  /**
+   * A lightweight ResultContext for use with RTG requests that can point at Realtime Searchers
+   */
+  private static final class RTGResultContext extends ResultContext {
+    final ReturnFields returnFields;
+    final SolrIndexSearcher searcher;
+    final SolrQueryRequest req;
+    public RTGResultContext(ReturnFields returnFields, SolrIndexSearcher searcher, SolrQueryRequest req) {
+      this.returnFields = returnFields;
+      this.searcher = searcher;
+      this.req = req;
+    }
+    
+    /** @return null */
+    public DocList getDocList() {
+      return null;
+    }
+    
+    public ReturnFields getReturnFields() {
+      return this.returnFields;
+    }
+    
+    public SolrIndexSearcher getSearcher() {
+      return this.searcher;
+    }
+    
+    /** @return null */
+    public Query getQuery() {
+      return null;
+    }
+    
+    public SolrQueryRequest getRequest() {
+      return this.req;
+    }
+    
+    /** @return null */
+    public Iterator<SolrDocument> getProcessedDocuments() {
+      return null;
+    }
+  }
+  
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4123b3bf/solr/core/src/java/org/apache/solr/response/transform/DocTransformer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/response/transform/DocTransformer.java b/solr/core/src/java/org/apache/solr/response/transform/DocTransformer.java
index 11e3a9b..6111804 100644
--- a/solr/core/src/java/org/apache/solr/response/transform/DocTransformer.java
+++ b/solr/core/src/java/org/apache/solr/response/transform/DocTransformer.java
@@ -42,8 +42,10 @@ public abstract class DocTransformer {
   public abstract String getName();
 
   /**
-   * This is called before transform and sets
+   * This is called before {@link #transform} to provide context for any subsequent transformations.
+   *
    * @param context The {@link ResultContext} stores information about how the documents were produced.
+   * @see #needsSolrIndexSearcher
    */
   public void setContext( ResultContext context ) {
     this.context = context;
@@ -51,13 +53,31 @@ public abstract class DocTransformer {
   }
 
   /**
-   * This is where implementations do the actual work
+   * Indicates if this transformer requires access to the underlying index to perform its functions.
+   *
+   * In some situations (notably RealTimeGet) this method <i>may</i> be called before {@link #setContext} 
+   * to determine if the transformer must be given a "full" ResultContext and accurate docIds 
+   * that can be looked up using {@link ResultContext#getSearcher}, or if optimizations can be taken 
+   * advantage of such that {@link ResultContext#getSearcher} <i>may</i> return null, and docIds passed to 
+   * {@link #transform} <i>may</i> be negative.
    *
+   * The default implementation always returns <code>false</code>.
+   * 
+   * @see ResultContext#getSearcher
+   * @see #transform
+   */
+  public boolean needsSolrIndexSearcher() { return false; }
+  
+  /**
+   * This is where implementations do the actual work.
+   * If implementations require a valid docId and index access, the {@link #needsSolrIndexSearcher} 
+   * method must return true.
    *
    * @param doc The document to alter
-   * @param docid The Lucene internal doc id
+   * @param docid The Lucene internal doc id, or -1 in cases where the <code>doc</code> did not come from the index
    * @param score the score for this document
    * @throws IOException If there is a low-level I/O error.
+   * @see #needsSolrIndexSearcher
    */
   public abstract void transform(SolrDocument doc, int docid, float score) throws IOException;
 

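A minimal sketch (not part of this commit) of a custom transformer opting in to the new contract: returning true from needsSolrIndexSearcher() tells RealTimeGetComponent that setContext() must receive a context whose getSearcher() is usable and that transform() must get a real lucene docid. The class name and the field it adds are illustrative only.

import java.io.IOException;

import org.apache.lucene.document.Document;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.response.ResultContext;
import org.apache.solr.response.transform.DocTransformer;

public class StoredFieldCountAugmenter extends DocTransformer {

  private ResultContext rctx;

  @Override
  public String getName() {
    return "[storedfieldcount]";
  }

  /** Always returns true: this transformer needs real docids and index access */
  @Override
  public boolean needsSolrIndexSearcher() {
    return true;
  }

  @Override
  public void setContext(ResultContext context) {
    super.setContext(context);
    this.rctx = context;
  }

  @Override
  public void transform(SolrDocument doc, int docid, float score) throws IOException {
    // safe because needsSolrIndexSearcher() returned true: docid is a valid lucene id
    // and getSearcher() is non-null even for RTG requests
    Document stored = rctx.getSearcher().doc(docid);
    doc.setField(getName(), stored.getFields().size());
  }
}
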
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4123b3bf/solr/core/src/java/org/apache/solr/response/transform/DocTransformers.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/response/transform/DocTransformers.java b/solr/core/src/java/org/apache/solr/response/transform/DocTransformers.java
index e0b3a3c..7bb53ff 100644
--- a/solr/core/src/java/org/apache/solr/response/transform/DocTransformers.java
+++ b/solr/core/src/java/org/apache/solr/response/transform/DocTransformers.java
@@ -76,4 +76,16 @@ public class DocTransformers extends DocTransformer
       a.transform( doc, docid, score);
     }
   }
+
+  /** Returns true if and only if at least 1 child transformer returns true */
+  @Override
+  public boolean needsSolrIndexSearcher() {
+    for( DocTransformer kid : children ) {
+      if (kid.needsSolrIndexSearcher()) {
+        return true;
+      }
+    }
+    return false;
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4123b3bf/solr/core/src/java/org/apache/solr/response/transform/ValueSourceAugmenter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/response/transform/ValueSourceAugmenter.java b/solr/core/src/java/org/apache/solr/response/transform/ValueSourceAugmenter.java
index 9ca0f2b..9edf826 100644
--- a/solr/core/src/java/org/apache/solr/response/transform/ValueSourceAugmenter.java
+++ b/solr/core/src/java/org/apache/solr/response/transform/ValueSourceAugmenter.java
@@ -21,7 +21,6 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.ReaderUtil;
 import org.apache.lucene.queries.function.FunctionValues;
 import org.apache.lucene.queries.function.ValueSource;
@@ -64,11 +63,9 @@ public class ValueSourceAugmenter extends DocTransformer
   public void setContext( ResultContext context ) {
     super.setContext(context);
     try {
-      IndexReader reader = qparser.getReq().getSearcher().getIndexReader();
-      readerContexts = reader.leaves();
+      searcher = context.getSearcher();
+      readerContexts = searcher.getIndexReader().leaves();
       docValuesArr = new FunctionValues[readerContexts.size()];
-
-      searcher = qparser.getReq().getSearcher();
       fcontext = ValueSource.newContext(searcher);
       this.valueSource.createWeight(fcontext, searcher);
     } catch (IOException e) {
@@ -76,13 +73,11 @@ public class ValueSourceAugmenter extends DocTransformer
     }
   }
 
-
   Map fcontext;
   SolrIndexSearcher searcher;
   List<LeafReaderContext> readerContexts;
   FunctionValues docValuesArr[];
 
-
   @Override
   public void transform(SolrDocument doc, int docid, float score) {
     // This is only good for random-access functions
@@ -103,6 +98,10 @@ public class ValueSourceAugmenter extends DocTransformer
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "exception at docid " + docid + " for valuesource " + valueSource, e);
     }
   }
+
+  /** Always returns true */
+  @Override
+  public boolean needsSolrIndexSearcher() { return true; }
   
   protected void setValue(SolrDocument doc, Object val) {
     if(val!=null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4123b3bf/solr/core/src/test/org/apache/solr/cloud/TestCloudPseudoReturnFields.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudPseudoReturnFields.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudPseudoReturnFields.java
index bf56821..8553697 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestCloudPseudoReturnFields.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudPseudoReturnFields.java
@@ -53,7 +53,10 @@ import org.apache.commons.lang.StringUtils;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 
-/** @see TestPseudoReturnFields */
+/** 
+ * @see TestPseudoReturnFields 
+ * @see TestRandomFlRTGCloud
+ */
 public class TestCloudPseudoReturnFields extends SolrCloudTestCase {
   
   private static final String DEBUG_LABEL = MethodHandles.lookup().lookupClass().getName();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4123b3bf/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java
new file mode 100644
index 0000000..8cf1129
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java
@@ -0,0 +1,625 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.cloud;
+
+import java.lang.invoke.MethodHandles;
+
+import java.io.IOException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.Random;
+
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.response.QueryResponse;
+
+import org.apache.solr.cloud.SolrCloudTestCase;
+
+import org.apache.solr.common.SolrDocument;
+import org.apache.solr.common.SolrDocumentList;
+import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.util.NamedList;
+
+import org.apache.solr.util.RandomizeSSL;
+import org.apache.lucene.util.TestUtil;
+
+import org.apache.commons.io.FilenameUtils;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+/** @see TestCloudPseudoReturnFields */
+@RandomizeSSL(clientAuth=0.0,reason="client auth uses too much RAM")
+public class TestRandomFlRTGCloud extends SolrCloudTestCase {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final String DEBUG_LABEL = MethodHandles.lookup().lookupClass().getName();
+  private static final String COLLECTION_NAME = DEBUG_LABEL + "_collection";
+  
+  /** A basic client for operations at the cloud level, default collection will be set */
+  private static CloudSolrClient CLOUD_CLIENT;
+  /** One client per node */
+  private static ArrayList<HttpSolrClient> CLIENTS = new ArrayList<>(5);
+
+  /** Always included in fl so we can vet what doc we're looking at */
+  private static final FlValidator ID_VALIDATOR = new SimpleFieldValueValidator("id");
+  
+  /** 
+   * Types of things we will randomly ask for in fl param, and validate in response docs.
+   *
+   * This list starts out with the things we know concretely should work for any type of request;
+   * {@link #createMiniSolrCloudCluster} will add to it additional validators that are expected
+   * to work depending on the random cluster creation
+   * 
+   * @see #addRandomFlValidators
+   */
+  private static final List<FlValidator> FL_VALIDATORS = new ArrayList<>
+    // TODO: SOLR-9314: once all the known bugs are fixed, and this list can be constant
+    // regardless of single/multi node, change this to Collections.unmodifiableList
+    // (and adjust jdocs accordingly)
+    (Arrays.<FlValidator>asList(
+      // TODO: SOLR-9314: add more of these for other various transformers
+      //
+      // TODO: add a [docid] validator (blocked by SOLR-9288 & SOLR-9289)
+      //
+      new GlobValidator("*"),
+      new GlobValidator("*_i"),
+      new GlobValidator("*_s"),
+      new GlobValidator("a*"),
+      new SimpleFieldValueValidator("aaa_i"),
+      new SimpleFieldValueValidator("ccc_s"),
+      new NotIncludedValidator("bogus_unused_field_ss"),
+      new NotIncludedValidator("bogus_alias","bogus_alias:other_bogus_field_i"),
+      new NotIncludedValidator("explain_alias","explain_alias:[explain]"),
+      new NotIncludedValidator("score")));
+  
+  @BeforeClass
+  private static void createMiniSolrCloudCluster() throws Exception {
+
+    // Due to known bugs with some transformers in either multi or single node mode, we want
+    // to test both possible cases explicitly and modify the List of FL_VALIDATORS we use accordingly:
+    //  - 50% of runs use a single node/shard and FL_VALIDATORS with all validators known to work on a single node
+    //  - 50% of runs use multi node/shard and FL_VALIDATORS containing only validators that work in cloud mode
+    final boolean singleCoreMode = random().nextBoolean();
+    if (singleCoreMode) {
+      // these don't work in distrib cloud mode due to SOLR-9286
+      FL_VALIDATORS.addAll(Arrays.asList
+                           (new FunctionValidator("aaa_i"), // fq field
+                            new FunctionValidator("aaa_i", "func_aaa_alias"),
+                            new RenameFieldValueValidator("id", "my_id_alias"),
+                            new RenameFieldValueValidator("bbb_i", "my_int_field_alias"),
+                            new RenameFieldValueValidator("ddd_s", "my_str_field_alias")));
+    } else {
+      // No-Op
+      // No known transformers that only work in distrib cloud but fail in singleCoreMode
+
+    }
+    // TODO: SOLR-9314: programmatically compare FL_VALIDATORS with all known transformers
+    // (ala QueryEqualityTest). This can't be done until we eliminate the need for "singleCoreMode"
+    // conditional logic (might still want 'singleCoreMode' on the MiniSolrCloudCluster side,
+    // but shouldn't have conditional FlValidators).
+
+    // (assuming multi core) multiple replicas shouldn't matter (assuming multi node) ...
+    final int repFactor = singleCoreMode ? 1 : (usually() ? 1 : 2);
+    // ... but we definitely want to ensure forwarded requests to other shards work ...
+    final int numShards = singleCoreMode ? 1 : 2;
+    // ... including some forwarded requests from nodes not hosting a shard
+    final int numNodes = 1 + (singleCoreMode ? 0 : (numShards * repFactor));
+    
+    final String configName = DEBUG_LABEL + "_config-set";
+    final Path configDir = Paths.get(TEST_HOME(), "collection1", "conf");
+    
+    configureCluster(numNodes).addConfig(configName, configDir).configure();
+    
+    Map<String, String> collectionProperties = new HashMap<>();
+    collectionProperties.put("config", "solrconfig-tlog.xml");
+    collectionProperties.put("schema", "schema-psuedo-fields.xml");
+
+    assertNotNull(cluster.createCollection(COLLECTION_NAME, numShards, repFactor,
+                                           configName, null, null, collectionProperties));
+    
+    CLOUD_CLIENT = cluster.getSolrClient();
+    CLOUD_CLIENT.setDefaultCollection(COLLECTION_NAME);
+
+    waitForRecoveriesToFinish(CLOUD_CLIENT);
+
+    for (JettySolrRunner jetty : cluster.getJettySolrRunners()) {
+      CLIENTS.add(getHttpSolrClient(jetty.getBaseUrl() + "/" + COLLECTION_NAME + "/"));
+    }
+  }
+
+  @AfterClass
+  private static void afterClass() throws Exception {
+    CLOUD_CLIENT.close(); CLOUD_CLIENT = null;
+    for (HttpSolrClient client : CLIENTS) {
+      client.close();
+    }
+    CLIENTS = null;
+  }
+  
+  public void testRandomizedUpdatesAndRTGs() throws Exception {
+
+    final int maxNumDocs = atLeast(100);
+    final int numSeedDocs = random().nextInt(maxNumDocs / 10); // at most ~10% of the max possible docs
+    final int numIters = atLeast(maxNumDocs * 10);
+    final SolrInputDocument[] knownDocs = new SolrInputDocument[maxNumDocs];
+
+    log.info("Starting {} iters by seeding {} of {} max docs",
+             numIters, numSeedDocs, maxNumDocs);
+
+    int itersSinceLastCommit = 0;
+    for (int i = 0; i < numIters; i++) {
+      itersSinceLastCommit = maybeCommit(random(), itersSinceLastCommit, numIters);
+
+      if (i < numSeedDocs) {
+        // first N iters all we worry about is seeding
+        knownDocs[i] = addRandomDocument(i);
+      } else {
+        assertOneIter(knownDocs);
+      }
+    }
+  }
+
+  /** 
+   * Randomly chooses to do a commit, where the probability of doing so increases the longer it's been since 
+   * a commit was done.
+   *
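+   * (For example, with the float division below, if <code>itersSinceLastCommit</code> is half of 
+   * <code>numIters</code>, the chance of committing on this iteration is roughly 50%.)
+   *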
+   * @return <code>0</code> if a commit was done, else <code>itersSinceLastCommit + 1</code>
+   */
+  private static int maybeCommit(final Random rand, final int itersSinceLastCommit, final int numIters) throws IOException, SolrServerException {
+    final float threshold = (float) itersSinceLastCommit / numIters;
+    if (rand.nextFloat() < threshold) {
+      log.info("COMMIT");
+      assertEquals(0, getRandClient(rand).commit().getStatus());
+      return 0;
+    }
+    return itersSinceLastCommit + 1;
+  }
+  
+  private void assertOneIter(final SolrInputDocument[] knownDocs) throws IOException, SolrServerException {
+    // we want to occasionally test more than one doc per RTG
+    final int numDocsThisIter = TestUtil.nextInt(random(), 1, atLeast(2));
+    int numDocsThisIterThatExist = 0;
+    
+    // pick some random docIds for this iteration and ...
+    final int[] docIds = new int[numDocsThisIter];
+    for (int i = 0; i < numDocsThisIter; i++) {
+      docIds[i] = random().nextInt(knownDocs.length);
+      if (null != knownDocs[docIds[i]]) {
+        // ...check how many already exist
+        numDocsThisIterThatExist++;
+      }
+    }
+
+    // we want our RTG requests to occasionally include missing/deleted docs,
+    // but that's not the primary focus of the test, so weight the odds accordingly
+    if (random().nextInt(numDocsThisIter + 2) <= numDocsThisIterThatExist) {
+
+      if (0 < TestUtil.nextInt(random(), 0, 13)) {
+        log.info("RTG: numDocsThisIter={} numDocsThisIterThatExist={}, docIds={}",
+                 numDocsThisIter, numDocsThisIterThatExist, docIds);
+        assertRTG(knownDocs, docIds);
+      } else {
+        // sporadically delete some docs instead of doing an RTG
+        log.info("DEL: numDocsThisIter={} numDocsThisIterThatExist={}, docIds={}",
+                 numDocsThisIter, numDocsThisIterThatExist, docIds);
+        assertDelete(knownDocs, docIds);
+      }
+    } else {
+      log.info("UPD: numDocsThisIter={} numDocsThisIterThatExist={}, docIds={}",
+               numDocsThisIter, numDocsThisIterThatExist, docIds);
+      assertUpdate(knownDocs, docIds);
+    }
+  }
+
+  /**
+   * Does some random indexing of the specified docIds and adds them to knownDocs
+   */
+  private void assertUpdate(final SolrInputDocument[] knownDocs, final int[] docIds) throws IOException, SolrServerException {
+    
+    for (final int docId : docIds) {
+      // TODO: this method should also do some atomic update operations (ie: "inc" and "set")
+      // (but make sure to eval the updates locally as well before modifying knownDocs)
+      knownDocs[docId] = addRandomDocument(docId);
+    }
+  }
+  
+  /**
+   * Deletes the specified docIds, asserts the results are valid, and updates knownDocs accordingly
+   */
+  private void assertDelete(final SolrInputDocument[] knownDocs, final int[] docIds) throws IOException, SolrServerException {
+    List<String> ids = new ArrayList<>(docIds.length);
+    for (final int docId : docIds) {
+      ids.add("" + docId);
+      knownDocs[docId] = null;
+    }
+    assertEquals("Failed delete: " + docIds, 0, getRandClient(random()).deleteById(ids).getStatus());
+  }
+  
+  /**
+   * Adds one randomly generated document with the specified docId, asserting success, and returns 
+   * the document added
+   */
+  private SolrInputDocument addRandomDocument(final int docId) throws IOException, SolrServerException {
+    final SolrClient client = getRandClient(random());
+    
+    final SolrInputDocument doc = sdoc("id", "" + docId,
+                                       "aaa_i", random().nextInt(),
+                                       "bbb_i", random().nextInt(),
+                                       //
+                                       "ccc_s", TestUtil.randomSimpleString(random()),
+                                       "ddd_s", TestUtil.randomSimpleString(random()),
+                                       //
+                                       "axx_i", random().nextInt(),
+                                       "ayy_i", random().nextInt(),
+                                       "azz_s", TestUtil.randomSimpleString(random()));
+    
+    log.info("ADD: {} = {}", docId, doc);
+    assertEquals(0, client.add(doc).getStatus());
+    return doc;
+  }
+
+  
+  /**
+   * Does one or more RTG requests for the specified docIds with randomized fl &amp; fq params, asserting
+   * that the returned document (if any) makes sense given the expected SolrInputDocuments
+   */
+  private void assertRTG(final SolrInputDocument[] knownDocs, final int[] docIds) throws IOException, SolrServerException {
+    final SolrClient client = getRandClient(random());
+    // NOTE: not using SolrClient.getById or getByIds because we want to force choice of "id" vs "ids" params
+    final ModifiableSolrParams params = params("qt","/get");
+    
+    // TODO: fq testing blocked by SOLR-9308
+    //
+    // // random fq -- nothing fancy, secondary concern for our test
+    final Integer FQ_MAX = null;                                           // TODO: replace this...
+    // final Integer FQ_MAX = usually() ? null : random().nextInt();       //       ... with this
+    // if (null != FQ_MAX) {
+    //   params.add("fq", "aaa_i:[* TO " + FQ_MAX + "]");
+    // }
+    // TODO: END
+    
+    final Set<FlValidator> validators = new HashSet<>();
+    validators.add(ID_VALIDATOR); // always include id so we can be confident which doc we're looking at
+    addRandomFlValidators(random(), validators);
+    FlValidator.addFlParams(validators, params);
+    
+    final List<String> idsToRequest = new ArrayList<>(docIds.length);
+    final List<SolrInputDocument> docsToExpect = new ArrayList<>(docIds.length);
+    for (int docId : docIds) {
+      // every docId will be included in the request
+      idsToRequest.add("" + docId);
+      
+      // only docs that should actually exist and match our (optional) filter will be expected in response
+      if (null != knownDocs[docId]) {
+        Integer filterVal = (Integer) knownDocs[docId].getFieldValue("aaa_i");
+        if (null == FQ_MAX || ((null != filterVal) && filterVal.intValue() <= FQ_MAX.intValue())) {
+          docsToExpect.add(knownDocs[docId]);
+        }
+      }
+    }
+
+    // even w/only 1 docId requested, the response format can vary depending on how we request it
+    final boolean askForList = random().nextBoolean() || (1 != idsToRequest.size());
+    if (askForList) {
+      if (1 == idsToRequest.size()) {
+        // have to be careful not to try to use "multi" 'id' params with only 1 docId
+        // with a single docId, the only way to ask for a list is with the "ids" param
+        params.add("ids", idsToRequest.get(0));
+      } else {
+        if (random().nextBoolean()) {
+          // each id in its own param
+          for (String id : idsToRequest) {
+            params.add("id",id);
+          }
+        } else {
+          // add one or more comma separated ids params
+          params.add(buildCommaSepParams(random(), "ids", idsToRequest));
+        }
+      }
+    } else {
+      assert 1 == idsToRequest.size();
+      params.add("id",idsToRequest.get(0));
+    }
+    
+    final QueryResponse rsp = client.query(params);
+    assertNotNull(params.toString(), rsp);
+
+    final SolrDocumentList docs = getDocsFromRTGResponse(askForList, rsp);
+    assertNotNull(params + " => " + rsp, docs);
+    
+    assertEquals("num docs mismatch: " + params + " => " + docsToExpect + " vs " + docs,
+                 docsToExpect.size(), docs.size());
+    
+    // NOTE: RTG makes no guarantees about the order docs will be returned in when multiple docs are requested
+    for (SolrDocument actual : docs) {
+      try {
+        int actualId = Integer.parseInt(actual.getFirstValue("id").toString());
+        final SolrInputDocument expected = knownDocs[actualId];
+        assertNotNull("expected null doc but RTG returned: " + actual, expected);
+        
+        Set<String> expectedFieldNames = new HashSet<>();
+        for (FlValidator v : validators) {
+          expectedFieldNames.addAll(v.assertRTGResults(validators, expected, actual));
+        }
+        // ensure only expected field names are in the actual document
+        Set<String> actualFieldNames = new HashSet<>(actual.getFieldNames());
+        assertEquals("More actual fields then expected", expectedFieldNames, actualFieldNames);
+      } catch (AssertionError ae) {
+        throw new AssertionError(params + " => " + actual + ": " + ae.getMessage(), ae);
+      }
+    }
+  }
+
+  /** 
+   * Trivial helper method to deal with the different response structure when using a single 'id' param vs
+   * 2 or more 'id' params (or 1 or more 'ids' params).
+   *
+   * NOTE: <code>expectList</code> is currently ignored due to SOLR-9309 -- instead best efforts are made to
+   * return a synthetic list based on whatever can be found in the response.
+   *
+   * @return List from response, or a synthetic one created from single response doc if 
+   * <code>expectList</code> was false; May be empty; May be null if response included null list.
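+   *
+   * (For reference: a single 'id' request comes back as a NamedList with a lone 'doc' entry, while
+   * 'ids' / multiple 'id' requests come back with a normal 'response' doc list -- hence the two 
+   * code paths below.)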
+   */
+  private static SolrDocumentList getDocsFromRTGResponse(final boolean expectList, final QueryResponse rsp) {
+    // TODO: blocked by SOLR-9309 (once this can be fixed, update jdocs)
+    if (null != rsp.getResults()) { // TODO: replace this..
+    // if (expectList) {            // TODO: ...with this tighter check.
+      return rsp.getResults();
+    }
+    
+    // else: expect single doc, make our own list...
+    
+    final SolrDocumentList result = new SolrDocumentList();
+    NamedList<Object> raw = rsp.getResponse();
+    Object doc = raw.get("doc");
+    if (null != doc) {
+      result.add((SolrDocument) doc);
+      result.setNumFound(1);
+    }
+    return result;
+  }
+    
+  /** 
+   * returns a random SolrClient -- either a CloudSolrClient, or an HttpSolrClient pointed 
+   * at a node in our cluster 
+   */
+  public static SolrClient getRandClient(Random rand) {
+    int numClients = CLIENTS.size();
+    int idx = TestUtil.nextInt(rand, 0, numClients);
+    return (idx == numClients) ? CLOUD_CLIENT : CLIENTS.get(idx);
+  }
+
+  public static void waitForRecoveriesToFinish(CloudSolrClient client) throws Exception {
+    assert null != client.getDefaultCollection();
+    AbstractDistribZkTestBase.waitForRecoveriesToFinish(client.getDefaultCollection(),
+                                                        client.getZkStateReader(),
+                                                        true, true, 330);
+  }
+
+  /** 
+   * Abstraction for the different types of things that can be added to an 'fl' param, which can validate
+   * that the results are correct compared to an expected SolrInputDocument
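+   *
+   * (For example, a {@link RenameFieldValueValidator} for <code>("bbb_i","my_int_field_alias")</code> contributes 
+   * <code>my_int_field_alias:bbb_i</code> to the fl, and then checks that the returned doc's 
+   * <code>my_int_field_alias</code> value matches the expected doc's <code>bbb_i</code> value.)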
+   */
+  private interface FlValidator {
+    
+    /** Given a list of FlValidators, adds one or more fl params that correspond to the entire set */
+    public static void addFlParams(final Collection<FlValidator> validators, final ModifiableSolrParams params) {
+      final List<String> fls = new ArrayList<>(validators.size());
+      for (FlValidator v : validators) {
+        fls.add(v.getFlParam());
+      }
+      params.add(buildCommaSepParams(random(), "fl", fls));
+    }
+    
+    /** 
+     * Must return a non null String that can be used in an fl param -- either by itself, 
+     * or with other items separated by commas
+     */
+    public String getFlParam();
+
+    /** 
+     * Given the expected document and the actual document returned from an RTG, this method
+     * should assert that, relative to what {@link #getFlParam} returns, the actual document contains
+     * what it should compared to the expected document.
+     *
+     * @param validators all validators in use for this request, including the current one
+     * @param expected a document containing the expected fields &amp; values that should be in the index
+     * @param actual A document that was returned by an RTG request
+     * @return A set of "field names" in the actual document that this validator expected.
+     */
+    public Collection<String> assertRTGResults(final Collection<FlValidator> validators,
+                                               final SolrInputDocument expected,
+                                               final SolrDocument actual);
+  }
+
+  private abstract static class FieldValueValidator implements FlValidator {
+    protected final String expectedFieldName;
+    protected final String actualFieldName;
+    public FieldValueValidator(final String expectedFieldName, final String actualFieldName) {
+      this.expectedFieldName = expectedFieldName;
+      this.actualFieldName = actualFieldName;
+    }
+    public abstract String getFlParam();
+    public Collection<String> assertRTGResults(final Collection<FlValidator> validators,
+                                               final SolrInputDocument expected,
+                                               final SolrDocument actual) {
+      assertEquals(expectedFieldName + " vs " + actualFieldName,
+                   expected.getFieldValue(expectedFieldName), actual.getFirstValue(actualFieldName));
+      return Collections.<String>singleton(actualFieldName);
+    }
+  }
+  
+  private static class SimpleFieldValueValidator extends FieldValueValidator {
+    public SimpleFieldValueValidator(final String fieldName) {
+      super(fieldName, fieldName);
+    }
+    public String getFlParam() { return expectedFieldName; }
+  }
+  
+  private static class RenameFieldValueValidator extends FieldValueValidator {
+    /** @see GlobValidator */
+    public String getRealFieldName() { return expectedFieldName; }
+    public RenameFieldValueValidator(final String origFieldName, final String alias) {
+      super(origFieldName, alias);
+    }
+    public String getFlParam() { return actualFieldName + ":" + expectedFieldName; }
+  }
+
+  /** Trivial validator of a ValueSourceAugmenter */
+  private static class FunctionValidator implements FlValidator {
+    private static String func(String fieldName) {
+      return "add(1.3,sub("+fieldName+","+fieldName+"))";
+    }
+    protected final String fl;
+    protected final String resultKey;
+    protected final String fieldName;
+    public FunctionValidator(final String fieldName) {
+      this(func(fieldName), fieldName, func(fieldName));
+    }
+    public FunctionValidator(final String fieldName, final String resultKey) {
+      this(resultKey + ":" + func(fieldName), fieldName, resultKey);
+    }
+    private FunctionValidator(final String fl, final String fieldName, final String resultKey) {
+      this.fl = fl;
+      this.resultKey = resultKey;
+      this.fieldName = fieldName;
+    }
+    public String getFlParam() { return fl; }
+    public Collection<String> assertRTGResults(final Collection<FlValidator> validators,
+                                               final SolrInputDocument expected,
+                                               final SolrDocument actual) {
+      final Object origVal = expected.getFieldValue(fieldName);
+      assertTrue("this validator only works on numeric fields: " + origVal, origVal instanceof Number);
+      
+      assertEquals(fl, 1.3F, actual.getFirstValue(resultKey));
+      return Collections.<String>singleton(resultKey);
+    }
+  }
+
+  /** 
+   * Glob based validator.
+   * This class checks that every field in the expected doc exists in the actual doc with the expected 
+   * value -- with special exceptions for fields that are "renamed" with an alias. 
+   *
+   * By design, fields that are aliased are "moved" unless the original field name was explicitly included 
+   * in the fl; globs don't count.
+   *
+   * @see RenameFieldValueValidator
+   */
+  private static class GlobValidator implements FlValidator {
+    private final String glob;
+    public GlobValidator(final String glob) {
+      this.glob = glob;
+    }
+    private final Set<String> matchingFieldsCache = new HashSet<>();
+    
+    public String getFlParam() { return glob; }
+    
+    private boolean matchesGlob(final String fieldName) {
+      if ( matchingFieldsCache.contains(fieldName) ) {
+        return true; // seen this field before, no need to re-evaluate the glob
+      }
+      if ( FilenameUtils.wildcardMatch(fieldName, glob) ) {
+        matchingFieldsCache.add(fieldName); // cache it so we don't calculate it again
+        return true;
+      }
+      return false;
+    }
+                                
+    public Collection<String> assertRTGResults(final Collection<FlValidator> validators,
+                                               final SolrInputDocument expected,
+                                               final SolrDocument actual) {
+
+      final Set<String> renamed = new HashSet<>(validators.size());
+      for (FlValidator v : validators) {
+        if (v instanceof RenameFieldValueValidator) {
+          renamed.add(((RenameFieldValueValidator)v).getRealFieldName());
+        }
+      }
+      
+      // every real field name matching the glob that is not renamed should be in the results
+      Set<String> result = new HashSet<>(expected.getFieldNames().size());
+      for (String f : expected.getFieldNames()) {
+        if ( matchesGlob(f) && (! renamed.contains(f) ) ) {
+          result.add(f);
+          assertEquals(glob + " => " + f, expected.getFieldValue(f), actual.getFirstValue(f));
+        }
+      }
+      return result;
+    }
+  }
+  
+  /** 
+   * for things like "score" and "[explain]" where we explicitly expect what we ask for in the fl
+   * to <b>not</b> be returned when using RTG.
+   */
+  private static class NotIncludedValidator implements FlValidator {
+    private final String fieldName;
+    private final String fl;
+    public NotIncludedValidator(final String fl) {
+      this(fl, fl);
+    }
+    public NotIncludedValidator(final String fieldName, final String fl) {
+      this.fieldName = fieldName;
+      this.fl = fl;
+    }
+    public String getFlParam() { return fl; }
+    public Collection<String> assertRTGResults(final Collection<FlValidator> validators,
+                                               final SolrInputDocument expected,
+                                               final SolrDocument actual) {
+      assertEquals(fl, null, actual.getFirstValue(fieldName));
+      return Collections.emptySet();
+    }
+  }
+
+  /** helper method for adding a random number (at least 1) of items from {@link #FL_VALIDATORS} */
+  private static void addRandomFlValidators(final Random r, final Set<FlValidator> validators) {
+    List<FlValidator> copyToShuffle = new ArrayList<>(FL_VALIDATORS);
+    Collections.shuffle(copyToShuffle, r);
+    final int numToReturn = r.nextInt(copyToShuffle.size());
+    validators.addAll(copyToShuffle.subList(0, numToReturn + 1));
+  }
+
+  /**
+   * Given an ordered list of values to include in a (key) param, randomly groups them (ie: comma separated) 
+   * into actual param key=values which are returned as a new SolrParams instance
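+   *
+   * (For example, values [a, b, c] might come back as a single <code>key=a,b,c</code> param, or as 
+   * <code>key=a,b</code> plus <code>key=c</code>, possibly with empty <code>key=</code> params mixed in.)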
+   */
+  private static SolrParams buildCommaSepParams(final Random rand, final String key, Collection<String> values) {
+    ModifiableSolrParams result = new ModifiableSolrParams();
+    List<String> copy = new ArrayList<>(values);
+    while (! copy.isEmpty()) {
+      List<String> slice = copy.subList(0, rand.nextInt(1 + copy.size()));
+      result.add(key,String.join(",",slice));
+      slice.clear();
+    }
+    return result;
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4123b3bf/solr/core/src/test/org/apache/solr/search/TestPseudoReturnFields.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestPseudoReturnFields.java b/solr/core/src/test/org/apache/solr/search/TestPseudoReturnFields.java
index 8b85ba0..68f0773 100644
--- a/solr/core/src/test/org/apache/solr/search/TestPseudoReturnFields.java
+++ b/solr/core/src/test/org/apache/solr/search/TestPseudoReturnFields.java
@@ -95,7 +95,6 @@ public class TestPseudoReturnFields extends SolrTestCaseJ4 {
     );
   }
 
-  @AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-9285")
   public void testMultiValuedRTG() throws Exception {
 
     // single value int using alias that matches multivalued dynamic field - via RTG
@@ -247,7 +246,6 @@ public class TestPseudoReturnFields extends SolrTestCaseJ4 {
             );
   }
   
-  @AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-9285")
   public void testFunctionsRTG() throws Exception {
     // if we use RTG (committed or otherwise) functions should behave the same
     for (String id : Arrays.asList("42","99")) {
@@ -286,7 +284,6 @@ public class TestPseudoReturnFields extends SolrTestCaseJ4 {
             );
   }
 
-  @AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-9285")
   public void testFunctionsAndExplicitRTG() throws Exception {
     // shouldn't matter if we use RTG (committed or otherwise)
     for (String id : Arrays.asList("42","99")) {
@@ -346,10 +343,7 @@ public class TestPseudoReturnFields extends SolrTestCaseJ4 {
     
   }
   
-  @AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-9285")
   public void testFunctionsAndScoreRTG() throws Exception {
-    // NOTE: once this test is fixed to pass, testAugmentersRTG should also be updated to test a abs(val_i)
-
     // if we use RTG (committed or otherwise) score should be ignored
     for (String id : Arrays.asList("42","99")) {
       for (SolrParams p : Arrays.asList(params("fl","score","fl","log(val_i)","fl","abs(val_i)"),
@@ -360,7 +354,7 @@ public class TestPseudoReturnFields extends SolrTestCaseJ4 {
                 req(p, "qt","/get","id",id, "wt","xml")
                 ,"count(//doc)=1"
                 ,"//doc/double[@name='log(val_i)']"
-                ,"//doc/float[@name='abs(val_i)']"
+                ,"//doc/float[@name='abs(val_i)'][.='1.0']"
                 ,"//doc[count(*)=2]"
                 );
       }
@@ -561,20 +555,21 @@ public class TestPseudoReturnFields extends SolrTestCaseJ4 {
     // behavior shouldn't matter if we are committed or uncommitted
     for (String id : Arrays.asList("42","99")) {
       // NOTE: once testDocIdAugmenterRTG can pass, [docid] should be tested here as well.
-      // NOTE: once testFunctionsAndScoreRTG can pass, abs(val_i) should be tested here as well
-      for (SolrParams p : Arrays.asList(params("fl","[shard],[explain],x_alias:[value v=10 t=int]"),
-                                        params("fl","[shard]","fl","[explain],x_alias:[value v=10 t=int]"),
-                                        params("fl","[shard]","fl","[explain]","fl","x_alias:[value v=10 t=int]"))) {
+      for (SolrParams p : Arrays.asList
+             (params("fl","[shard],[explain],x_alias:[value v=10 t=int],abs(val_i)"),
+              params("fl","[shard],abs(val_i)","fl","[explain],x_alias:[value v=10 t=int]"),
+              params("fl","[shard]","fl","[explain],x_alias:[value v=10 t=int]","fl","abs(val_i)"),
+              params("fl","[shard]","fl","[explain]","fl","x_alias:[value v=10 t=int]","fl","abs(val_i)"))) {
         assertQ(id + ": " + p,
                 req(p, "qt","/get","id",id, "wt","xml")
                 ,"count(//doc)=1"
                 // ,"//doc/int[@name='[docid]']" // TODO
-                // ,"//doc/gloat[@name='abs(val_i)']" // TODO
+                ,"//doc/float[@name='abs(val_i)'][.='1.0']"
                 ,"//doc/str[@name='[shard]'][.='[not a shard request]']"
                 // RTG: [explain] should be missing (ignored)
                 ,"//doc/int[@name='x_alias'][.=10]"
                 
-                ,"//doc[count(*)=2]"
+                ,"//doc[count(*)=3]"
                 );
       }
     }
@@ -601,20 +596,20 @@ public class TestPseudoReturnFields extends SolrTestCaseJ4 {
     // behavior shouldn't matter if we are committed or uncommitted
     for (String id : Arrays.asList("42","99")) {
       // NOTE: once testDocIdAugmenterRTG can pass, [docid] should be tested here as well.
-      // NOTE: once testFunctionsAndScoreRTG can pass, abs(val_i) should be tested here as well
-      for (SolrParams p : Arrays.asList(params("fl","id,[explain],x_alias:[value v=10 t=int]"),
-                                        params("fl","id","fl","[explain],x_alias:[value v=10 t=int]"),
-                                        params("fl","id","fl","[explain]","fl","x_alias:[value v=10 t=int]"))) {
+      for (SolrParams p : Arrays.asList
+             (params("fl","id,[explain],x_alias:[value v=10 t=int],abs(val_i)"),
+              params("fl","id,abs(val_i)","fl","[explain],x_alias:[value v=10 t=int]"),
+              params("fl","id","fl","[explain]","fl","x_alias:[value v=10 t=int]","fl","abs(val_i)"))) {
         assertQ(id + ": " + p,
                 req(p, "qt","/get","id",id, "wt","xml")
                 ,"count(//doc)=1"
                 ,"//doc/str[@name='id']"
                 // ,"//doc/int[@name='[docid]']" // TODO
-                // ,"//doc/gloat[@name='abs(val_i)']" // TODO
+                ,"//doc/float[@name='abs(val_i)'][.='1.0']"
                 // RTG: [explain] should be missing (ignored)
                 ,"//doc/int[@name='x_alias'][.=10]"
                 
-                ,"//doc[count(*)=2]"
+                ,"//doc[count(*)=3]"
               );
       }
     }
@@ -652,29 +647,28 @@ public class TestPseudoReturnFields extends SolrTestCaseJ4 {
     // if we use RTG (committed or otherwise) score should be ignored
     for (String id : Arrays.asList("42","99")) {
       // NOTE: once testDocIdAugmenterRTG can pass, [docid] should be tested here as well.
-      // NOTE: once testFunctionsAndScoreRTG can pass, abs(val_i) should be tested here as well
       assertQ(id,
               req("qt","/get","id",id, "wt","xml",
-                  "fl","x_alias:[value v=10 t=int],score")
+                  "fl","x_alias:[value v=10 t=int],score,abs(val_i)")
               // ,"//doc/int[@name='[docid]']" // TODO
-              // ,"//doc/gloat[@name='abs(val_i)']" // TODO
+              ,"//doc/float[@name='abs(val_i)'][.='1.0']"
               ,"//doc/int[@name='x_alias'][.=10]"
               
-              ,"//doc[count(*)=1]"
+              ,"//doc[count(*)=2]"
               );
-      for (SolrParams p : Arrays.asList(params("fl","x_alias:[value v=10 t=int],[explain],score"),
-                                        params("fl","x_alias:[value v=10 t=int],[explain]","fl","score"),
-                                        params("fl","x_alias:[value v=10 t=int]","fl","[explain]","fl","score"))) {
+      for (SolrParams p : Arrays.asList(params("fl","x_alias:[value v=10 t=int],[explain],score,abs(val_i)"),
+                                        params("fl","x_alias:[value v=10 t=int],[explain]","fl","score,abs(val_i)"),
+                                        params("fl","x_alias:[value v=10 t=int]","fl","[explain]","fl","score","fl","abs(val_i)"))) {
         
         assertQ(p.toString(),
                 req(p, "qt","/get","id",id, "wt","xml")
                 
                 // ,"//doc/int[@name='[docid]']" // TODO
-                // ,"//doc/gloat[@name='abs(val_i)']" // TODO
+                ,"//doc/float[@name='abs(val_i)'][.='1.0']"
                 ,"//doc/int[@name='x_alias'][.=10]"
                 // RTG: [explain] and score should be missing (ignored)
                 
-                ,"//doc[count(*)=1]"
+                ,"//doc[count(*)=2]"
                 );
       }
     }
@@ -720,8 +714,7 @@ public class TestPseudoReturnFields extends SolrTestCaseJ4 {
     // NOTE: 'ssto' is the missing one
     final List<String> fl = Arrays.asList
       // NOTE: once testDocIdAugmenterRTG can pass, [docid] should be tested here as well.
-      // NOTE: once testFunctionsAndScoreRTG can pass, abs(val_i) should be tested here as well
-      ("id","[explain]","score","val_*","subj*");
+      ("id","[explain]","score","val_*","subj*","abs(val_i)");
     
     final int iters = atLeast(random, 10);
     for (int i = 0; i< iters; i++) {
@@ -742,11 +735,11 @@ public class TestPseudoReturnFields extends SolrTestCaseJ4 {
                   ,"count(//doc)=1"
                   ,"//doc/str[@name='id']"
                   // ,"//doc/int[@name='[docid]']" // TODO
-                  // ,"//doc/gloat[@name='abs(val_i)']" // TODO
+                  ,"//doc/float[@name='abs(val_i)'][.='1.0']"
                   // RTG: [explain] and score should be missing (ignored)
                   ,"//doc/int[@name='val_i'][.=1]"
                   ,"//doc/str[@name='subject']"
-                  ,"//doc[count(*)=3]"
+                  ,"//doc[count(*)=4]"
                   );
         }
       }


[29/51] [abbrv] lucene-solr:apiv2: SOLR-9240: Support running the topic() Streaming Expression in parallel mode.

Posted by sa...@apache.org.
SOLR-9240: Support running the topic() Streaming Expression in parallel mode.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/fc3894e8
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/fc3894e8
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/fc3894e8

Branch: refs/heads/apiv2
Commit: fc3894e837701b78a4704cf27529c34c15666586
Parents: 7463359
Author: jbernste <jb...@apache.org>
Authored: Mon Jul 11 20:10:27 2016 -0400
Committer: jbernste <jb...@apache.org>
Committed: Sat Jul 16 22:36:30 2016 -0400

----------------------------------------------------------------------
 .../client/solrj/io/stream/ParallelStream.java  |   2 +-
 .../client/solrj/io/stream/TopicStream.java     |  41 +++-
 .../solr/configsets/streaming/conf/schema.xml   |   2 +-
 .../solr/client/solrj/io/sql/JdbcTest.java      |   4 +-
 .../solrj/io/stream/StreamExpressionTest.java   | 185 ++++++++++++++++++-
 .../client/solrj/io/stream/StreamingTest.java   |  19 +-
 6 files changed, 231 insertions(+), 22 deletions(-)
----------------------------------------------------------------------
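
A minimal sketch (not part of this commit) of what the new parallel topic() support looks like from client code, modeled on the testParallelTopicStream test added below; the zkHost address, collection name, query, and topic id are illustrative assumptions:

    import org.apache.solr.client.solrj.io.SolrClientCache;
    import org.apache.solr.client.solrj.io.Tuple;
    import org.apache.solr.client.solrj.io.stream.CloudSolrStream;
    import org.apache.solr.client.solrj.io.stream.ParallelStream;
    import org.apache.solr.client.solrj.io.stream.StreamContext;
    import org.apache.solr.client.solrj.io.stream.TopicStream;
    import org.apache.solr.client.solrj.io.stream.TupleStream;
    import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
    import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParser;
    import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;

    public class ParallelTopicSketch {
      public static void main(String[] args) throws Exception {
        String zkHost = "localhost:9983"; // assumed local ZooKeeper address
        StreamFactory factory = new StreamFactory()
            .withCollectionZkHost("collection1", zkHost)
            .withFunctionName("topic", TopicStream.class)
            .withFunctionName("search", CloudSolrStream.class)
            .withFunctionName("parallel", ParallelStream.class);

        // Two workers; each maintains its own checkpoints because the topic id is
        // suffixed with the workerID (see the TopicStream change below).
        // initialCheckpoint="0" makes the first run pull everything matching the query.
        StreamExpression expr = StreamExpressionParser.parse(
            "parallel(collection1, workers=\"2\", sort=\"_version_ asc\", " +
            "topic(collection1, collection1, q=\"a_s:hello\", fl=\"id\", " +
            "id=\"1000000\", initialCheckpoint=\"0\", partitionKeys=\"id\"))");

        SolrClientCache cache = new SolrClientCache();
        try {
          TupleStream stream = factory.constructStream(expr);
          StreamContext context = new StreamContext();
          context.setSolrClientCache(cache);
          stream.setStreamContext(context);
          stream.open();
          try {
            for (Tuple t = stream.read(); !t.EOF; t = stream.read()) {
              System.out.println(t.getString("id"));
            }
          } finally {
            stream.close();
          }
        } finally {
          cache.close();
        }
      }
    }

On the first run this pulls all matching documents split across the two workers; subsequent runs with the same topic id return only documents added since the per-worker checkpoints were stored, as exercised in the test below.
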


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/fc3894e8/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelStream.java
index 779cc31..3125ff0 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelStream.java
@@ -101,7 +101,7 @@ public class ParallelStream extends CloudSolrStream implements Expressible {
 
     // Workers
     if(null == workersParam || null == workersParam.getParameter() || !(workersParam.getParameter() instanceof StreamExpressionValue)){
-      throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - expecting a single 'workersParam' parameter of type positive integer but didn't find one",expression));
+      throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - expecting a single 'workers' parameter of type positive integer but didn't find one",expression));
     }
     String workersStr = ((StreamExpressionValue)workersParam.getParameter()).getValue();
     int workersInt = 0;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/fc3894e8/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java
index 30c6f59..c4343c6 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java
@@ -23,6 +23,7 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
@@ -74,9 +75,9 @@ public class TopicStream extends CloudSolrStream implements Expressible  {
   private int runCount;
   private String id;
   protected long checkpointEvery;
-
   private Map<String, Long> checkpoints = new HashMap<String, Long>();
   private String checkpointCollection;
+  private long initialCheckpoint = -1;
 
   // Use TopicStream that takes a SolrParams
   @Deprecated
@@ -84,12 +85,14 @@ public class TopicStream extends CloudSolrStream implements Expressible  {
                      String checkpointCollection,
                      String collection,
                      String id,
+                     long initialCheckpoint,
                      long checkpointEvery,
                      Map<String, String> params) {
     init(zkHost,
          checkpointCollection,
          collection,
          id,
+         initialCheckpoint,
          checkpointEvery,
          new MapSolrParams(params));
   }
@@ -98,12 +101,14 @@ public class TopicStream extends CloudSolrStream implements Expressible  {
                      String checkpointCollection,
                      String collection,
                      String id,
+                     long initialCheckpoint,
                      long checkpointEvery,
                      SolrParams params) {
     init(zkHost,
         checkpointCollection,
         collection,
         id,
+        initialCheckpoint,
         checkpointEvery,
         params);
   }
@@ -113,6 +118,7 @@ public class TopicStream extends CloudSolrStream implements Expressible  {
                     String checkpointCollection,
                     String collection,
                     String id,
+                    long initialCheckpoint,
                     long checkpointEvery,
                     SolrParams params) {
     this.zkHost  = zkHost;
@@ -121,11 +127,13 @@ public class TopicStream extends CloudSolrStream implements Expressible  {
     if(mParams.getParams("rows") == null) {
       mParams.set("rows", "500");
     }
+
     this.params  = mParams; 
     this.collection = collection;
     this.checkpointCollection = checkpointCollection;
     this.checkpointEvery = checkpointEvery;
     this.id = id;
+    this.initialCheckpoint = initialCheckpoint;
     this.comp = new FieldComparator("_version_", ComparatorOrder.ASCENDING);
   }
 
@@ -147,6 +155,13 @@ public class TopicStream extends CloudSolrStream implements Expressible  {
       throw new IOException("invalid TopicStream fl cannot be null");
     }
 
+    long initialCheckpoint = -1;
+    StreamExpressionNamedParameter initialCheckpointParam = factory.getNamedOperand(expression, "initialCheckpoint");
+
+    if(initialCheckpointParam != null) {
+      initialCheckpoint = Long.parseLong(((StreamExpressionValue) initialCheckpointParam.getParameter()).getValue());
+    }
+
     long checkpointEvery = -1;
     StreamExpressionNamedParameter checkpointEveryParam = factory.getNamedOperand(expression, "checkpointEvery");
 
@@ -198,6 +213,7 @@ public class TopicStream extends CloudSolrStream implements Expressible  {
         checkpointCollectionName,
         collectionName,
         ((StreamExpressionValue) idParam.getParameter()).getValue(),
+        initialCheckpoint,
         checkpointEvery,
         params);
   }
@@ -226,6 +242,9 @@ public class TopicStream extends CloudSolrStream implements Expressible  {
     // zkHost
     expression.addParameter(new StreamExpressionNamedParameter("zkHost", zkHost));
     expression.addParameter(new StreamExpressionNamedParameter("id", id));
+    if(initialCheckpoint > -1) {
+      expression.addParameter(new StreamExpressionNamedParameter("initialCheckpoint", Long.toString(initialCheckpoint)));
+    }
     expression.addParameter(new StreamExpressionNamedParameter("checkpointEvery", Long.toString(checkpointEvery)));
 
     return expression;
@@ -279,6 +298,11 @@ public class TopicStream extends CloudSolrStream implements Expressible  {
     this.solrStreams = new ArrayList();
     this.eofTuples = Collections.synchronizedMap(new HashMap());
 
+    if(checkpoints.size() == 0 && streamContext.numWorkers > 1) {
+      //Each worker must maintain its own checkpoints
+      this.id = this.id+"_"+streamContext.workerID;
+    }
+
     if(streamContext.getSolrClientCache() != null) {
       cloudSolrClient = streamContext.getSolrClientCache().getCloudSolrClient(zkHost);
     } else {
@@ -385,7 +409,13 @@ public class TopicStream extends CloudSolrStream implements Expressible  {
 
     for(Slice slice : slices) {
       String sliceName = slice.getName();
-      long checkpoint = getCheckpoint(slice, clusterState.getLiveNodes());
+      long checkpoint = 0;
+      if(initialCheckpoint > -1) {
+        checkpoint = initialCheckpoint;
+      } else {
+        checkpoint = getCheckpoint(slice, clusterState.getLiveNodes());
+      }
+
       this.checkpoints.put(sliceName, checkpoint);
     }
   }
@@ -405,7 +435,9 @@ public class TopicStream extends CloudSolrStream implements Expressible  {
         SolrStream solrStream = new SolrStream(coreUrl, params);
 
         if(streamContext != null) {
-          solrStream.setStreamContext(streamContext);
+          StreamContext localContext = new StreamContext();
+          localContext.setSolrClientCache(streamContext.getSolrClientCache());
+          solrStream.setStreamContext(localContext);
         }
 
         try {
@@ -502,6 +534,9 @@ public class TopicStream extends CloudSolrStream implements Expressible  {
           throw new Exception("Collection not found:" + this.collection);
         }
       }
+
+
+      Iterator<String> iterator = params.getParameterNamesIterator();
       ModifiableSolrParams mParams = new ModifiableSolrParams(params);
       mParams.set("distrib", "false"); // We are the aggregator.
       String fl = mParams.get("fl");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/fc3894e8/solr/solrj/src/test-files/solrj/solr/configsets/streaming/conf/schema.xml
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test-files/solrj/solr/configsets/streaming/conf/schema.xml b/solr/solrj/src/test-files/solrj/solr/configsets/streaming/conf/schema.xml
index 34ecdcb..e7f2772 100644
--- a/solr/solrj/src/test-files/solrj/solr/configsets/streaming/conf/schema.xml
+++ b/solr/solrj/src/test-files/solrj/solr/configsets/streaming/conf/schema.xml
@@ -387,7 +387,7 @@
     -->
 
 
-    <field name="id" type="int" indexed="true" stored="true" multiValued="false" required="false"/>
+    <field name="id" type="string" indexed="true" stored="true" multiValued="false" required="false"/>
     <field name="signatureField" type="string" indexed="true" stored="false"/>
 
     <field name="s_multi" type="string" indexed="true" stored="true" docValues="true" multiValued="true"/>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/fc3894e8/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java
index a031e58..41f3309 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java
@@ -615,13 +615,13 @@ public class JdbcTest extends SolrCloudTestCase {
     assertEquals("my_float_col".length(), resultSetMetaData.getColumnDisplaySize(4));
     assertEquals("testnull_i".length(), resultSetMetaData.getColumnDisplaySize(5));
 
-    assertEquals("Long", resultSetMetaData.getColumnTypeName(1));
+    assertEquals("String", resultSetMetaData.getColumnTypeName(1));
     assertEquals("Long", resultSetMetaData.getColumnTypeName(2));
     assertEquals("String", resultSetMetaData.getColumnTypeName(3));
     assertEquals("Double", resultSetMetaData.getColumnTypeName(4));
     assertEquals("Long", resultSetMetaData.getColumnTypeName(5));
 
-    assertEquals(Types.DOUBLE, resultSetMetaData.getColumnType(1));
+    assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(1));
     assertEquals(Types.DOUBLE, resultSetMetaData.getColumnType(2));
     assertEquals(Types.VARCHAR, resultSetMetaData.getColumnType(3));
     assertEquals(Types.DOUBLE, resultSetMetaData.getColumnType(4));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/fc3894e8/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
index b1da1c6..4af565a 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
@@ -261,7 +261,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     tuples = getTuples(stream);
 
     assertEquals(5, tuples.size());
-    assertOrder(tuples, 0,2,1,3,4);
+    assertOrder(tuples, 0, 2, 1, 3, 4);
   }
 
   @Test
@@ -1548,7 +1548,9 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     stream = new HashJoinStream(expression, factory);
     tuples = getTuples(stream);
     assertEquals(17, tuples.size());
-    assertOrder(tuples, 1, 1, 2, 2, 15, 15, 3, 3, 3, 4, 4, 4, 5, 5, 5, 6, 7);
+
+    //Does a lexical sort
+    assertOrder(tuples, 1, 1, 15, 15, 2, 2, 3, 3, 3, 4, 4, 4, 5, 5, 5, 6, 7);
 
   }
 
@@ -2526,6 +2528,138 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     }
   }
 
+
+  @Test
+  public void testParallelTopicStream() throws Exception {
+
+    new UpdateRequest()
+        .add(id, "0", "a_s", "hello", "a_i", "0", "a_f", "1")
+        .add(id, "2", "a_s", "hello", "a_i", "2", "a_f", "2")
+        .add(id, "3", "a_s", "hello", "a_i", "3", "a_f", "3")
+        .add(id, "4", "a_s", "hello", "a_i", "4", "a_f", "4")
+        .add(id, "1", "a_s", "hello", "a_i", "1", "a_f", "5")
+        .add(id, "5", "a_s", "hello", "a_i", "10", "a_f", "6")
+        .add(id, "6", "a_s", "hello", "a_i", "11", "a_f", "7")
+        .add(id, "7", "a_s", "hello", "a_i", "12", "a_f", "8")
+        .add(id, "8", "a_s", "hello", "a_i", "13", "a_f", "9")
+        .add(id, "9", "a_s", "hello", "a_i", "14", "a_f", "10")
+        .commit(cluster.getSolrClient(), COLLECTION);
+
+    StreamFactory factory = new StreamFactory()
+        .withCollectionZkHost("collection1", cluster.getZkServer().getZkAddress())
+        .withFunctionName("topic", TopicStream.class)
+        .withFunctionName("search", CloudSolrStream.class)
+        .withFunctionName("parallel", ParallelStream.class)
+        .withFunctionName("daemon", DaemonStream.class);
+
+    StreamExpression expression;
+    TupleStream stream;
+    List<Tuple> tuples;
+
+    SolrClientCache cache = new SolrClientCache();
+
+    try {
+      //Store checkpoints in the same index as the main documents. This is perfectly valid
+      expression = StreamExpressionParser.parse("parallel(collection1, " +
+                                                         "workers=\"2\", " +
+                                                         "sort=\"_version_ asc\"," +
+                                                         "topic(collection1, " +
+                                                               "collection1, " +
+                                                               "q=\"a_s:hello\", " +
+                                                               "fl=\"id\", " +
+                                                               "id=\"1000000\", " +
+                                                               "partitionKeys=\"id\"))");
+
+      stream = factory.constructStream(expression);
+      StreamContext context = new StreamContext();
+      context.setSolrClientCache(cache);
+      stream.setStreamContext(context);
+      tuples = getTuples(stream);
+
+      //Should be zero because the checkpoints will be set to the highest version on the shards.
+      assertEquals(tuples.size(), 0);
+
+      cluster.getSolrClient().commit("collection1");
+      //Now check to see if the checkpoints are present
+
+      expression = StreamExpressionParser.parse("search(collection1, q=\"id:1000000*\", fl=\"id, checkpoint_ss, _version_\", sort=\"id asc\")");
+
+      stream = factory.constructStream(expression);
+      context = new StreamContext();
+      context.setSolrClientCache(cache);
+      stream.setStreamContext(context);
+      tuples = getTuples(stream);
+      assertEquals(tuples.size(), 2);
+      List<String> checkpoints = tuples.get(0).getStrings("checkpoint_ss");
+      assertEquals(checkpoints.size(), 2);
+      String id1 = tuples.get(0).getString("id");
+      String id2 = tuples.get(1).getString("id");
+      assertTrue(id1.equals("1000000_0"));
+      assertTrue(id2.equals("1000000_1"));
+
+      //Index a few more documents
+      new UpdateRequest()
+          .add(id, "10", "a_s", "hello", "a_i", "13", "a_f", "9")
+          .add(id, "11", "a_s", "hello", "a_i", "14", "a_f", "10")
+          .commit(cluster.getSolrClient(), COLLECTION);
+
+      expression = StreamExpressionParser.parse("parallel(collection1, " +
+          "workers=\"2\", " +
+          "sort=\"_version_ asc\"," +
+          "topic(collection1, " +
+          "collection1, " +
+          "q=\"a_s:hello\", " +
+          "fl=\"id\", " +
+          "id=\"1000000\", " +
+          "partitionKeys=\"id\"))");
+
+      stream = factory.constructStream(expression);
+      context = new StreamContext();
+      context.setSolrClientCache(cache);
+      stream.setStreamContext(context);
+
+      assertTopicRun(stream, "10", "11");
+
+      //Test with initialCheckpoint. This should pull all documents.
+
+      expression = StreamExpressionParser.parse("parallel(collection1, " +
+          "workers=\"2\", " +
+          "sort=\"_version_ asc\"," +
+          "topic(collection1, " +
+          "collection1, " +
+          "q=\"a_s:hello\", " +
+          "fl=\"id\", " +
+          "id=\"2000000\", " +
+          "initialCheckpoint=\"0\", " +
+          "partitionKeys=\"id\"))");
+
+      stream = factory.constructStream(expression);
+      context = new StreamContext();
+      context.setSolrClientCache(cache);
+      stream.setStreamContext(context);
+      assertTopicRun(stream, "0","1","2","3","4","5","6","7","8","9","10","11");
+
+      //Add more documents
+      //Index a few more documents
+      new UpdateRequest()
+          .add(id, "12", "a_s", "hello", "a_i", "13", "a_f", "9")
+          .add(id, "13", "a_s", "hello", "a_i", "14", "a_f", "10")
+          .commit(cluster.getSolrClient(), COLLECTION);
+
+      //Run the same topic again including the initialCheckpoint. It should start where it left off.
+      //initialCheckpoint should be ignored for all but the first run.
+      stream = factory.constructStream(expression);
+      context = new StreamContext();
+      context.setSolrClientCache(cache);
+      stream.setStreamContext(context);
+      assertTopicRun(stream, "12","13");
+    } finally {
+      cache.close();
+    }
+  }
+
+
+
   @Test
   public void testUpdateStream() throws Exception {
 
@@ -3031,9 +3165,9 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     int i = 0;
     for(int val : ids) {
       Tuple t = tuples.get(i);
-      Long tip = (Long)t.get(fieldName);
-      if(tip.intValue() != val) {
-        throw new Exception("Found value:"+tip.intValue()+" expecting:"+val);
+      String tip = t.getString(fieldName);
+      if(!tip.equals(Integer.toString(val))) {
+        throw new Exception("Found value:"+tip+" expecting:"+val);
       }
       ++i;
     }
@@ -3119,9 +3253,9 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     int i=0;
     for(int val : ids) {
       Map t = maps.get(i);
-      Long tip = (Long)t.get("id");
-      if(tip.intValue() != val) {
-        throw new Exception("Found value:"+tip.intValue()+" expecting:"+val);
+      String tip = (String)t.get("id");
+      if(!tip.equals(Integer.toString(val))) {
+        throw new Exception("Found value:"+tip+" expecting:"+val);
       }
       ++i;
     }
@@ -3145,4 +3279,39 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     return true;
   }
 
+  private void assertTopicRun(TupleStream stream, String... idArray) throws Exception {
+    long version = -1;
+    int count = 0;
+    List<String> ids = new ArrayList<>();
+    for(String id : idArray) {
+      ids.add(id);
+    }
+
+    try {
+      stream.open();
+      while (true) {
+        Tuple tuple = stream.read();
+        if (tuple.EOF) {
+          break;
+        } else {
+          ++count;
+          String id = tuple.getString("id");
+          if (!ids.contains(id)) {
+            throw new Exception("Expecting id in topic run not found:" + id);
+          }
+
+          long v = tuple.getLong("_version_");
+          if (v < version) {
+            throw new Exception("Out of order version in topic run:" + v);
+          }
+        }
+      }
+    } finally {
+      stream.close();
+    }
+
+    if(count != ids.size()) {
+      throw new Exception("Wrong count in topic run:"+count);
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/fc3894e8/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
index 9685b74..0da6750 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
@@ -1315,7 +1315,12 @@ public class StreamingTest extends SolrCloudTestCase {
 
     SolrParams sParams = mapParams("q", "a_s:hello0", "rows", "500", "fl", "id");
 
-    TopicStream topicStream = new TopicStream(zkHost, COLLECTION, COLLECTION, "50000000", 1000000, sParams);
+    TopicStream topicStream = new TopicStream(zkHost,
+                                              COLLECTION,
+                                              COLLECTION,
+                                              "50000000",
+                                              -1,
+                                              1000000, sParams);
 
     DaemonStream daemonStream = new DaemonStream(topicStream, "daemon1", 1000, 500);
     daemonStream.setStreamContext(context);
@@ -1895,9 +1900,9 @@ public class StreamingTest extends SolrCloudTestCase {
     int i = 0;
     for(int val : ids) {
       Tuple t = tuples.get(i);
-      Long tip = (Long)t.get("id");
-      if(tip.intValue() != val) {
-        throw new Exception("Found value:"+tip.intValue()+" expecting:"+val);
+      String tip = (String)t.get("id");
+      if(!tip.equals(Integer.toString(val))) {
+        throw new Exception("Found value:"+tip+" expecting:"+val);
       }
       ++i;
     }
@@ -1926,9 +1931,9 @@ public class StreamingTest extends SolrCloudTestCase {
     int i=0;
     for(int val : ids) {
       Map t = maps.get(i);
-      Long tip = (Long)t.get("id");
-      if(tip.intValue() != val) {
-        throw new Exception("Found value:"+tip.intValue()+" expecting:"+val);
+      String tip = (String)t.get("id");
+      if(!tip.equals(Integer.toString(val))) {
+        throw new Exception("Found value:"+tip+" expecting:"+val);
       }
       ++i;
     }


[18/51] [abbrv] lucene-solr:apiv2: LUCENE-7013: add licence header position checker to -validate-source-patterns, and fix the violations it found

Posted by sa...@apache.org.
LUCENE-7013: add licence header position checker to -validate-source-patterns, and fix the violations it found


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/51d4af68
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/51d4af68
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/51d4af68

Branch: refs/heads/apiv2
Commit: 51d4af6859f64434bda4c055449328b847de5ed2
Parents: 1e92fc5
Author: Steve Rowe <sa...@apache.org>
Authored: Wed Jul 13 13:17:50 2016 -0400
Committer: Steve Rowe <sa...@apache.org>
Committed: Wed Jul 13 13:21:36 2016 -0400

----------------------------------------------------------------------
 build.xml                                       | 34 ++++++++++++++----
 .../charfilter/HTMLStripCharFilter.java         |  1 +
 .../charfilter/HTMLStripCharFilter.jflex        |  1 +
 .../lucene/analysis/minhash/MinHashFilter.java  |  5 +--
 .../analysis/minhash/MinHashFilterFactory.java  |  5 +--
 .../analysis/standard/ASCIITLD.jflex-macro      |  2 +-
 .../analysis/standard/ClassicTokenizer.java     |  1 +
 .../standard/ClassicTokenizerImpl.jflex         |  1 +
 .../standard/UAX29URLEmailTokenizer.java        |  1 +
 .../standard/UAX29URLEmailTokenizerImpl.jflex   |  1 +
 .../lucene/analysis/util/UnicodeProps.java      |  4 +--
 .../wikipedia/WikipediaTokenizerImpl.java       |  1 +
 .../wikipedia/WikipediaTokenizerImpl.jflex      |  1 +
 .../analysis/minhash/MinHashFilterTest.java     |  4 +--
 .../tools/groovy/generate-unicode-data.groovy   |  4 +--
 .../analysis/standard/StandardTokenizer.java    |  1 +
 .../standard/StandardTokenizerImpl.jflex        |  1 +
 .../apache/lucene/index/MergeReaderWrapper.java |  4 +--
 .../apache/lucene/index/SortingLeafReader.java  |  4 +--
 .../lucene/store/ByteArrayIndexInput.java       |  4 +--
 .../lucene62/TestLucene62SegmentInfoFormat.java |  4 +--
 .../apache/lucene/index/TestIndexSorting.java   |  4 +--
 .../taxonomy/directory/TestAddTaxonomy.java     |  4 +--
 .../join/PointInSetIncludingScoreQuery.java     | 34 +++++++++---------
 .../store/HardlinkCopyDirectoryWrapper.java     |  4 +--
 .../store/TestHardLinkCopyDirectoryWrapper.java |  4 +--
 .../apache/lucene/replicator/nrt/CopyJob.java   |  4 +--
 .../lucene/replicator/nrt/CopyOneFile.java      |  4 +--
 .../apache/lucene/replicator/nrt/CopyState.java |  4 +--
 .../lucene/replicator/nrt/FileMetaData.java     |  4 +--
 .../org/apache/lucene/replicator/nrt/Node.java  |  4 +--
 .../nrt/NodeCommunicationException.java         |  4 +--
 .../nrt/PreCopyMergedSegmentWarmer.java         |  4 +--
 .../lucene/replicator/nrt/PrimaryNode.java      |  4 +--
 .../replicator/nrt/ReplicaFileDeleter.java      |  4 +--
 .../lucene/replicator/nrt/ReplicaNode.java      |  4 +--
 .../nrt/SegmentInfosSearcherManager.java        |  4 +--
 .../lucene/replicator/nrt/Connection.java       |  4 +--
 .../org/apache/lucene/replicator/nrt/Jobs.java  |  4 +--
 .../lucene/replicator/nrt/NodeProcess.java      |  4 +--
 .../lucene/replicator/nrt/SimpleCopyJob.java    |  4 +--
 .../replicator/nrt/SimplePrimaryNode.java       |  4 +--
 .../replicator/nrt/SimpleReplicaNode.java       |  4 +--
 .../lucene/replicator/nrt/SimpleServer.java     |  4 +--
 .../lucene/replicator/nrt/SimpleTransLog.java   |  4 +--
 .../replicator/nrt/TestNRTReplication.java      |  4 +--
 .../nrt/TestStressNRTReplication.java           |  4 +--
 .../lucene/replicator/nrt/ThreadPumper.java     |  4 +--
 .../geopoint/document/GeoPointTokenStream.java  |  4 +--
 .../lucene/spatial/util/TestGeoPointField.java  |  4 +--
 .../lucene/analysis/MockSynonymAnalyzer.java    |  4 +--
 .../lucene/analysis/MockSynonymFilter.java      |  4 +--
 .../org/apache/lucene/geo/EarthDebugger.java    |  4 +--
 .../apache/lucene/mockfile/VirusCheckingFS.java |  4 +--
 .../lucene/mockfile/TestVirusCheckingFS.java    |  4 +--
 .../java/org/apache/solr/cloud/LockTree.java    | 30 ++++++++--------
 .../repository/BackupRepositoryFactory.java     |  4 +--
 .../backup/repository/HdfsBackupRepository.java |  4 +--
 .../repository/LocalFileSystemRepository.java   |  4 +--
 .../org/apache/solr/handler/GraphHandler.java   |  6 ++--
 .../solr/response/GraphMLResponseWriter.java    |  6 ++--
 .../security/AutorizationEditOperation.java     | 29 ++++++++--------
 .../org/apache/solr/security/Permission.java    | 35 +++++++++----------
 .../solr/security/PermissionNameProvider.java   |  4 +--
 .../apache/solr/update/IndexFingerprint.java    |  4 +--
 .../ClassificationUpdateProcessor.java          | 34 +++++++++---------
 .../ClassificationUpdateProcessorFactory.java   | 20 +++++------
 .../solr/cloud/CreateCollectionCleanupTest.java |  4 +--
 .../cloud/OverseerModifyCollectionTest.java     | 34 +++++++++---------
 .../apache/solr/cloud/SolrCLIZkUtilsTest.java   | 34 +++++++++---------
 .../org/apache/solr/cloud/TestLockTree.java     | 34 +++++++++---------
 .../cloud/TestOnReconnectListenerSupport.java   |  4 +--
 .../cloud/TestSizeLimitedDistributedMap.java    |  4 +--
 .../solr/cloud/rule/ImplicitSnitchTest.java     | 36 ++++++++++----------
 .../solr/core/BlobRepositoryCloudTest.java      | 35 +++++++++----------
 .../solr/core/BlobRepositoryMockingTest.java    | 35 +++++++++----------
 .../apache/solr/handler/BackupRestoreUtils.java |  4 +--
 .../solr/handler/TestHdfsBackupRestoreCore.java |  6 ++--
 .../component/ResourceSharingTestComponent.java | 34 +++++++++---------
 .../apache/solr/request/TestFacetMethods.java   |  4 +--
 .../response/TestGraphMLResponseWriter.java     |  4 +--
 .../apache/solr/schema/BooleanFieldTest.java    |  4 +--
 .../solr/schema/TestManagedSchemaAPI.java       | 34 +++++++++---------
 .../search/TestGraphTermsQParserPlugin.java     |  4 +--
 ...lassificationUpdateProcessorFactoryTest.java |  4 +--
 .../solrj/io/graph/ShortestPathStream.java      |  6 ++--
 .../solrj/io/stream/ScoreNodesStream.java       |  6 ++--
 .../client/solrj/io/stream/TopicStream.java     |  4 +--
 .../solr/common/cloud/ClusterProperties.java    |  4 +--
 .../common/cloud/CollectionStatePredicate.java  |  4 +--
 .../common/cloud/CollectionStateWatcher.java    |  6 ++--
 .../solr/common/cloud/ZkMaintenanceUtils.java   | 34 +++++++++---------
 .../solrj/impl/CloudSolrClientBuilderTest.java  | 18 +++++-----
 .../ConcurrentUpdateSolrClientBuilderTest.java  | 12 +++----
 .../solrj/impl/HttpSolrClientBuilderTest.java   | 22 ++++++------
 .../solrj/impl/LBHttpSolrClientBuilderTest.java | 18 +++++-----
 .../solrj/io/graph/GraphExpressionTest.java     |  6 ++--
 .../solr/client/solrj/io/graph/GraphTest.java   |  4 +--
 .../cloud/TestCollectionStateWatchers.java      |  4 +--
 99 files changed, 463 insertions(+), 433 deletions(-)
----------------------------------------------------------------------
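
The mechanical part of this commit is visible throughout the diffs below: wherever a source file opened with its package declaration and only then gave the ASF license comment, the two are swapped so the license header comes first. Schematically (the package name here is just a placeholder):

// Flagged by the new check: package declaration precedes license header
package org.apache.example;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. ...
 */

// Fixed layout: license header first, then the package declaration
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. ...
 */

package org.apache.example;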


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/build.xml
----------------------------------------------------------------------
diff --git a/build.xml b/build.xml
index d0c9aa5..5cc7bd8 100644
--- a/build.xml
+++ b/build.xml
@@ -166,10 +166,17 @@
       }
       
       def javadocsPattern = ~$/(?sm)^\Q/**\E(.*?)\Q*/\E/$;
+      def commentPattern  = ~$/(?sm)^\Q/*\E(.*?)\Q*/\E/$;
       def lineSplitter = ~$/[\r\n]+/$;
       def licenseMatcher = Defaults.createDefaultMatcher();
       def validLoggerPattern = ~$/(?s)\b(private\s|static\s|final\s){3}+\s*Logger\s+\p{javaJavaIdentifierStart}+\s+=\s+\QLoggerFactory.getLogger(MethodHandles.lookup().lookupClass());\E/$;
+      def packagePattern = ~$/(?m)^\s*package\s+org\.apache.*;/$;
       
+      def isLicense = { matcher, ratDocument ->
+        licenseMatcher.reset();
+        return lineSplitter.split(matcher.group(1)).any{ licenseMatcher.match(ratDocument, it) };
+      }
+
       ant.fileScanner{
         fileset(dir: baseDir){
           extensions.each{
@@ -197,17 +204,32 @@
           }
         }
         def javadocsMatcher = javadocsPattern.matcher(text);
+        def ratDocument = new FileDocument(f);
         while (javadocsMatcher.find()) {
-          def ratDocument = new FileDocument(f);
-          licenseMatcher.reset();
-          if (lineSplitter.split(javadocsMatcher.group(1)).any{ licenseMatcher.match(ratDocument, it) }) {
+          if (isLicense(javadocsMatcher, ratDocument)) {
             reportViolation(f, String.format(Locale.ENGLISH, 'javadoc-style license header [%s]',
               ratDocument.getMetaData().value(MetaData.RAT_URL_LICENSE_FAMILY_NAME)));
           }
         }
-        if (f.toString().endsWith('.java') && text.contains('org.slf4j.LoggerFactory')) {
-          if (!validLoggerPattern.matcher(text).find()) {
-            reportViolation(f, 'invalid logging pattern [not private static final, uses static class name]');
+        if (f.toString().endsWith('.java')) {
+          if (text.contains('org.slf4j.LoggerFactory')) {
+            if (!validLoggerPattern.matcher(text).find()) {
+              reportViolation(f, 'invalid logging pattern [not private static final, uses static class name]');
+            }
+          }
+          def packageMatcher = packagePattern.matcher(text);
+          if (packageMatcher.find()) {
+            def packageStartPos = packageMatcher.start();
+            def commentMatcher = commentPattern.matcher(text);
+            while (commentMatcher.find()) {
+              if (isLicense(commentMatcher, ratDocument)) {
+                if (commentMatcher.start() < packageStartPos) {
+                  break; // This file is all good, so break loop: license header precedes package definition
+                } else {
+                  reportViolation(f, 'package declaration precedes license header');
+                }
+              }
+            }
           }
         }
       };
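
The Groovy added to build.xml above boils down to: find the package declaration, scan the block comments, and if a comment recognized as the license header starts after the package declaration, report a violation. Below is a minimal standalone Java sketch of that logic under simplifying assumptions: a plain substring test stands in for RAT's license matcher, and only the first matching license comment is considered.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class LicenseHeaderPositionCheck {
  // Block comments starting at the beginning of a line, and org.apache package
  // declarations, mirroring the patterns added to build.xml.
  private static final Pattern COMMENT = Pattern.compile("(?sm)^/\\*(.*?)\\*/");
  private static final Pattern PACKAGE = Pattern.compile("(?m)^\\s*package\\s+org\\.apache.*;");

  // Returns null when the file passes, otherwise the violation message.
  static String check(String text) {
    Matcher pkg = PACKAGE.matcher(text);
    if (!pkg.find()) {
      return null; // no package declaration to position-check
    }
    int packageStart = pkg.start();
    Matcher comment = COMMENT.matcher(text);
    while (comment.find()) {
      // Stand-in for licenseMatcher: the real build uses Apache RAT to recognize the header.
      if (comment.group(1).contains("Licensed to the Apache Software Foundation")) {
        return comment.start() < packageStart
            ? null                                           // header precedes package: OK
            : "package declaration precedes license header"; // header found after package
      }
    }
    return null; // no license header at all; left to the other source-pattern checks
  }
}

A file whose first line is a package declaration followed by the ASF comment returns the violation string; the swapped layout returns null.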

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/HTMLStripCharFilter.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/HTMLStripCharFilter.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/HTMLStripCharFilter.java
index 68a939b..ba44dd8 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/HTMLStripCharFilter.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/HTMLStripCharFilter.java
@@ -16,6 +16,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.lucene.analysis.charfilter;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/HTMLStripCharFilter.jflex
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/HTMLStripCharFilter.jflex b/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/HTMLStripCharFilter.jflex
index 352ede7..98c3946 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/HTMLStripCharFilter.jflex
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/HTMLStripCharFilter.jflex
@@ -14,6 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.lucene.analysis.charfilter;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/analysis/common/src/java/org/apache/lucene/analysis/minhash/MinHashFilter.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/minhash/MinHashFilter.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/minhash/MinHashFilter.java
index 60df1c0..1a1a637 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/minhash/MinHashFilter.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/minhash/MinHashFilter.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.analysis.minhash;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -16,6 +14,9 @@ package org.apache.lucene.analysis.minhash;
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
+package org.apache.lucene.analysis.minhash;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/analysis/common/src/java/org/apache/lucene/analysis/minhash/MinHashFilterFactory.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/minhash/MinHashFilterFactory.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/minhash/MinHashFilterFactory.java
index d951b9b..c4c827b 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/minhash/MinHashFilterFactory.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/minhash/MinHashFilterFactory.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.analysis.minhash;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -16,6 +14,9 @@ package org.apache.lucene.analysis.minhash;
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
+package org.apache.lucene.analysis.minhash;
+
 import java.util.Map;
 
 import org.apache.lucene.analysis.TokenStream;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/ASCIITLD.jflex-macro
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/ASCIITLD.jflex-macro b/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/ASCIITLD.jflex-macro
index 5d78558..ec336f2 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/ASCIITLD.jflex-macro
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/ASCIITLD.jflex-macro
@@ -14,6 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 // Generated from IANA Root Zone Database <http://www.internic.net/zones/root.zone>
 // file version from Friday, December 6, 2013 4:34:10 AM UTC
 // generated on Friday, December 6, 2013 3:21:59 PM UTC
@@ -363,4 +364,3 @@ ASCIITLD = "." (
 	| [zZ][mM]
 	| [zZ][wW]
 	) "."?   // Accept trailing root (empty) domain
-

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/ClassicTokenizer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/ClassicTokenizer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/ClassicTokenizer.java
index 76b1ef6..339ab8b 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/ClassicTokenizer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/ClassicTokenizer.java
@@ -14,6 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.lucene.analysis.standard;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/ClassicTokenizerImpl.jflex
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/ClassicTokenizerImpl.jflex b/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/ClassicTokenizerImpl.jflex
index de65d26..4d6ad16 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/ClassicTokenizerImpl.jflex
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/ClassicTokenizerImpl.jflex
@@ -14,6 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.lucene.analysis.standard;
 
 import java.io.Reader;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailTokenizer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailTokenizer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailTokenizer.java
index 03de032..d2b02e4 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailTokenizer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailTokenizer.java
@@ -14,6 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.lucene.analysis.standard;
 
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailTokenizerImpl.jflex
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailTokenizerImpl.jflex b/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailTokenizerImpl.jflex
index 32af631..73a471e 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailTokenizerImpl.jflex
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailTokenizerImpl.jflex
@@ -14,6 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.lucene.analysis.standard;
 
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/analysis/common/src/java/org/apache/lucene/analysis/util/UnicodeProps.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/util/UnicodeProps.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/util/UnicodeProps.java
index e1537ae..75070d1 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/util/UnicodeProps.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/util/UnicodeProps.java
@@ -1,7 +1,5 @@
 // DO NOT EDIT THIS FILE! Use "ant unicode-data" to recreate.
 
-package org.apache.lucene.analysis.util;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -19,6 +17,8 @@ package org.apache.lucene.analysis.util;
  * limitations under the License.
  */
 
+package org.apache.lucene.analysis.util;
+
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.SparseFixedBitSet;
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/analysis/common/src/java/org/apache/lucene/analysis/wikipedia/WikipediaTokenizerImpl.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/wikipedia/WikipediaTokenizerImpl.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/wikipedia/WikipediaTokenizerImpl.java
index 337a562..7f9227f 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/wikipedia/WikipediaTokenizerImpl.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/wikipedia/WikipediaTokenizerImpl.java
@@ -16,6 +16,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.lucene.analysis.wikipedia;
 
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/analysis/common/src/java/org/apache/lucene/analysis/wikipedia/WikipediaTokenizerImpl.jflex
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/wikipedia/WikipediaTokenizerImpl.jflex b/lucene/analysis/common/src/java/org/apache/lucene/analysis/wikipedia/WikipediaTokenizerImpl.jflex
index 404fbd6..3ac31e4 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/wikipedia/WikipediaTokenizerImpl.jflex
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/wikipedia/WikipediaTokenizerImpl.jflex
@@ -14,6 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.lucene.analysis.wikipedia;
 
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/analysis/common/src/test/org/apache/lucene/analysis/minhash/MinHashFilterTest.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/minhash/MinHashFilterTest.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/minhash/MinHashFilterTest.java
index 7c02d3b..a4080fe 100644
--- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/minhash/MinHashFilterTest.java
+++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/minhash/MinHashFilterTest.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.analysis.minhash;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.analysis.minhash;
  * limitations under the License.
  */
 
+package org.apache.lucene.analysis.minhash;
+
 import java.io.IOException;
 import java.io.StringReader;
 import java.io.UnsupportedEncodingException;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/analysis/common/src/tools/groovy/generate-unicode-data.groovy
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/tools/groovy/generate-unicode-data.groovy b/lucene/analysis/common/src/tools/groovy/generate-unicode-data.groovy
index 3ea2b9e..37857e0 100644
--- a/lucene/analysis/common/src/tools/groovy/generate-unicode-data.groovy
+++ b/lucene/analysis/common/src/tools/groovy/generate-unicode-data.groovy
@@ -44,8 +44,6 @@ def unicodeVersion = UCharacter.getUnicodeVersion().toString();
 def code = """
 // DO NOT EDIT THIS FILE! Use "ant unicode-data" to recreate.
 
-package org.apache.lucene.analysis.util;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -63,6 +61,8 @@ package org.apache.lucene.analysis.util;
  * limitations under the License.
  */
 
+package org.apache.lucene.analysis.util;
+
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.SparseFixedBitSet;
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardTokenizer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardTokenizer.java b/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardTokenizer.java
index 5c5169a..5b8fc75 100644
--- a/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardTokenizer.java
+++ b/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardTokenizer.java
@@ -14,6 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.lucene.analysis.standard;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardTokenizerImpl.jflex
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardTokenizerImpl.jflex b/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardTokenizerImpl.jflex
index 24c401d..11b2cbd 100644
--- a/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardTokenizerImpl.jflex
+++ b/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardTokenizerImpl.jflex
@@ -14,6 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.lucene.analysis.standard;
 
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/core/src/java/org/apache/lucene/index/MergeReaderWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/MergeReaderWrapper.java b/lucene/core/src/java/org/apache/lucene/index/MergeReaderWrapper.java
index 2401d0f..bcecf2f 100644
--- a/lucene/core/src/java/org/apache/lucene/index/MergeReaderWrapper.java
+++ b/lucene/core/src/java/org/apache/lucene/index/MergeReaderWrapper.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.index;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.index;
  * limitations under the License.
  */
 
+package org.apache.lucene.index;
+
 import java.io.IOException;
 
 import org.apache.lucene.codecs.DocValuesProducer;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/core/src/java/org/apache/lucene/index/SortingLeafReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortingLeafReader.java b/lucene/core/src/java/org/apache/lucene/index/SortingLeafReader.java
index 70d5d20..c1476d0 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SortingLeafReader.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SortingLeafReader.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.index;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.index;
  * limitations under the License.
  */
 
+package org.apache.lucene.index;
+
 import java.io.IOException;
 import java.util.Arrays;
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/core/src/java/org/apache/lucene/store/ByteArrayIndexInput.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/store/ByteArrayIndexInput.java b/lucene/core/src/java/org/apache/lucene/store/ByteArrayIndexInput.java
index 80f5647..6ad6125 100644
--- a/lucene/core/src/java/org/apache/lucene/store/ByteArrayIndexInput.java
+++ b/lucene/core/src/java/org/apache/lucene/store/ByteArrayIndexInput.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.store;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.store;
  * limitations under the License.
  */
 
+package org.apache.lucene.store;
+
 import java.io.IOException;
 
 /** 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/core/src/test/org/apache/lucene/codecs/lucene62/TestLucene62SegmentInfoFormat.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene62/TestLucene62SegmentInfoFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene62/TestLucene62SegmentInfoFormat.java
index 8c758f2..54110f7 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/lucene62/TestLucene62SegmentInfoFormat.java
+++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene62/TestLucene62SegmentInfoFormat.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.codecs.lucene62;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.codecs.lucene62;
  * limitations under the License.
  */
 
+package org.apache.lucene.codecs.lucene62;
+
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.index.BaseSegmentInfoFormatTestCase;
 import org.apache.lucene.util.TestUtil;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
index 6528765..363ccb2 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.index;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.index;
  * limitations under the License.
  */
 
+package org.apache.lucene.index;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestAddTaxonomy.java
----------------------------------------------------------------------
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestAddTaxonomy.java b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestAddTaxonomy.java
index a860ec9..232ccee 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestAddTaxonomy.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestAddTaxonomy.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.facet.taxonomy.directory;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.facet.taxonomy.directory;
  * limitations under the License.
  */
 
+package org.apache.lucene.facet.taxonomy.directory;
+
 import java.io.IOException;
 import java.util.HashSet;
 import java.util.Random;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/join/src/java/org/apache/lucene/search/join/PointInSetIncludingScoreQuery.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/PointInSetIncludingScoreQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/PointInSetIncludingScoreQuery.java
index 5018e97..f99f318 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/PointInSetIncludingScoreQuery.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/PointInSetIncludingScoreQuery.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.lucene.search.join;
 
 import java.io.IOException;
@@ -32,23 +49,6 @@ import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.FixedBitSet;
 
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
 // A TermsIncludingScoreQuery variant for point values:
 abstract class PointInSetIncludingScoreQuery extends Query {
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/misc/src/java/org/apache/lucene/store/HardlinkCopyDirectoryWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/java/org/apache/lucene/store/HardlinkCopyDirectoryWrapper.java b/lucene/misc/src/java/org/apache/lucene/store/HardlinkCopyDirectoryWrapper.java
index 374178e..c7b164a 100644
--- a/lucene/misc/src/java/org/apache/lucene/store/HardlinkCopyDirectoryWrapper.java
+++ b/lucene/misc/src/java/org/apache/lucene/store/HardlinkCopyDirectoryWrapper.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.store;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.store;
  * limitations under the License.
  */
 
+package org.apache.lucene.store;
+
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.nio.file.FileAlreadyExistsException;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/misc/src/test/org/apache/lucene/store/TestHardLinkCopyDirectoryWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/store/TestHardLinkCopyDirectoryWrapper.java b/lucene/misc/src/test/org/apache/lucene/store/TestHardLinkCopyDirectoryWrapper.java
index 953dc59..4a88dd4 100644
--- a/lucene/misc/src/test/org/apache/lucene/store/TestHardLinkCopyDirectoryWrapper.java
+++ b/lucene/misc/src/test/org/apache/lucene/store/TestHardLinkCopyDirectoryWrapper.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.store;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.store;
  * limitations under the License.
  */
 
+package org.apache.lucene.store;
+
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/CopyJob.java
----------------------------------------------------------------------
diff --git a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/CopyJob.java b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/CopyJob.java
index 1e63d1f..2bdcf1d 100644
--- a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/CopyJob.java
+++ b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/CopyJob.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.replicator.nrt;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.replicator.nrt;
  * limitations under the License.
  */
 
+package org.apache.lucene.replicator.nrt;
+
 import java.io.IOException;
 import java.util.Iterator;
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/CopyOneFile.java
----------------------------------------------------------------------
diff --git a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/CopyOneFile.java b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/CopyOneFile.java
index ab71e05..dd9f16c 100644
--- a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/CopyOneFile.java
+++ b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/CopyOneFile.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.replicator.nrt;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.replicator.nrt;
  * limitations under the License.
  */
 
+package org.apache.lucene.replicator.nrt;
+
 import java.io.Closeable;
 import java.io.IOException;
 import java.util.Locale;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/CopyState.java
----------------------------------------------------------------------
diff --git a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/CopyState.java b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/CopyState.java
index c5ddda0..aff45c0 100644
--- a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/CopyState.java
+++ b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/CopyState.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.replicator.nrt;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.replicator.nrt;
  * limitations under the License.
  */
 
+package org.apache.lucene.replicator.nrt;
+
 import java.util.Collections;
 import java.util.Map;
 import java.util.Set;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/FileMetaData.java
----------------------------------------------------------------------
diff --git a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/FileMetaData.java b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/FileMetaData.java
index 897d5ca..ac3e1f0 100644
--- a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/FileMetaData.java
+++ b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/FileMetaData.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.replicator.nrt;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.replicator.nrt;
  * limitations under the License.
  */
 
+package org.apache.lucene.replicator.nrt;
+
 /** Holds metadata details about a single file that we use to confirm two files (one remote, one local) are in fact "identical".
  *
  * @lucene.experimental */

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/Node.java
----------------------------------------------------------------------
diff --git a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/Node.java b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/Node.java
index 85da89c..759497a 100644
--- a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/Node.java
+++ b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/Node.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.replicator.nrt;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.replicator.nrt;
  * limitations under the License.
  */
 
+package org.apache.lucene.replicator.nrt;
+
 import java.io.Closeable;
 import java.io.EOFException;
 import java.io.FileNotFoundException;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/NodeCommunicationException.java
----------------------------------------------------------------------
diff --git a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/NodeCommunicationException.java b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/NodeCommunicationException.java
index d286f3d..cbbb65c 100644
--- a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/NodeCommunicationException.java
+++ b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/NodeCommunicationException.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.replicator.nrt;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.replicator.nrt;
  * limitations under the License.
  */
 
+package org.apache.lucene.replicator.nrt;
+
 /**
  * Should be thrown by subclasses of {@link PrimaryNode} and {@link ReplicaNode} if a non-fatal exception
  * occurred while communicating between nodes.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/PreCopyMergedSegmentWarmer.java
----------------------------------------------------------------------
diff --git a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/PreCopyMergedSegmentWarmer.java b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/PreCopyMergedSegmentWarmer.java
index 77f23ab..0b33f17 100644
--- a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/PreCopyMergedSegmentWarmer.java
+++ b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/PreCopyMergedSegmentWarmer.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.replicator.nrt;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.replicator.nrt;
  * limitations under the License.
  */
 
+package org.apache.lucene.replicator.nrt;
+
 /** A merged segment warmer that pre-copies the merged segment out to
  *  replicas before primary cuts over to the merged segment.  This
  *  ensures that NRT reopen time on replicas is only in proportion to

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/PrimaryNode.java
----------------------------------------------------------------------
diff --git a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/PrimaryNode.java b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/PrimaryNode.java
index 749f54e..1d04d08 100644
--- a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/PrimaryNode.java
+++ b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/PrimaryNode.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.replicator.nrt;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.replicator.nrt;
  * limitations under the License.
  */
 
+package org.apache.lucene.replicator.nrt;
+
 import java.io.IOException;
 import java.io.PrintStream;
 import java.util.Collections;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/ReplicaFileDeleter.java
----------------------------------------------------------------------
diff --git a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/ReplicaFileDeleter.java b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/ReplicaFileDeleter.java
index 1638c29..86dbc52 100644
--- a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/ReplicaFileDeleter.java
+++ b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/ReplicaFileDeleter.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.replicator.nrt;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.replicator.nrt;
  * limitations under the License.
  */
 
+package org.apache.lucene.replicator.nrt;
+
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.nio.file.NoSuchFileException;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/ReplicaNode.java
----------------------------------------------------------------------
diff --git a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/ReplicaNode.java b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/ReplicaNode.java
index 1ca1519..ce9c3ce 100644
--- a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/ReplicaNode.java
+++ b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/ReplicaNode.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.replicator.nrt;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.replicator.nrt;
  * limitations under the License.
  */
 
+package org.apache.lucene.replicator.nrt;
+
 import java.io.IOException;
 import java.io.PrintStream;
 import java.util.ArrayList;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/SegmentInfosSearcherManager.java
----------------------------------------------------------------------
diff --git a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/SegmentInfosSearcherManager.java b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/SegmentInfosSearcherManager.java
index 4fda0fa..4cb49c4 100644
--- a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/SegmentInfosSearcherManager.java
+++ b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/SegmentInfosSearcherManager.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.replicator.nrt;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.replicator.nrt;
  * limitations under the License.
  */
 
+package org.apache.lucene.replicator.nrt;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/Connection.java
----------------------------------------------------------------------
diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/Connection.java b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/Connection.java
index 87fce57..8d671ba 100644
--- a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/Connection.java
+++ b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/Connection.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.replicator.nrt;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.replicator.nrt;
  * limitations under the License.
  */
 
+package org.apache.lucene.replicator.nrt;
+
 import java.io.BufferedOutputStream;
 import java.io.Closeable;
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/Jobs.java
----------------------------------------------------------------------
diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/Jobs.java b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/Jobs.java
index 6968fdb..87feb33 100644
--- a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/Jobs.java
+++ b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/Jobs.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.replicator.nrt;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.replicator.nrt;
  * limitations under the License.
  */
 
+package org.apache.lucene.replicator.nrt;
+
 import java.io.Closeable;
 import java.io.IOException;
 import java.util.PriorityQueue;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/NodeProcess.java
----------------------------------------------------------------------
diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/NodeProcess.java b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/NodeProcess.java
index a0bfb78..daeffc7 100644
--- a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/NodeProcess.java
+++ b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/NodeProcess.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.replicator.nrt;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.replicator.nrt;
  * limitations under the License.
  */
 
+package org.apache.lucene.replicator.nrt;
+
 import java.io.Closeable;
 import java.io.IOException;
 import java.util.concurrent.atomic.AtomicBoolean;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleCopyJob.java
----------------------------------------------------------------------
diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleCopyJob.java b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleCopyJob.java
index 6793df8..ba72ae4 100644
--- a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleCopyJob.java
+++ b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleCopyJob.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.replicator.nrt;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.replicator.nrt;
  * limitations under the License.
  */
 
+package org.apache.lucene.replicator.nrt;
+
 import java.io.IOException;
 import java.util.HashSet;
 import java.util.Iterator;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimplePrimaryNode.java
----------------------------------------------------------------------
diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimplePrimaryNode.java b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimplePrimaryNode.java
index 3d41b32..bb39135 100644
--- a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimplePrimaryNode.java
+++ b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimplePrimaryNode.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.replicator.nrt;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.replicator.nrt;
  * limitations under the License.
  */
 
+package org.apache.lucene.replicator.nrt;
+
 import java.io.BufferedOutputStream;
 import java.io.EOFException;
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleReplicaNode.java
----------------------------------------------------------------------
diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleReplicaNode.java b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleReplicaNode.java
index 6948b78..c394c65 100644
--- a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleReplicaNode.java
+++ b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleReplicaNode.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.replicator.nrt;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.replicator.nrt;
  * limitations under the License.
  */
 
+package org.apache.lucene.replicator.nrt;
+
 import java.io.BufferedOutputStream;
 import java.io.EOFException;
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleServer.java
----------------------------------------------------------------------
diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleServer.java b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleServer.java
index 49d2ce2..20c3f61 100644
--- a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleServer.java
+++ b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleServer.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.replicator.nrt;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.replicator.nrt;
  * limitations under the License.
  */
 
+package org.apache.lucene.replicator.nrt;
+
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleTransLog.java
----------------------------------------------------------------------
diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleTransLog.java b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleTransLog.java
index dd2085f..ce75a29 100644
--- a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleTransLog.java
+++ b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleTransLog.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.replicator.nrt;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.replicator.nrt;
  * limitations under the License.
  */
 
+package org.apache.lucene.replicator.nrt;
+
 import java.io.Closeable;
 import java.io.EOFException;
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestNRTReplication.java
----------------------------------------------------------------------
diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestNRTReplication.java b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestNRTReplication.java
index f98cf8d..a2e7cf9 100644
--- a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestNRTReplication.java
+++ b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestNRTReplication.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.replicator.nrt;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.replicator.nrt;
  * limitations under the License.
  */
 
+package org.apache.lucene.replicator.nrt;
+
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStreamReader;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestStressNRTReplication.java
----------------------------------------------------------------------
diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestStressNRTReplication.java b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestStressNRTReplication.java
index b53dc78..045bb38 100644
--- a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestStressNRTReplication.java
+++ b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestStressNRTReplication.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.replicator.nrt;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.replicator.nrt;
  * limitations under the License.
  */
 
+package org.apache.lucene.replicator.nrt;
+
 import java.io.BufferedReader;
 import java.io.Closeable;
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/ThreadPumper.java
----------------------------------------------------------------------
diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/ThreadPumper.java b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/ThreadPumper.java
index 73f3908..ff57ea6 100644
--- a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/ThreadPumper.java
+++ b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/ThreadPumper.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.replicator.nrt;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.replicator.nrt;
  * limitations under the License.
  */
 
+package org.apache.lucene.replicator.nrt;
+
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.PrintStream;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/spatial/src/java/org/apache/lucene/spatial/geopoint/document/GeoPointTokenStream.java
----------------------------------------------------------------------
diff --git a/lucene/spatial/src/java/org/apache/lucene/spatial/geopoint/document/GeoPointTokenStream.java b/lucene/spatial/src/java/org/apache/lucene/spatial/geopoint/document/GeoPointTokenStream.java
index 5ecd1db..87b228b 100644
--- a/lucene/spatial/src/java/org/apache/lucene/spatial/geopoint/document/GeoPointTokenStream.java
+++ b/lucene/spatial/src/java/org/apache/lucene/spatial/geopoint/document/GeoPointTokenStream.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.spatial.geopoint.document;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.spatial.geopoint.document;
  * limitations under the License.
  */
 
+package org.apache.lucene.spatial.geopoint.document;
+
 import java.util.Objects;
 
 import org.apache.lucene.analysis.TokenStream;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/spatial/src/test/org/apache/lucene/spatial/util/TestGeoPointField.java
----------------------------------------------------------------------
diff --git a/lucene/spatial/src/test/org/apache/lucene/spatial/util/TestGeoPointField.java b/lucene/spatial/src/test/org/apache/lucene/spatial/util/TestGeoPointField.java
index 567d46d..1e4f478 100644
--- a/lucene/spatial/src/test/org/apache/lucene/spatial/util/TestGeoPointField.java
+++ b/lucene/spatial/src/test/org/apache/lucene/spatial/util/TestGeoPointField.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.spatial.util;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.spatial.util;
  * limitations under the License.
  */
 
+package org.apache.lucene.spatial.util;
+
 import org.apache.lucene.spatial.geopoint.document.GeoPointField;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.LuceneTestCase;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/test-framework/src/java/org/apache/lucene/analysis/MockSynonymAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/analysis/MockSynonymAnalyzer.java b/lucene/test-framework/src/java/org/apache/lucene/analysis/MockSynonymAnalyzer.java
index a2ce33e..1bbaabc 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/analysis/MockSynonymAnalyzer.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/analysis/MockSynonymAnalyzer.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.analysis;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.analysis;
  * limitations under the License.
  */
 
+package org.apache.lucene.analysis;
+
 /** adds synonym of "dog" for "dogs", and synonym of "cavy" for "guinea pig". */
 public class MockSynonymAnalyzer extends Analyzer {
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/test-framework/src/java/org/apache/lucene/analysis/MockSynonymFilter.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/analysis/MockSynonymFilter.java b/lucene/test-framework/src/java/org/apache/lucene/analysis/MockSynonymFilter.java
index b50be07..1d8b513 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/analysis/MockSynonymFilter.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/analysis/MockSynonymFilter.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.analysis;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.analysis;
  * limitations under the License.
  */
 
+package org.apache.lucene.analysis;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/test-framework/src/java/org/apache/lucene/geo/EarthDebugger.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/geo/EarthDebugger.java b/lucene/test-framework/src/java/org/apache/lucene/geo/EarthDebugger.java
index 4f68adb..3553b15 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/geo/EarthDebugger.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/geo/EarthDebugger.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.geo;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.geo;
  * limitations under the License.
  */
 
+package org.apache.lucene.geo;
+
 import org.apache.lucene.util.SloppyMath;
 
 /** Draws shapes on the earth surface and renders using the very cool http://www.webglearth.org.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/test-framework/src/java/org/apache/lucene/mockfile/VirusCheckingFS.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/mockfile/VirusCheckingFS.java b/lucene/test-framework/src/java/org/apache/lucene/mockfile/VirusCheckingFS.java
index 2c24d10..eff62b1 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/mockfile/VirusCheckingFS.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/mockfile/VirusCheckingFS.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.mockfile;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.mockfile;
  * limitations under the License.
  */
 
+package org.apache.lucene.mockfile;
+
 import java.io.IOException;
 import java.nio.file.AccessDeniedException;
 import java.nio.file.FileSystem;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/lucene/test-framework/src/test/org/apache/lucene/mockfile/TestVirusCheckingFS.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/test/org/apache/lucene/mockfile/TestVirusCheckingFS.java b/lucene/test-framework/src/test/org/apache/lucene/mockfile/TestVirusCheckingFS.java
index 4a34ab8..91e9aa8 100644
--- a/lucene/test-framework/src/test/org/apache/lucene/mockfile/TestVirusCheckingFS.java
+++ b/lucene/test-framework/src/test/org/apache/lucene/mockfile/TestVirusCheckingFS.java
@@ -1,5 +1,3 @@
-package org.apache.lucene.mockfile;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.lucene.mockfile;
  * limitations under the License.
  */
 
+package org.apache.lucene.mockfile;
+
 import java.io.IOException;
 import java.io.OutputStream;
 import java.net.URI;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/java/org/apache/solr/cloud/LockTree.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/LockTree.java b/solr/core/src/java/org/apache/solr/cloud/LockTree.java
index d629d1c..8ae7f75 100644
--- a/solr/core/src/java/org/apache/solr/cloud/LockTree.java
+++ b/solr/core/src/java/org/apache/solr/cloud/LockTree.java
@@ -1,18 +1,3 @@
-package org.apache.solr.cloud;
-
-import java.lang.invoke.MethodHandles;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.solr.cloud.OverseerMessageHandler.Lock;
-import org.apache.solr.common.params.CollectionParams;
-import org.apache.solr.common.params.CollectionParams.LockLevel;
-import org.apache.solr.common.util.StrUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -30,6 +15,21 @@ import org.slf4j.LoggerFactory;
  * limitations under the License.
  */
 
+package org.apache.solr.cloud;
+
+import java.lang.invoke.MethodHandles;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.solr.cloud.OverseerMessageHandler.Lock;
+import org.apache.solr.common.params.CollectionParams;
+import org.apache.solr.common.params.CollectionParams.LockLevel;
+import org.apache.solr.common.util.StrUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 /**
  * This is a utility class that offers fine grained locking for various Collection Operations
  * This class is designed for single threaded operation. It's safe for multiple threads to use it

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepositoryFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepositoryFactory.java b/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepositoryFactory.java
index d035874..aaccceb 100644
--- a/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepositoryFactory.java
+++ b/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepositoryFactory.java
@@ -1,5 +1,3 @@
-package org.apache.solr.core.backup.repository;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.core.backup.repository;
  * limitations under the License.
  */
 
+package org.apache.solr.core.backup.repository;
+
 import java.lang.invoke.MethodHandles;
 import java.util.HashMap;
 import java.util.Map;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/java/org/apache/solr/core/backup/repository/HdfsBackupRepository.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/backup/repository/HdfsBackupRepository.java b/solr/core/src/java/org/apache/solr/core/backup/repository/HdfsBackupRepository.java
index 596c271..bb148de 100644
--- a/solr/core/src/java/org/apache/solr/core/backup/repository/HdfsBackupRepository.java
+++ b/solr/core/src/java/org/apache/solr/core/backup/repository/HdfsBackupRepository.java
@@ -1,5 +1,3 @@
-package org.apache.solr.core.backup.repository;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.core.backup.repository;
  * limitations under the License.
  */
 
+package org.apache.solr.core.backup.repository;
+
 import java.io.IOException;
 import java.io.OutputStream;
 import java.net.URI;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/java/org/apache/solr/core/backup/repository/LocalFileSystemRepository.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/backup/repository/LocalFileSystemRepository.java b/solr/core/src/java/org/apache/solr/core/backup/repository/LocalFileSystemRepository.java
index bb75a9e..4eb7790 100644
--- a/solr/core/src/java/org/apache/solr/core/backup/repository/LocalFileSystemRepository.java
+++ b/solr/core/src/java/org/apache/solr/core/backup/repository/LocalFileSystemRepository.java
@@ -1,5 +1,3 @@
-package org.apache.solr.core.backup.repository;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.core.backup.repository;
  * limitations under the License.
  */
 
+package org.apache.solr.core.backup.repository;
+
 import java.io.IOException;
 import java.io.OutputStream;
 import java.net.URI;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/java/org/apache/solr/handler/GraphHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/GraphHandler.java b/solr/core/src/java/org/apache/solr/handler/GraphHandler.java
index c4b42d9..4b043b2 100644
--- a/solr/core/src/java/org/apache/solr/handler/GraphHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/GraphHandler.java
@@ -1,5 +1,3 @@
-package org.apache.solr.handler;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.handler;
  * limitations under the License.
  */
 
+package org.apache.solr.handler;
+
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.util.HashMap;
@@ -278,4 +278,4 @@ public class GraphHandler extends RequestHandlerBase implements SolrCoreAware, P
     }
   }
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/java/org/apache/solr/response/GraphMLResponseWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/response/GraphMLResponseWriter.java b/solr/core/src/java/org/apache/solr/response/GraphMLResponseWriter.java
index d941991..7f2fac2 100644
--- a/solr/core/src/java/org/apache/solr/response/GraphMLResponseWriter.java
+++ b/solr/core/src/java/org/apache/solr/response/GraphMLResponseWriter.java
@@ -1,5 +1,3 @@
-package org.apache.solr.response;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.response;
  * limitations under the License.
  */
 
+package org.apache.solr.response;
+
 import java.io.IOException;
 import java.io.PrintWriter;
 import java.io.Writer;
@@ -164,4 +164,4 @@ public class GraphMLResponseWriter implements QueryResponseWriter {
 
     return s;
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/java/org/apache/solr/security/AutorizationEditOperation.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/security/AutorizationEditOperation.java b/solr/core/src/java/org/apache/solr/security/AutorizationEditOperation.java
index f60b6dd..88c7987 100644
--- a/solr/core/src/java/org/apache/solr/security/AutorizationEditOperation.java
+++ b/solr/core/src/java/org/apache/solr/security/AutorizationEditOperation.java
@@ -1,18 +1,3 @@
-package org.apache.solr.security;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.stream.Collectors;
-
-import org.apache.solr.util.CommandOperation;
-
-import static org.apache.solr.common.util.Utils.getDeepCopy;
-import static org.apache.solr.handler.admin.SecurityConfHandler.getListValue;
-import static org.apache.solr.handler.admin.SecurityConfHandler.getMapValue;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -30,6 +15,20 @@ import static org.apache.solr.handler.admin.SecurityConfHandler.getMapValue;
  * limitations under the License.
  */
 
+package org.apache.solr.security;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Collectors;
+
+import org.apache.solr.util.CommandOperation;
+
+import static org.apache.solr.common.util.Utils.getDeepCopy;
+import static org.apache.solr.handler.admin.SecurityConfHandler.getListValue;
+import static org.apache.solr.handler.admin.SecurityConfHandler.getMapValue;
 
 enum AutorizationEditOperation {
   SET_USER_ROLE("set-user-role") {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/java/org/apache/solr/security/Permission.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/security/Permission.java b/solr/core/src/java/org/apache/solr/security/Permission.java
index 0084bdc..33ae8f7 100644
--- a/solr/core/src/java/org/apache/solr/security/Permission.java
+++ b/solr/core/src/java/org/apache/solr/security/Permission.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.solr.security;
 
 import java.util.Collection;
@@ -19,24 +36,6 @@ import static java.util.Collections.singleton;
 import static java.util.Collections.singletonList;
 import static org.apache.solr.common.params.CommonParams.NAME;
 
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
 class Permission {
   String name;
   Set<String> path, role, collections, method;
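Every hunk in this change makes the same mechanical move: the ASF license header becomes the first thing in the file, followed by the package declaration and then the imports (which in a few files, such as Permission.java above, previously sat before the license). A minimal sketch of the resulting layout, with package and class names that are illustrative only:

  /*
   * Licensed to the Apache Software Foundation (ASF) under one or more
   * contributor license agreements. (ASF license header, as in the hunks above)
   */

  package org.apache.example;

  import java.util.List;

  /** Example class; only the ordering of header, package and imports matters here. */
  public class Example {
    private List<String> values;
  }
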

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java b/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java
index 4b6af47..d0e5c9a 100644
--- a/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java
+++ b/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java
@@ -1,5 +1,3 @@
-package org.apache.solr.security;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.security;
  * limitations under the License.
  */
 
+package org.apache.solr.security;
+
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java b/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java
index 7b8a731..01cf9a6 100644
--- a/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java
+++ b/solr/core/src/java/org/apache/solr/update/IndexFingerprint.java
@@ -1,5 +1,3 @@
-package org.apache.solr.update;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.update;
  * limitations under the License.
  */
 
+package org.apache.solr.update;
+
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.util.LinkedHashMap;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/java/org/apache/solr/update/processor/ClassificationUpdateProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/ClassificationUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/ClassificationUpdateProcessor.java
index 88ac0db..050fff0 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/ClassificationUpdateProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/ClassificationUpdateProcessor.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.solr.update.processor;
 
 import java.io.IOException;
@@ -17,23 +34,6 @@ import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.schema.SchemaField;
 import org.apache.solr.update.AddUpdateCommand;
 
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
 /**
  * This Class is a Request Update Processor to classify the document in input and add a field
  * containing the class to the Document.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/java/org/apache/solr/update/processor/ClassificationUpdateProcessorFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/ClassificationUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/ClassificationUpdateProcessorFactory.java
index 2f1b8d7..81bec2f 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/ClassificationUpdateProcessorFactory.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/ClassificationUpdateProcessorFactory.java
@@ -1,13 +1,3 @@
-package org.apache.solr.update.processor;
-
-import org.apache.lucene.index.IndexReader;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.params.SolrParams;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.request.SolrQueryRequest;
-import org.apache.solr.response.SolrQueryResponse;
-import org.apache.solr.schema.IndexSchema;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -25,6 +15,16 @@ import org.apache.solr.schema.IndexSchema;
  * limitations under the License.
  */
 
+package org.apache.solr.update.processor;
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.schema.IndexSchema;
+
 /**
  * This class implements an UpdateProcessorFactory for the Classification Update Processor.
  * It takes in input a series of parameter that will be necessary to instantiate and use the Classifier

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/test/org/apache/solr/cloud/CreateCollectionCleanupTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/CreateCollectionCleanupTest.java b/solr/core/src/test/org/apache/solr/cloud/CreateCollectionCleanupTest.java
index 2d1a7f8..989e1af 100644
--- a/solr/core/src/test/org/apache/solr/cloud/CreateCollectionCleanupTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/CreateCollectionCleanupTest.java
@@ -1,5 +1,3 @@
-package org.apache.solr.cloud;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.cloud;
  * limitations under the License.
  */
 
+package org.apache.solr.cloud;
+
 import java.util.ArrayList;
 import java.util.Properties;
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/test/org/apache/solr/cloud/OverseerModifyCollectionTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerModifyCollectionTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerModifyCollectionTest.java
index 6809b6d..e902ab4 100644
--- a/solr/core/src/test/org/apache/solr/cloud/OverseerModifyCollectionTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/OverseerModifyCollectionTest.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.solr.cloud;
 
 import static org.apache.solr.client.solrj.SolrRequest.METHOD.POST;
@@ -22,23 +39,6 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
 public class OverseerModifyCollectionTest extends AbstractFullDistribZkTestBase {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   


[23/51] [abbrv] lucene-solr:apiv2: SOLR-9290: MockCoreContainer should call super.shutdown()

Posted by sa...@apache.org.
SOLR-9290: MockCoreContainer should call super.shutdown()


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/833c8ee1
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/833c8ee1
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/833c8ee1

Branch: refs/heads/apiv2
Commit: 833c8ee152fc28b7ec767d0e8f8ecd346229d443
Parents: 343f374
Author: Shalin Shekhar Mangar <sh...@apache.org>
Authored: Sat Jul 16 00:59:35 2016 +0530
Committer: Shalin Shekhar Mangar <sh...@apache.org>
Committed: Sat Jul 16 00:59:35 2016 +0530

----------------------------------------------------------------------
 solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/833c8ee1/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java b/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java
index b865439..01d815a 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java
@@ -348,6 +348,7 @@ public class ZkControllerTest extends SolrTestCaseJ4 {
     @Override
     public void shutdown() {
       updateShardHandler.close();
+      super.shutdown();
     }
 
   }
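The one-line fix above delegates to the superclass after the mock's own cleanup. A minimal, self-contained illustration (hypothetical classes, not the Solr test) of why an overriding shutdown() should still call super.shutdown():

  class Base {
    void shutdown() {
      // release resources owned by the base class
    }
  }

  class MockBase extends Base {
    private final java.io.Closeable handler = () -> { /* nothing to release in this sketch */ };

    @Override
    void shutdown() {
      try {
        handler.close();   // close what the mock itself opened
      } catch (java.io.IOException ignored) {
      }
      super.shutdown();    // without this, the base-class cleanup never runs
    }
  }
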


[21/51] [abbrv] lucene-solr:apiv2: LUCENE-7383: fix test, only use BoostQuery once

Posted by sa...@apache.org.
LUCENE-7383: fix test, only use BoostQuery once


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/2e0b2f5e
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/2e0b2f5e
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/2e0b2f5e

Branch: refs/heads/apiv2
Commit: 2e0b2f5e37cb65103248467c02388d4e3f86dc91
Parents: f9c9470
Author: Martijn van Groningen <ma...@gmail.com>
Authored: Fri Jul 15 16:32:04 2016 +0200
Committer: Martijn van Groningen <mv...@apache.org>
Committed: Fri Jul 15 16:32:04 2016 +0200

----------------------------------------------------------------------
 .../org/apache/lucene/search/vectorhighlight/FieldQueryTest.java  | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e0b2f5e/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/FieldQueryTest.java
----------------------------------------------------------------------
diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/FieldQueryTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/FieldQueryTest.java
index 1976327..6b1e49d 100644
--- a/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/FieldQueryTest.java
+++ b/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/FieldQueryTest.java
@@ -959,10 +959,9 @@ public class FieldQueryTest extends AbstractTestCase {
     initBoost();
     Query childQuery = tq(boost, "a");
     Query query = new ToParentBlockJoinQuery(childQuery, new QueryBitSetProducer(new MatchAllDocsQuery()), ScoreMode.None);
-    query = new BoostQuery(query, boost );
     FieldQuery fq = new FieldQuery(query, true, true );
     Set<Query> flatQueries = new HashSet<>();
-    fq.flatten(query, reader, flatQueries, 1f );
+    fq.flatten(query, reader, flatQueries, 1f);
     assertCollectionQueries(flatQueries, tq(boost, "a"));
   }
 


[19/51] [abbrv] lucene-solr:apiv2: LUCENE-7376: Add support for ToParentBlockJoinQuery to fast vector highlighter's FieldQuery.

Posted by sa...@apache.org.
LUCENE-7376: Add support for ToParentBlockJoinQuery to fast vector highlighter's FieldQuery.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/3a71c7d8
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/3a71c7d8
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/3a71c7d8

Branch: refs/heads/apiv2
Commit: 3a71c7d8df59ae815be28495905f88b1c3207cb4
Parents: 51d4af6
Author: Martijn van Groningen <mv...@apache.org>
Authored: Thu Jul 14 10:06:15 2016 +0200
Committer: Martijn van Groningen <mv...@apache.org>
Committed: Thu Jul 14 10:10:44 2016 +0200

----------------------------------------------------------------------
 lucene/CHANGES.txt                                   |  3 +++
 .../lucene/search/vectorhighlight/FieldQuery.java    |  6 ++++++
 .../search/vectorhighlight/FieldQueryTest.java       | 15 +++++++++++++++
 3 files changed, 24 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3a71c7d8/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index ec8ecf0..9e19d35 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -109,6 +109,9 @@ Improvements
   chain that is about normalization for range/fuzzy/wildcard queries.
   (Adrien Grand)
 
+* LUCENE-7376: Add support for ToParentBlockJoinQuery to fast vector highlighter's
+  FieldQuery. (Martijn van Groningen)
+
 Optimizations
 
 * LUCENE-7330, LUCENE-7339: Speed up conjunction queries. (Adrien Grand)

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3a71c7d8/lucene/highlighter/src/java/org/apache/lucene/search/vectorhighlight/FieldQuery.java
----------------------------------------------------------------------
diff --git a/lucene/highlighter/src/java/org/apache/lucene/search/vectorhighlight/FieldQuery.java b/lucene/highlighter/src/java/org/apache/lucene/search/vectorhighlight/FieldQuery.java
index ac6cd94..8abeaff 100644
--- a/lucene/highlighter/src/java/org/apache/lucene/search/vectorhighlight/FieldQuery.java
+++ b/lucene/highlighter/src/java/org/apache/lucene/search/vectorhighlight/FieldQuery.java
@@ -37,6 +37,7 @@ import org.apache.lucene.search.MultiTermQuery;
 import org.apache.lucene.search.PhraseQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.join.ToParentBlockJoinQuery;
 import org.apache.lucene.search.vectorhighlight.FieldTermStack.TermInfo;
 
 /**
@@ -137,6 +138,11 @@ public class FieldQuery {
       if (q != null) {
         flatten( q, reader, flatQueries, boost);
       }
+    } else if (sourceQuery instanceof ToParentBlockJoinQuery) {
+      Query childQuery = ((ToParentBlockJoinQuery) sourceQuery).getChildQuery();
+      if (childQuery != null) {
+        flatten(childQuery, reader, flatQueries, boost);
+      }
     } else if (reader != null) {
       Query query = sourceQuery;
       Query rewritten;
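The new branch above unwraps the block-join query so the highlighter sees the child query's terms. A short sketch of the query shape this handles; field names are illustrative, reader is assumed to be an open IndexReader over parent/child blocks, and FieldQuery is obtained through FastVectorHighlighter:

  import org.apache.lucene.index.Term;
  import org.apache.lucene.search.Query;
  import org.apache.lucene.search.TermQuery;
  import org.apache.lucene.search.join.BitSetProducer;
  import org.apache.lucene.search.join.QueryBitSetProducer;
  import org.apache.lucene.search.join.ScoreMode;
  import org.apache.lucene.search.join.ToParentBlockJoinQuery;
  import org.apache.lucene.search.vectorhighlight.FastVectorHighlighter;
  import org.apache.lucene.search.vectorhighlight.FieldQuery;

  Query child = new TermQuery(new Term("body", "lucene"));
  BitSetProducer parents = new QueryBitSetProducer(new TermQuery(new Term("type", "parent")));
  Query join = new ToParentBlockJoinQuery(child, parents, ScoreMode.None);

  FastVectorHighlighter highlighter = new FastVectorHighlighter();
  FieldQuery fieldQuery = highlighter.getFieldQuery(join, reader);
  // before this change, flatten() ignored the block-join wrapper and no "body:lucene"
  // term reached the highlighter; now the child query is flattened as well
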

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3a71c7d8/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/FieldQueryTest.java
----------------------------------------------------------------------
diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/FieldQueryTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/FieldQueryTest.java
index ce82546..1976327 100644
--- a/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/FieldQueryTest.java
+++ b/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/FieldQueryTest.java
@@ -27,12 +27,16 @@ import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.BoostQuery;
 import org.apache.lucene.search.ConstantScoreQuery;
+import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.PrefixQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.RegexpQuery;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TermRangeQuery;
 import org.apache.lucene.search.WildcardQuery;
+import org.apache.lucene.search.join.QueryBitSetProducer;
+import org.apache.lucene.search.join.ScoreMode;
+import org.apache.lucene.search.join.ToParentBlockJoinQuery;
 import org.apache.lucene.search.vectorhighlight.FieldQuery.QueryPhraseMap;
 import org.apache.lucene.search.vectorhighlight.FieldTermStack.TermInfo;
 import org.apache.lucene.util.BytesRef;
@@ -951,4 +955,15 @@ public class FieldQueryTest extends AbstractTestCase {
     assertCollectionQueries( flatQueries, tq( boost, "A" ) );
   }
 
+  public void testFlattenToParentBlockJoinQuery() throws Exception {
+    initBoost();
+    Query childQuery = tq(boost, "a");
+    Query query = new ToParentBlockJoinQuery(childQuery, new QueryBitSetProducer(new MatchAllDocsQuery()), ScoreMode.None);
+    query = new BoostQuery(query, boost );
+    FieldQuery fq = new FieldQuery(query, true, true );
+    Set<Query> flatQueries = new HashSet<>();
+    fq.flatten(query, reader, flatQueries, 1f );
+    assertCollectionQueries(flatQueries, tq(boost, "a"));
+  }
+
 }


[10/51] [abbrv] lucene-solr:apiv2: LUCENE-7355: Add Analyzer#normalize() and use it in query parsers.

Posted by sa...@apache.org.
LUCENE-7355: Add Analyzer#normalize() and use it in query parsers.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/e92a38af
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/e92a38af
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/e92a38af

Branch: refs/heads/apiv2
Commit: e92a38af90d12e51390b4307ccbe0c24ac7b6b4e
Parents: ced9140
Author: Adrien Grand <jp...@gmail.com>
Authored: Tue Jun 28 18:23:11 2016 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Tue Jul 12 16:33:07 2016 +0200

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |  10 +
 lucene/MIGRATE.txt                              |  11 +
 .../lucene/analysis/ar/ArabicAnalyzer.java      |   8 +
 .../lucene/analysis/bg/BulgarianAnalyzer.java   |   7 +
 .../lucene/analysis/br/BrazilianAnalyzer.java   |   7 +
 .../lucene/analysis/ca/CatalanAnalyzer.java     |   8 +
 .../apache/lucene/analysis/cjk/CJKAnalyzer.java |   7 +
 .../lucene/analysis/ckb/SoraniAnalyzer.java     |   9 +
 .../lucene/analysis/core/SimpleAnalyzer.java    |   6 +
 .../lucene/analysis/core/StopAnalyzer.java      |   6 +
 .../lucene/analysis/custom/CustomAnalyzer.java  |  28 +-
 .../lucene/analysis/cz/CzechAnalyzer.java       |   7 +
 .../lucene/analysis/da/DanishAnalyzer.java      |   7 +
 .../lucene/analysis/de/GermanAnalyzer.java      |   8 +
 .../lucene/analysis/el/GreekAnalyzer.java       |   7 +
 .../lucene/analysis/en/EnglishAnalyzer.java     |   7 +
 .../lucene/analysis/es/SpanishAnalyzer.java     |   7 +
 .../lucene/analysis/eu/BasqueAnalyzer.java      |   7 +
 .../lucene/analysis/fa/PersianAnalyzer.java     |  14 +-
 .../lucene/analysis/fi/FinnishAnalyzer.java     |   7 +
 .../lucene/analysis/fr/FrenchAnalyzer.java      |   8 +
 .../lucene/analysis/ga/IrishAnalyzer.java       |   8 +
 .../lucene/analysis/gl/GalicianAnalyzer.java    |   7 +
 .../lucene/analysis/hi/HindiAnalyzer.java       |  11 +
 .../lucene/analysis/hu/HungarianAnalyzer.java   |   7 +
 .../lucene/analysis/hy/ArmenianAnalyzer.java    |   7 +
 .../lucene/analysis/id/IndonesianAnalyzer.java  |   7 +
 .../lucene/analysis/it/ItalianAnalyzer.java     |   8 +
 .../lucene/analysis/lt/LithuanianAnalyzer.java  |   7 +
 .../lucene/analysis/lv/LatvianAnalyzer.java     |   7 +
 .../lucene/analysis/nl/DutchAnalyzer.java       |   7 +
 .../lucene/analysis/no/NorwegianAnalyzer.java   |   7 +
 .../lucene/analysis/pt/PortugueseAnalyzer.java  |   7 +
 .../lucene/analysis/ro/RomanianAnalyzer.java    |   7 +
 .../lucene/analysis/ru/RussianAnalyzer.java     |   7 +
 .../analysis/standard/ClassicAnalyzer.java      |   5 +
 .../standard/UAX29URLEmailAnalyzer.java         |   5 +
 .../lucene/analysis/sv/SwedishAnalyzer.java     |   7 +
 .../apache/lucene/analysis/th/ThaiAnalyzer.java |   7 +
 .../lucene/analysis/tr/TurkishAnalyzer.java     |   7 +
 .../lucene/collation/CollationKeyAnalyzer.java  |   7 +
 .../core/TestAllAnalyzersHaveFactories.java     |   2 +
 .../lucene/analysis/core/TestAnalyzers.java     |   4 +
 .../lucene/analysis/core/TestRandomChains.java  |  10 +-
 .../analysis/custom/TestCustomAnalyzer.java     | 143 ++++++++++
 .../lucene/analysis/ja/JapaneseAnalyzer.java    |   7 +
 .../analysis/morfologik/MorfologikAnalyzer.java |   6 +
 .../analysis/cn/smart/SmartChineseAnalyzer.java |   6 +
 .../lucene/analysis/pl/PolishAnalyzer.java      |   7 +
 .../org/apache/lucene/analysis/Analyzer.java    | 135 +++++++++-
 .../analysis/standard/StandardAnalyzer.java     |   7 +
 .../analysis/standard/TestStandardAnalyzer.java |   6 +
 .../analyzing/AnalyzingQueryParser.java         | 202 --------------
 .../queryparser/analyzing/package-info.java     |  22 --
 .../queryparser/classic/QueryParserBase.java    | 140 +++-------
 .../complexPhrase/ComplexPhraseQueryParser.java |  17 +-
 .../CommonQueryParserConfiguration.java         |  12 -
 .../flexible/standard/StandardQueryParser.java  |  30 ---
 .../config/StandardQueryConfigHandler.java      |   9 -
 .../processors/FuzzyQueryNodeProcessor.java     |  11 +-
 ...owercaseExpandedTermsQueryNodeProcessor.java | 100 -------
 .../processors/RegexpQueryNodeProcessor.java    |  56 ++++
 .../StandardQueryNodeProcessorPipeline.java     |   4 +-
 .../processors/TermRangeQueryNodeProcessor.java |  11 +-
 .../processors/WildcardQueryNodeProcessor.java  |  58 +++-
 .../queryparser/simple/SimpleQueryParser.java   |   9 +-
 .../analyzing/TestAnalyzingQueryParser.java     | 268 -------------------
 .../queryparser/classic/TestQueryParser.java    | 241 ++++++++++++++---
 .../precedence/TestPrecedenceQueryParser.java   |  44 +--
 .../flexible/standard/TestQPHelper.java         |  57 ++--
 .../flexible/standard/TestStandardQP.java       |  15 --
 .../queryparser/util/QueryParserTestBase.java   |  63 ++---
 .../analysis/BaseTokenStreamTestCase.java       |   5 +-
 .../apache/lucene/analysis/MockAnalyzer.java    |  11 +-
 .../lucene/analysis/MockBytesAnalyzer.java      |   7 +
 .../lucene/analysis/MockLowerCaseFilter.java    |  40 +++
 .../apache/solr/analysis/TokenizerChain.java    |  28 +-
 77 files changed, 1179 insertions(+), 942 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index eba11c9..c520e1b 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -15,6 +15,9 @@ API Changes
 
 * LUCENE-7368: Removed query normalization. (Adrien Grand)
 
+* LUCENE-7355: AnalyzingQueryParser has been removed as its functionality has
+  been folded into the classic QueryParser. (Adrien Grand)
+
 Bug Fixes
 
 Improvements
@@ -48,6 +51,9 @@ New Features
   methods Directory.rename and Directory.syncMetaData instead (Robert Muir,
   Uwe Schindler, Mike McCandless)
 
+* LUCENE-7355: Added Analyzer#normalize(), which only applies normalization to
+  an input string. (Adrien Grand)
+
 Bug Fixes
 
 * LUCENE-6662: Fixed potential resource leaks. (Rishabh Patel via Adrien Grand)
@@ -99,6 +105,10 @@ Improvements
 * LUCENE-7276: MatchNoDocsQuery now includes an optional reason for
   why it was used (Jim Ferenczi via Mike McCandless)
 
+* LUCENE-7355: AnalyzingQueryParser now only applies the subset of the analysis
+  chain that is about normalization for range/fuzzy/wildcard queries.
+  (Adrien Grand)
+
 Optimizations
 
 * LUCENE-7330, LUCENE-7339: Speed up conjunction queries. (Adrien Grand)

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/MIGRATE.txt
----------------------------------------------------------------------
diff --git a/lucene/MIGRATE.txt b/lucene/MIGRATE.txt
index f914529..06e6a81 100644
--- a/lucene/MIGRATE.txt
+++ b/lucene/MIGRATE.txt
@@ -36,3 +36,14 @@ Query normalization's goal was to make scores comparable across queries, which
 was only implemented by the ClassicSimilarity. Since ClassicSimilarity is not
 the default similarity anymore, this functionality has been removed. Boosts are
 now propagated through Query#createWeight.
+
+## AnalyzingQueryParser removed (LUCENE-7355)
+
+The functionality of AnalyzingQueryParser has been folded into the classic
+QueryParser, which now passes terms through Analyzer#normalize when generating
+queries.
+
+## CommonQueryParserConfiguration.setLowerCaseExpandedTerms removed (LUCENE-7355)
+
+This option has been removed as expanded terms are now normalized through
+Analyzer#normalize.
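As the MIGRATE notes above describe, expanded terms are now passed through Analyzer#normalize instead of a separate lowercasing option. A short sketch of the new entry point, with StandardAnalyzer chosen only as an example:

  import org.apache.lucene.analysis.Analyzer;
  import org.apache.lucene.analysis.standard.StandardAnalyzer;
  import org.apache.lucene.util.BytesRef;

  Analyzer analyzer = new StandardAnalyzer();
  BytesRef term = analyzer.normalize("title", "WiFi");
  // term.utf8ToString() -> "wifi": the term is lowercased by the normalization
  // chain, but not tokenized, stemmed or stop-filtered
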

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/ar/ArabicAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/ar/ArabicAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/ar/ArabicAnalyzer.java
index 889a886..61100dd 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/ar/ArabicAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/ar/ArabicAnalyzer.java
@@ -143,5 +143,13 @@ public final class ArabicAnalyzer extends StopwordAnalyzerBase {
     }
     return new TokenStreamComponents(source, new ArabicStemFilter(result));
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new LowerCaseFilter(in);
+    result = new DecimalDigitFilter(result);
+    result = new ArabicNormalizationFilter(result);
+    return result;
+  }
 }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/bg/BulgarianAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/bg/BulgarianAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/bg/BulgarianAnalyzer.java
index 9cb0657..06c7eea 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/bg/BulgarianAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/bg/BulgarianAnalyzer.java
@@ -126,4 +126,11 @@ public final class BulgarianAnalyzer extends StopwordAnalyzerBase {
     result = new BulgarianStemFilter(result);
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/br/BrazilianAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/br/BrazilianAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/br/BrazilianAnalyzer.java
index 5dd0cbc..ad1af92 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/br/BrazilianAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/br/BrazilianAnalyzer.java
@@ -127,5 +127,12 @@ public final class BrazilianAnalyzer extends StopwordAnalyzerBase {
       result = new SetKeywordMarkerFilter(result, excltable);
     return new TokenStreamComponents(source, new BrazilianStemFilter(result));
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/ca/CatalanAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/ca/CatalanAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/ca/CatalanAnalyzer.java
index 739b61a..56f36e1 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/ca/CatalanAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/ca/CatalanAnalyzer.java
@@ -130,4 +130,12 @@ public final class CatalanAnalyzer extends StopwordAnalyzerBase {
     result = new SnowballFilter(result, new CatalanStemmer());
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new ElisionFilter(result, DEFAULT_ARTICLES);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/cjk/CJKAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/cjk/CJKAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/cjk/CJKAnalyzer.java
index d500ff9..d4214a1 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/cjk/CJKAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/cjk/CJKAnalyzer.java
@@ -92,4 +92,11 @@ public final class CJKAnalyzer extends StopwordAnalyzerBase {
     result = new CJKBigramFilter(result);
     return new TokenStreamComponents(source, new StopFilter(result, stopwords));
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new CJKWidthFilter(in);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/ckb/SoraniAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/ckb/SoraniAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/ckb/SoraniAnalyzer.java
index 0f283b8..e7ce3f3 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/ckb/SoraniAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/ckb/SoraniAnalyzer.java
@@ -126,4 +126,13 @@ public final class SoraniAnalyzer extends StopwordAnalyzerBase {
     result = new SoraniStemFilter(result);
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new SoraniNormalizationFilter(result);
+    result = new LowerCaseFilter(result);
+    result = new DecimalDigitFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/core/SimpleAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/core/SimpleAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/core/SimpleAnalyzer.java
index d0fdcf6..6e0f2f0 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/core/SimpleAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/core/SimpleAnalyzer.java
@@ -19,6 +19,7 @@ package org.apache.lucene.analysis.core;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.LowerCaseFilter;
+import org.apache.lucene.analysis.TokenStream;
 
 /** An {@link Analyzer} that filters {@link LetterTokenizer} 
  *  with {@link LowerCaseFilter} 
@@ -35,4 +36,9 @@ public final class SimpleAnalyzer extends Analyzer {
   protected TokenStreamComponents createComponents(final String fieldName) {
     return new TokenStreamComponents(new LowerCaseTokenizer());
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    return new LowerCaseFilter(in);
+  }
 }
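The per-analyzer hunks in this change all follow the same shape: createComponents() keeps the full indexing chain, while the new normalize() override returns only the filters that make sense for a single query term. A standalone sketch of the pattern for a user-defined analyzer (class name and filter choice illustrative):

  import org.apache.lucene.analysis.Analyzer;
  import org.apache.lucene.analysis.LowerCaseFilter;
  import org.apache.lucene.analysis.TokenStream;
  import org.apache.lucene.analysis.Tokenizer;
  import org.apache.lucene.analysis.standard.StandardTokenizer;

  public final class MyAnalyzer extends Analyzer {
    @Override
    protected TokenStreamComponents createComponents(String fieldName) {
      Tokenizer source = new StandardTokenizer();
      return new TokenStreamComponents(source, new LowerCaseFilter(source));
    }

    @Override
    protected TokenStream normalize(String fieldName, TokenStream in) {
      // only the normalizing filters; no tokenizer-dependent or stateful steps
      return new LowerCaseFilter(in);
    }
  }
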

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/core/StopAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/core/StopAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/core/StopAnalyzer.java
index 3fa4982..7d7f532 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/core/StopAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/core/StopAnalyzer.java
@@ -25,6 +25,7 @@ import org.apache.lucene.analysis.CharArraySet;
 import org.apache.lucene.analysis.LowerCaseFilter;
 import org.apache.lucene.analysis.StopFilter;
 import org.apache.lucene.analysis.StopwordAnalyzerBase;
+import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.analysis.WordlistLoader;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
@@ -79,5 +80,10 @@ public final class StopAnalyzer extends StopwordAnalyzerBase {
     final Tokenizer source = new LowerCaseTokenizer();
     return new TokenStreamComponents(source, new StopFilter(source, stopwords));
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    return new LowerCaseFilter(in);
+  }
 }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/custom/CustomAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/custom/CustomAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/custom/CustomAnalyzer.java
index f2ed01f..b2de5e8 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/custom/CustomAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/custom/CustomAnalyzer.java
@@ -37,6 +37,7 @@ import org.apache.lucene.analysis.util.AbstractAnalysisFactory;
 import org.apache.lucene.analysis.util.CharFilterFactory;
 import org.apache.lucene.analysis.util.ClasspathResourceLoader;
 import org.apache.lucene.analysis.util.FilesystemResourceLoader;
+import org.apache.lucene.analysis.util.MultiTermAwareComponent;
 import org.apache.lucene.analysis.util.ResourceLoader;
 import org.apache.lucene.analysis.util.ResourceLoaderAware;
 import org.apache.lucene.analysis.util.TokenFilterFactory;
@@ -118,15 +119,38 @@ public final class CustomAnalyzer extends Analyzer {
   }
 
   @Override
+  protected Reader initReaderForNormalization(String fieldName, Reader reader) {
+    for (CharFilterFactory charFilter : charFilters) {
+      if (charFilter instanceof MultiTermAwareComponent) {
+        charFilter = (CharFilterFactory) ((MultiTermAwareComponent) charFilter).getMultiTermComponent();
+        reader = charFilter.create(reader);
+      }
+    }
+    return reader;
+  }
+
+  @Override
   protected TokenStreamComponents createComponents(String fieldName) {
-    final Tokenizer tk = tokenizer.create();
+    final Tokenizer tk = tokenizer.create(attributeFactory());
     TokenStream ts = tk;
     for (final TokenFilterFactory filter : tokenFilters) {
       ts = filter.create(ts);
     }
     return new TokenStreamComponents(tk, ts);
   }
-  
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = in;
+    for (TokenFilterFactory filter : tokenFilters) {
+      if (filter instanceof MultiTermAwareComponent) {
+        filter = (TokenFilterFactory) ((MultiTermAwareComponent) filter).getMultiTermComponent();
+        result = filter.create(result);
+      }
+    }
+    return result;
+  }
+
   @Override
   public int getPositionIncrementGap(String fieldName) {
     // use default from Analyzer base class if null

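CustomAnalyzer's normalization path keeps only the factories that implement MultiTermAwareComponent, swaps each for its getMultiTermComponent(), and chains them in declaration order. A rough usage sketch, assuming the stock StandardTokenizerFactory and LowerCaseFilterFactory (the latter is multi-term aware); the field name and input are illustrative only:

    import org.apache.lucene.analysis.core.LowerCaseFilterFactory;
    import org.apache.lucene.analysis.custom.CustomAnalyzer;
    import org.apache.lucene.analysis.standard.StandardTokenizerFactory;
    import org.apache.lucene.util.BytesRef;

    public class CustomNormalizeSketch {
      public static void main(String[] args) throws Exception {
        CustomAnalyzer analyzer = CustomAnalyzer.builder()
            .withTokenizer(StandardTokenizerFactory.class)
            .addTokenFilter(LowerCaseFilterFactory.class)
            .build();
        // LowerCaseFilterFactory is multi-term aware, so normalize() applies it.
        BytesRef term = analyzer.normalize("title", "FooBar");
        System.out.println(term.utf8ToString());  // "foobar"
        analyzer.close();
      }
    }
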
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/cz/CzechAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/cz/CzechAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/cz/CzechAnalyzer.java
index 9777179..fbb9efa 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/cz/CzechAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/cz/CzechAnalyzer.java
@@ -125,5 +125,12 @@ public final class CzechAnalyzer extends StopwordAnalyzerBase {
     result = new CzechStemFilter(result);
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/da/DanishAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/da/DanishAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/da/DanishAnalyzer.java
index f9c316d..ccbd9d1 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/da/DanishAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/da/DanishAnalyzer.java
@@ -124,4 +124,11 @@ public final class DanishAnalyzer extends StopwordAnalyzerBase {
     result = new SnowballFilter(result, new DanishStemmer());
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/de/GermanAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/de/GermanAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/de/GermanAnalyzer.java
index 790fc48..8a39945 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/de/GermanAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/de/GermanAnalyzer.java
@@ -139,4 +139,12 @@ public final class GermanAnalyzer extends StopwordAnalyzerBase {
     result = new GermanLightStemFilter(result);
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new LowerCaseFilter(result);
+    result = new GermanNormalizationFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/el/GreekAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/el/GreekAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/el/GreekAnalyzer.java
index c85b6ec..bd09d25 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/el/GreekAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/el/GreekAnalyzer.java
@@ -104,4 +104,11 @@ public final class GreekAnalyzer extends StopwordAnalyzerBase {
     result = new GreekStemFilter(result);
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new GreekLowerCaseFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/en/EnglishAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/en/EnglishAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/en/EnglishAnalyzer.java
index 16dc0c5..94ba43a 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/en/EnglishAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/en/EnglishAnalyzer.java
@@ -107,4 +107,11 @@ public final class EnglishAnalyzer extends StopwordAnalyzerBase {
     result = new PorterStemFilter(result);
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/es/SpanishAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/es/SpanishAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/es/SpanishAnalyzer.java
index ab5b6c3..3b21cdd 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/es/SpanishAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/es/SpanishAnalyzer.java
@@ -123,4 +123,11 @@ public final class SpanishAnalyzer extends StopwordAnalyzerBase {
     result = new SpanishLightStemFilter(result);
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/eu/BasqueAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/eu/BasqueAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/eu/BasqueAnalyzer.java
index cff2da0..4bc1ba7 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/eu/BasqueAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/eu/BasqueAnalyzer.java
@@ -121,4 +121,11 @@ public final class BasqueAnalyzer extends StopwordAnalyzerBase {
     result = new SnowballFilter(result, new BasqueStemmer());
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/fa/PersianAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/fa/PersianAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/fa/PersianAnalyzer.java
index f29dfd3..9aebc2d 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/fa/PersianAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/fa/PersianAnalyzer.java
@@ -29,6 +29,7 @@ import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.analysis.ar.ArabicNormalizationFilter;
 import org.apache.lucene.analysis.core.DecimalDigitFilter;
+import org.apache.lucene.analysis.standard.StandardFilter;
 import org.apache.lucene.analysis.standard.StandardTokenizer;
 
 /**
@@ -125,7 +126,18 @@ public final class PersianAnalyzer extends StopwordAnalyzerBase {
      */
     return new TokenStreamComponents(source, new StopFilter(result, stopwords));
   }
-  
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new LowerCaseFilter(result);
+    result = new DecimalDigitFilter(result);
+    result = new ArabicNormalizationFilter(result);
+    /* additional persian-specific normalization */
+    result = new PersianNormalizationFilter(result);
+    return result;
+  }
+
   /** 
    * Wraps the Reader with {@link PersianCharFilter}
    */

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/fi/FinnishAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/fi/FinnishAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/fi/FinnishAnalyzer.java
index 6b00101..69cc537 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/fi/FinnishAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/fi/FinnishAnalyzer.java
@@ -124,4 +124,11 @@ public final class FinnishAnalyzer extends StopwordAnalyzerBase {
     result = new SnowballFilter(result, new FinnishStemmer());
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/fr/FrenchAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/fr/FrenchAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/fr/FrenchAnalyzer.java
index 5f90246..2e072be 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/fr/FrenchAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/fr/FrenchAnalyzer.java
@@ -144,5 +144,13 @@ public final class FrenchAnalyzer extends StopwordAnalyzerBase {
     result = new FrenchLightStemFilter(result);
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new ElisionFilter(result, DEFAULT_ARTICLES);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/ga/IrishAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/ga/IrishAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/ga/IrishAnalyzer.java
index 1ca3455..3ae366d 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/ga/IrishAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/ga/IrishAnalyzer.java
@@ -141,4 +141,12 @@ public final class IrishAnalyzer extends StopwordAnalyzerBase {
     result = new SnowballFilter(result, new IrishStemmer());
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new ElisionFilter(result, DEFAULT_ARTICLES);
+    result = new IrishLowerCaseFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/gl/GalicianAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/gl/GalicianAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/gl/GalicianAnalyzer.java
index 372a6ec..4f70596 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/gl/GalicianAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/gl/GalicianAnalyzer.java
@@ -122,4 +122,11 @@ public final class GalicianAnalyzer extends StopwordAnalyzerBase {
     result = new GalicianStemFilter(result);
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/hi/HindiAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/hi/HindiAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/hi/HindiAnalyzer.java
index 8e4868b..f339295 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/hi/HindiAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/hi/HindiAnalyzer.java
@@ -29,6 +29,7 @@ import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.analysis.core.DecimalDigitFilter;
 import org.apache.lucene.analysis.in.IndicNormalizationFilter;
 import org.apache.lucene.analysis.miscellaneous.SetKeywordMarkerFilter;
+import org.apache.lucene.analysis.standard.StandardFilter;
 import org.apache.lucene.analysis.standard.StandardTokenizer;
 
 /**
@@ -125,4 +126,14 @@ public final class HindiAnalyzer extends StopwordAnalyzerBase {
     result = new HindiStemFilter(result);
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new LowerCaseFilter(result);
+    result = new DecimalDigitFilter(result);
+    result = new IndicNormalizationFilter(result);
+    result = new HindiNormalizationFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/hu/HungarianAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/hu/HungarianAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/hu/HungarianAnalyzer.java
index 0615bdc..e980f5a 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/hu/HungarianAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/hu/HungarianAnalyzer.java
@@ -124,4 +124,11 @@ public final class HungarianAnalyzer extends StopwordAnalyzerBase {
     result = new SnowballFilter(result, new HungarianStemmer());
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/hy/ArmenianAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/hy/ArmenianAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/hy/ArmenianAnalyzer.java
index 8c04639..95506e1 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/hy/ArmenianAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/hy/ArmenianAnalyzer.java
@@ -121,4 +121,11 @@ public final class ArmenianAnalyzer extends StopwordAnalyzerBase {
     result = new SnowballFilter(result, new ArmenianStemmer());
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/id/IndonesianAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/id/IndonesianAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/id/IndonesianAnalyzer.java
index fc9b4d2..9804bea 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/id/IndonesianAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/id/IndonesianAnalyzer.java
@@ -119,4 +119,11 @@ public final class IndonesianAnalyzer extends StopwordAnalyzerBase {
     }
     return new TokenStreamComponents(source, new IndonesianStemFilter(result));
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/it/ItalianAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/it/ItalianAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/it/ItalianAnalyzer.java
index a18aa5d..32f4e30 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/it/ItalianAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/it/ItalianAnalyzer.java
@@ -133,4 +133,12 @@ public final class ItalianAnalyzer extends StopwordAnalyzerBase {
     result = new ItalianLightStemFilter(result);
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new ElisionFilter(result, DEFAULT_ARTICLES);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/lt/LithuanianAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/lt/LithuanianAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/lt/LithuanianAnalyzer.java
index 5e24cf9..4eccc51 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/lt/LithuanianAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/lt/LithuanianAnalyzer.java
@@ -121,4 +121,11 @@ public final class LithuanianAnalyzer extends StopwordAnalyzerBase {
     result = new SnowballFilter(result, new LithuanianStemmer());
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/lv/LatvianAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/lv/LatvianAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/lv/LatvianAnalyzer.java
index 0a016af..1b08b3b 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/lv/LatvianAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/lv/LatvianAnalyzer.java
@@ -122,4 +122,11 @@ public final class LatvianAnalyzer extends StopwordAnalyzerBase {
     result = new LatvianStemFilter(result);
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/nl/DutchAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/nl/DutchAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/nl/DutchAnalyzer.java
index 0391425..900d9c6 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/nl/DutchAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/nl/DutchAnalyzer.java
@@ -159,4 +159,11 @@ public final class DutchAnalyzer extends Analyzer {
     result = new SnowballFilter(result, new org.tartarus.snowball.ext.DutchStemmer());
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/no/NorwegianAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/no/NorwegianAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/no/NorwegianAnalyzer.java
index c413793..3570ad4 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/no/NorwegianAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/no/NorwegianAnalyzer.java
@@ -124,5 +124,12 @@ public final class NorwegianAnalyzer extends StopwordAnalyzerBase {
     result = new SnowballFilter(result, new NorwegianStemmer());
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/pt/PortugueseAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/pt/PortugueseAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/pt/PortugueseAnalyzer.java
index 769e142..8f54803 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/pt/PortugueseAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/pt/PortugueseAnalyzer.java
@@ -123,4 +123,11 @@ public final class PortugueseAnalyzer extends StopwordAnalyzerBase {
     result = new PortugueseLightStemFilter(result);
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/ro/RomanianAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/ro/RomanianAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/ro/RomanianAnalyzer.java
index 06ff999..1b74184 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/ro/RomanianAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/ro/RomanianAnalyzer.java
@@ -126,4 +126,11 @@ public final class RomanianAnalyzer extends StopwordAnalyzerBase {
     result = new SnowballFilter(result, new RomanianStemmer());
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/ru/RussianAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/ru/RussianAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/ru/RussianAnalyzer.java
index dfe8ef3..76bf495 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/ru/RussianAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/ru/RussianAnalyzer.java
@@ -121,4 +121,11 @@ public final class RussianAnalyzer extends StopwordAnalyzerBase {
       result = new SnowballFilter(result, new org.tartarus.snowball.ext.RussianStemmer());
       return new TokenStreamComponents(source, result);
     }
+
+    @Override
+    protected TokenStream normalize(String fieldName, TokenStream in) {
+      TokenStream result = new StandardFilter(in);
+      result = new LowerCaseFilter(result);
+      return result;
+    }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/ClassicAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/ClassicAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/ClassicAnalyzer.java
index dc6c118..ef2ef7e 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/ClassicAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/ClassicAnalyzer.java
@@ -100,4 +100,9 @@ public final class ClassicAnalyzer extends StopwordAnalyzerBase {
       }
     };
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    return new LowerCaseFilter(in);
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailAnalyzer.java
index 9994884..fe71b7e 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailAnalyzer.java
@@ -97,4 +97,9 @@ public final class UAX29URLEmailAnalyzer extends StopwordAnalyzerBase {
       }
     };
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    return new LowerCaseFilter(in);
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/sv/SwedishAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/sv/SwedishAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/sv/SwedishAnalyzer.java
index fd2aa2e..3896d3e 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/sv/SwedishAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/sv/SwedishAnalyzer.java
@@ -124,4 +124,11 @@ public final class SwedishAnalyzer extends StopwordAnalyzerBase {
     result = new SnowballFilter(result, new SwedishStemmer());
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/th/ThaiAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/th/ThaiAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/th/ThaiAnalyzer.java
index 11f3f77..c1426b8 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/th/ThaiAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/th/ThaiAnalyzer.java
@@ -102,4 +102,11 @@ public final class ThaiAnalyzer extends StopwordAnalyzerBase {
     result = new StopFilter(result, stopwords);
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new LowerCaseFilter(in);
+    result = new DecimalDigitFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/analysis/tr/TurkishAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/tr/TurkishAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/tr/TurkishAnalyzer.java
index a21495f..719e434 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/tr/TurkishAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/tr/TurkishAnalyzer.java
@@ -127,4 +127,11 @@ public final class TurkishAnalyzer extends StopwordAnalyzerBase {
     result = new SnowballFilter(result, new TurkishStemmer());
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new TurkishLowerCaseFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/java/org/apache/lucene/collation/CollationKeyAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/collation/CollationKeyAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/collation/CollationKeyAnalyzer.java
index f7b15f6..ea98731 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/collation/CollationKeyAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/collation/CollationKeyAnalyzer.java
@@ -20,6 +20,8 @@ package org.apache.lucene.collation;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.core.KeywordTokenizer;
+import org.apache.lucene.util.AttributeFactory;
+
 import java.text.Collator;
 
 /**
@@ -83,6 +85,11 @@ public final class CollationKeyAnalyzer extends Analyzer {
   }
 
   @Override
+  protected AttributeFactory attributeFactory() {
+    return factory;
+  }
+
+  @Override
   protected TokenStreamComponents createComponents(String fieldName) {
     KeywordTokenizer tokenizer = new KeywordTokenizer(factory, KeywordTokenizer.DEFAULT_BUFFER_SIZE);
     return new TokenStreamComponents(tokenizer, tokenizer);

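The attributeFactory() override is what makes normalization consistent with indexing for this analyzer: the collation-aware factory turns the single normalized token into collation-key bytes rather than UTF-8 text. A hedged sketch, with an arbitrary locale and field name:

    import java.text.Collator;
    import java.util.Locale;

    import org.apache.lucene.collation.CollationKeyAnalyzer;
    import org.apache.lucene.util.BytesRef;

    public class CollationNormalizeSketch {
      public static void main(String[] args) {
        Collator collator = Collator.getInstance(Locale.GERMANY);
        CollationKeyAnalyzer analyzer = new CollationKeyAnalyzer(collator);
        // The bytes are a binary collation key, not UTF-8; they should match what
        // the analyzer produces for the same field value at index time.
        BytesRef key = analyzer.normalize("name", "Müller");
        System.out.println(key.length + " collation key bytes");
        analyzer.close();
      }
    }
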
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestAllAnalyzersHaveFactories.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestAllAnalyzersHaveFactories.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestAllAnalyzersHaveFactories.java
index d826a60..7099566 100644
--- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestAllAnalyzersHaveFactories.java
+++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestAllAnalyzersHaveFactories.java
@@ -35,6 +35,7 @@ import org.apache.lucene.analysis.MockCharFilter;
 import org.apache.lucene.analysis.MockFixedLengthPayloadFilter;
 import org.apache.lucene.analysis.MockGraphTokenFilter;
 import org.apache.lucene.analysis.MockHoleInjectingTokenFilter;
+import org.apache.lucene.analysis.MockLowerCaseFilter;
 import org.apache.lucene.analysis.MockRandomLookaheadTokenFilter;
 import org.apache.lucene.analysis.MockSynonymFilter;
 import org.apache.lucene.analysis.MockTokenFilter;
@@ -75,6 +76,7 @@ public class TestAllAnalyzersHaveFactories extends LuceneTestCase {
       MockFixedLengthPayloadFilter.class,
       MockGraphTokenFilter.class,
       MockHoleInjectingTokenFilter.class,
+      MockLowerCaseFilter.class,
       MockRandomLookaheadTokenFilter.class,
       MockSynonymFilter.class,
       MockTokenFilter.class,

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestAnalyzers.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestAnalyzers.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestAnalyzers.java
index 8f7f2cd..6d514d1 100644
--- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestAnalyzers.java
+++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestAnalyzers.java
@@ -52,6 +52,7 @@ public class TestAnalyzers extends BaseTokenStreamTestCase {
                      new String[] { "b" });
     assertAnalyzesTo(a, "\"QUOTED\" word", 
                      new String[] { "quoted", "word" });
+    assertEquals(new BytesRef("\"\\�3[]()! cz@"), a.normalize("dummy", "\"\\�3[]()! Cz@"));
     a.close();
   }
 
@@ -73,6 +74,7 @@ public class TestAnalyzers extends BaseTokenStreamTestCase {
                      new String[] { "2B" });
     assertAnalyzesTo(a, "\"QUOTED\" word", 
                      new String[] { "\"QUOTED\"", "word" });
+    assertEquals(new BytesRef("\"\\�3[]()! Cz@"), a.normalize("dummy", "\"\\�3[]()! Cz@"));
     a.close();
   }
 
@@ -82,6 +84,8 @@ public class TestAnalyzers extends BaseTokenStreamTestCase {
                      new String[] { "foo", "bar", "foo", "bar" });
     assertAnalyzesTo(a, "foo a bar such FOO THESE BAR", 
                      new String[] { "foo", "bar", "foo", "bar" });
+    assertEquals(new BytesRef("\"\\�3[]()! cz@"), a.normalize("dummy", "\"\\�3[]()! Cz@"));
+    assertEquals(new BytesRef("the"), a.normalize("dummy", "the"));
     a.close();
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestRandomChains.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestRandomChains.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestRandomChains.java
index 4effc79..25ca7a3 100644
--- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestRandomChains.java
+++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestRandomChains.java
@@ -928,6 +928,7 @@ public class TestRandomChains extends BaseTokenStreamTestCase {
           System.out.println("Creating random analyzer:" + a);
         }
         try {
+          checkNormalize(a);
           checkRandomData(random, a, 500*RANDOM_MULTIPLIER, 20, false,
               false /* We already validate our own offsets... */);
         } catch (Throwable e) {
@@ -937,7 +938,14 @@ public class TestRandomChains extends BaseTokenStreamTestCase {
       }
     }
   }
-  
+
+  public void checkNormalize(Analyzer a) {
+    // normalization should not modify characters that may be used for wildcards
+    // or regular expressions
+    String s = "([0-9]+)?*";
+    assertEquals(s, a.normalize("dummy", s).utf8ToString());
+  }
+
   // we might regret this decision...
   public void testRandomChainsWithLargeStrings() throws Throwable {
     int numIterations = TEST_NIGHTLY ? atLeast(20) : 3;

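checkNormalize() pins down the invariant that normalization must leave wildcard and regular-expression metacharacters untouched; that is what lets a query parser normalize a user-supplied pattern and still hand it to a wildcard query. A rough sketch of that downstream use (the parser wiring itself is not part of this patch, and the field name is hypothetical):

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.WildcardQuery;

    public class WildcardNormalizeSketch {
      public static void main(String[] args) {
        try (StandardAnalyzer analyzer = new StandardAnalyzer()) {
          // Lowercases "Te?t*" to "te?t*" without touching '?' or '*'.
          String pattern = analyzer.normalize("title", "Te?t*").utf8ToString();
          WildcardQuery query = new WildcardQuery(new Term("title", pattern));
          System.out.println(query);  // title:te?t*
        }
      }
    }
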
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/common/src/test/org/apache/lucene/analysis/custom/TestCustomAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/custom/TestCustomAnalyzer.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/custom/TestCustomAnalyzer.java
index 5160dab..aa69b70 100644
--- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/custom/TestCustomAnalyzer.java
+++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/custom/TestCustomAnalyzer.java
@@ -17,6 +17,8 @@
 package org.apache.lucene.analysis.custom;
 
 
+import java.io.IOException;
+import java.io.Reader;
 import java.nio.file.Paths;
 import java.util.Collections;
 import java.util.HashMap;
@@ -24,16 +26,25 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.lucene.analysis.BaseTokenStreamTestCase;
+import org.apache.lucene.analysis.CharFilter;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.analysis.charfilter.HTMLStripCharFilterFactory;
+import org.apache.lucene.analysis.core.KeywordTokenizerFactory;
 import org.apache.lucene.analysis.core.LowerCaseFilterFactory;
+import org.apache.lucene.analysis.core.LowerCaseTokenizer;
 import org.apache.lucene.analysis.core.StopFilterFactory;
 import org.apache.lucene.analysis.core.WhitespaceTokenizerFactory;
 import org.apache.lucene.analysis.miscellaneous.ASCIIFoldingFilterFactory;
 import org.apache.lucene.analysis.standard.ClassicTokenizerFactory;
 import org.apache.lucene.analysis.standard.StandardTokenizerFactory;
+import org.apache.lucene.analysis.util.AbstractAnalysisFactory;
 import org.apache.lucene.analysis.util.CharFilterFactory;
+import org.apache.lucene.analysis.util.MultiTermAwareComponent;
 import org.apache.lucene.analysis.util.TokenFilterFactory;
 import org.apache.lucene.analysis.util.TokenizerFactory;
+import org.apache.lucene.util.AttributeFactory;
+import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.SetOnce.AlreadySetException;
 import org.apache.lucene.util.Version;
 
@@ -336,4 +347,136 @@ public class TestCustomAnalyzer extends BaseTokenStreamTestCase {
     });
   }
 
+  private static class DummyCharFilter extends CharFilter {
+
+    private final char match, repl;
+
+    public DummyCharFilter(Reader input, char match, char repl) {
+      super(input);
+      this.match = match;
+      this.repl = repl;
+    }
+
+    @Override
+    protected int correct(int currentOff) {
+      return currentOff;
+    }
+
+    @Override
+    public int read(char[] cbuf, int off, int len) throws IOException {
+      final int read = input.read(cbuf, off, len);
+      for (int i = 0; i < read; ++i) {
+        if (cbuf[off+i] == match) {
+          cbuf[off+i] = repl;
+        }
+      }
+      return read;
+    }
+    
+  }
+
+  public static class DummyCharFilterFactory extends CharFilterFactory {
+
+    private final char match, repl;
+
+    public DummyCharFilterFactory(Map<String,String> args) {
+      this(args, '0', '1');
+    }
+
+    DummyCharFilterFactory(Map<String,String> args, char match, char repl) {
+      super(args);
+      this.match = match;
+      this.repl = repl;
+    }
+
+    @Override
+    public Reader create(Reader input) {
+      return new DummyCharFilter(input, match, repl);
+    }
+    
+  }
+
+  public static class DummyMultiTermAwareCharFilterFactory extends DummyCharFilterFactory implements MultiTermAwareComponent {
+
+    public DummyMultiTermAwareCharFilterFactory(Map<String,String> args) {
+      super(args);
+    }
+
+    @Override
+    public AbstractAnalysisFactory getMultiTermComponent() {
+      return new DummyCharFilterFactory(Collections.emptyMap(), '0', '2');
+    }
+
+  }
+
+  public static class DummyTokenizerFactory extends TokenizerFactory {
+
+    public DummyTokenizerFactory(Map<String,String> args) {
+      super(args);
+    }
+
+    @Override
+    public Tokenizer create(AttributeFactory factory) {
+      return new LowerCaseTokenizer(factory);
+    }
+
+  }
+
+  public static class DummyMultiTermAwareTokenizerFactory extends DummyTokenizerFactory implements MultiTermAwareComponent {
+
+    public DummyMultiTermAwareTokenizerFactory(Map<String,String> args) {
+      super(args);
+    }
+
+    @Override
+    public AbstractAnalysisFactory getMultiTermComponent() {
+      return new KeywordTokenizerFactory(getOriginalArgs());
+    }
+    
+  }
+
+  public static class DummyTokenFilterFactory extends TokenFilterFactory {
+
+    public DummyTokenFilterFactory(Map<String,String> args) {
+      super(args);
+    }
+
+    @Override
+    public TokenStream create(TokenStream input) {
+      return input;
+    }
+    
+  }
+
+  public static class DummyMultiTermAwareTokenFilterFactory extends DummyTokenFilterFactory implements MultiTermAwareComponent {
+
+    public DummyMultiTermAwareTokenFilterFactory(Map<String,String> args) {
+      super(args);
+    }
+
+    @Override
+    public AbstractAnalysisFactory getMultiTermComponent() {
+      return new ASCIIFoldingFilterFactory(Collections.emptyMap());
+    }
+    
+  }
+
+  public void testNormalization() throws IOException {
+    CustomAnalyzer analyzer1 = CustomAnalyzer.builder()
+        // none of these components are multi-term aware so they should not be applied
+        .withTokenizer(DummyTokenizerFactory.class, Collections.emptyMap())
+        .addCharFilter(DummyCharFilterFactory.class, Collections.emptyMap())
+        .addTokenFilter(DummyTokenFilterFactory.class, Collections.emptyMap())
+        .build();
+    assertEquals(new BytesRef("0�"), analyzer1.normalize("dummy", "0�"));
+
+    CustomAnalyzer analyzer2 = CustomAnalyzer.builder()
+        // these components are multi-term aware so they should be applied
+        .withTokenizer(DummyMultiTermAwareTokenizerFactory.class, Collections.emptyMap())
+        .addCharFilter(DummyMultiTermAwareCharFilterFactory.class, Collections.emptyMap())
+        .addTokenFilter(DummyMultiTermAwareTokenFilterFactory.class, Collections.emptyMap())
+        .build();
+    assertEquals(new BytesRef("2A"), analyzer2.normalize("dummy", "0�"));
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseAnalyzer.java b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseAnalyzer.java
index 46d40b1..06e119e 100644
--- a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseAnalyzer.java
+++ b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseAnalyzer.java
@@ -94,4 +94,11 @@ public class JapaneseAnalyzer extends StopwordAnalyzerBase {
     stream = new LowerCaseFilter(stream);
     return new TokenStreamComponents(tokenizer, stream);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new CJKWidthFilter(in);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/morfologik/src/java/org/apache/lucene/analysis/morfologik/MorfologikAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/morfologik/src/java/org/apache/lucene/analysis/morfologik/MorfologikAnalyzer.java b/lucene/analysis/morfologik/src/java/org/apache/lucene/analysis/morfologik/MorfologikAnalyzer.java
index 091acfd..0caca35 100644
--- a/lucene/analysis/morfologik/src/java/org/apache/lucene/analysis/morfologik/MorfologikAnalyzer.java
+++ b/lucene/analysis/morfologik/src/java/org/apache/lucene/analysis/morfologik/MorfologikAnalyzer.java
@@ -23,6 +23,7 @@ import morfologik.stemming.Dictionary;
 import morfologik.stemming.polish.PolishStemmer;
 
 import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.analysis.standard.StandardFilter;
 import org.apache.lucene.analysis.standard.StandardTokenizer;
@@ -69,4 +70,9 @@ public class MorfologikAnalyzer extends Analyzer {
         src, 
         new MorfologikFilter(new StandardFilter(src), dictionary));
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    return new StandardFilter(in);
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/smartcn/src/java/org/apache/lucene/analysis/cn/smart/SmartChineseAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/smartcn/src/java/org/apache/lucene/analysis/cn/smart/SmartChineseAnalyzer.java b/lucene/analysis/smartcn/src/java/org/apache/lucene/analysis/cn/smart/SmartChineseAnalyzer.java
index 5f0347b..f604d4b 100644
--- a/lucene/analysis/smartcn/src/java/org/apache/lucene/analysis/cn/smart/SmartChineseAnalyzer.java
+++ b/lucene/analysis/smartcn/src/java/org/apache/lucene/analysis/cn/smart/SmartChineseAnalyzer.java
@@ -22,6 +22,7 @@ import java.util.Set;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.CharArraySet;
+import org.apache.lucene.analysis.LowerCaseFilter;
 import org.apache.lucene.analysis.StopFilter;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
@@ -139,4 +140,9 @@ public final class SmartChineseAnalyzer extends Analyzer {
     }
     return new TokenStreamComponents(tokenizer, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    return new LowerCaseFilter(in);
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/analysis/stempel/src/java/org/apache/lucene/analysis/pl/PolishAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/stempel/src/java/org/apache/lucene/analysis/pl/PolishAnalyzer.java b/lucene/analysis/stempel/src/java/org/apache/lucene/analysis/pl/PolishAnalyzer.java
index 6ed4fda..2d3ef4c 100644
--- a/lucene/analysis/stempel/src/java/org/apache/lucene/analysis/pl/PolishAnalyzer.java
+++ b/lucene/analysis/stempel/src/java/org/apache/lucene/analysis/pl/PolishAnalyzer.java
@@ -146,4 +146,11 @@ public final class PolishAnalyzer extends StopwordAnalyzerBase {
     result = new StempelFilter(result, new StempelStemmer(stemTable));
     return new TokenStreamComponents(source, result);
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java b/lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java
index cce740d..0d60d24 100644
--- a/lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java
+++ b/lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java
@@ -18,11 +18,18 @@ package org.apache.lucene.analysis;
 
 
 import java.io.Closeable;
+import java.io.IOException;
 import java.io.Reader;
+import java.io.StringReader;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
+import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
+import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
 import org.apache.lucene.store.AlreadyClosedException;
+import org.apache.lucene.util.AttributeFactory;
+import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CloseableThreadLocal;
 import org.apache.lucene.util.Version;
 
@@ -44,6 +51,12 @@ import org.apache.lucene.util.Version;
  *     filter = new BarFilter(filter);
  *     return new TokenStreamComponents(source, filter);
  *   }
+ *   {@literal @Override}
+ *   protected TokenStream normalize(String fieldName, TokenStream in) {
+ *     // Assuming FooFilter is about normalization and BarFilter is about
+ *     // stemming, only FooFilter should be applied
+ *     return new FooFilter(in);
+ *   }
  * };
  * </pre>
  * For more examples, see the {@link org.apache.lucene.analysis Analysis package documentation}.
@@ -108,6 +121,15 @@ public abstract class Analyzer implements Closeable {
   protected abstract TokenStreamComponents createComponents(String fieldName);
 
   /**
+   * Wrap the given {@link TokenStream} in order to apply normalization filters.
+   * The default implementation returns the {@link TokenStream} as-is. This is
+   * used by {@link #normalize(String, String)}.
+   */
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    return in;
+  }
+
+  /**
    * Returns a TokenStream suitable for <code>fieldName</code>, tokenizing
    * the contents of <code>reader</code>.
    * <p>
@@ -181,7 +203,65 @@ public abstract class Analyzer implements Closeable {
     components.reusableStringReader = strReader;
     return components.getTokenStream();
   }
-    
+
+  /**
+   * Normalize a string down to the representation that it would have in the
+   * index.
+   * <p>
+   * This is typically used by query parsers in order to generate a query on
+   * a given term, without tokenizing or stemming, which are undesirable if
+   * the string to analyze is a partial word (e.g. in case of a wildcard or
+   * fuzzy query).
+   * <p>
+   * This method uses {@link #initReaderForNormalization(String, Reader)} in
+   * order to apply necessary character-level normalization and then
+   * {@link #normalize(String, TokenStream)} in order to apply the normalizing
+   * token filters.
+   */
+  public final BytesRef normalize(final String fieldName, final String text) {
+    try {
+      // apply char filters
+      final String filteredText;
+      try (Reader reader = new StringReader(text)) {
+        Reader filterReader = initReaderForNormalization(fieldName, reader);
+        char[] buffer = new char[64];
+        StringBuilder builder = new StringBuilder();
+        for (;;) {
+          final int read = filterReader.read(buffer, 0, buffer.length);
+          if (read == -1) {
+            break;
+          }
+          builder.append(buffer, 0, read);
+        }
+        filteredText = builder.toString();
+      } catch (IOException e) {
+        throw new IllegalStateException("Normalization threw an unexpected exception", e);
+      }
+
+      final AttributeFactory attributeFactory = attributeFactory();
+      try (TokenStream ts = normalize(fieldName,
+          new StringTokenStream(attributeFactory, filteredText, text.length()))) {
+        final TermToBytesRefAttribute termAtt = ts.addAttribute(TermToBytesRefAttribute.class);
+        ts.reset();
+        if (ts.incrementToken() == false) {
+          throw new IllegalStateException("The normalization token stream is "
+              + "expected to produce exactly 1 token, but got 0 for analyzer "
+              + this + " and input \"" + text + "\"");
+        }
+        final BytesRef term = BytesRef.deepCopyOf(termAtt.getBytesRef());
+        if (ts.incrementToken()) {
+          throw new IllegalStateException("The normalization token stream is "
+              + "expected to produce exactly 1 token, but got 2+ for analyzer "
+              + this + " and input \"" + text + "\"");
+        }
+        ts.end();
+        return term;
+      }
+    } catch (IOException e) {
+      throw new IllegalStateException("Normalization threw an unexpected exception", e);
+    }
+  }
+
   /**
    * Override this if you want to add a CharFilter chain.
    * <p>
@@ -196,6 +276,22 @@ public abstract class Analyzer implements Closeable {
     return reader;
   }
 
+  /** Wrap the given {@link Reader} with {@link CharFilter}s that make sense
+   *  for normalization. This is typically a subset of the {@link CharFilter}s
+   *  that are applied in {@link #initReader(String, Reader)}. This is used by
+   *  {@link #normalize(String, String)}. */
+  protected Reader initReaderForNormalization(String fieldName, Reader reader) {
+    return reader;
+  }
+
+  /** Return the {@link AttributeFactory} to be used for
+   *  {@link #tokenStream analysis} and
+   *  {@link #normalize(String, String) normalization}. The default
+   *  implementation returns {@link AttributeFactory#DEFAULT_ATTRIBUTE_FACTORY}. */
+  protected AttributeFactory attributeFactory() {
+    return AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY;
+  }
+
   /**
    * Invoked before indexing a IndexableField instance if
    * terms have already been added to that field.  This allows custom
@@ -435,4 +531,41 @@ public abstract class Analyzer implements Closeable {
     }
   };
 
+  private static final class StringTokenStream extends TokenStream {
+
+    private final String value;
+    private final int length;
+    private boolean used = true;
+    private final CharTermAttribute termAttribute = addAttribute(CharTermAttribute.class);
+    private final OffsetAttribute offsetAttribute = addAttribute(OffsetAttribute.class);
+
+    StringTokenStream(AttributeFactory attributeFactory, String value, int length) {
+      super(attributeFactory);
+      this.value = value;
+      this.length = length;
+    }
+
+    @Override
+    public void reset() {
+      used = false;
+    }
+
+    @Override
+    public boolean incrementToken() {
+      if (used) {
+        return false;
+      }
+      clearAttributes();
+      termAttribute.append(value);
+      offsetAttribute.setOffset(0, length);
+      used = true;
+      return true;
+    }
+
+    @Override
+    public void end() throws IOException {
+      super.end();
+      offsetAttribute.setOffset(length, length);
+    }
+  }
 }

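The new Analyzer#normalize(String, String) above returns the single term that a partial word would have in the index, applying only the char filters registered through initReaderForNormalization and the token filters registered through normalize(String, TokenStream), with no tokenization or stemming. A minimal sketch of how a caller might use it to build a multi-term query; the class name, field name and sample input are illustrative assumptions, not part of the patch:

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.PrefixQuery;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.util.BytesRef;

    public class NormalizeSketch {
      public static void main(String[] args) {
        // StandardAnalyzer#normalize lower-cases without tokenizing (see its diff below)
        Analyzer analyzer = new StandardAnalyzer();
        // normalize the raw prefix exactly as it would be indexed
        BytesRef normalized = analyzer.normalize("title", "Foo");  // expected: "foo"
        // build the multi-term query on the normalized bytes
        Query query = new PrefixQuery(new Term("title", normalized));
        System.out.println(query);                                 // e.g. title:foo*
      }
    }
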
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardAnalyzer.java b/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardAnalyzer.java
index 251017d..fb57573 100644
--- a/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardAnalyzer.java
+++ b/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardAnalyzer.java
@@ -112,4 +112,11 @@ public final class StandardAnalyzer extends StopwordAnalyzerBase {
       }
     };
   }
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = new StandardFilter(in);
+    result = new LowerCaseFilter(result);
+    return result;
+  }
 }

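StandardAnalyzer only overrides the token-filter half of the normalization chain; analyzers that also rely on char filters would additionally override initReaderForNormalization. A hedged sketch of a custom analyzer wiring both analysis and normalization; the class is hypothetical and the tokenizer and filter are assumed to come from the analyzers-common module:

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.Tokenizer;
    import org.apache.lucene.analysis.core.LowerCaseFilter;
    import org.apache.lucene.analysis.core.WhitespaceTokenizer;

    public class LowercasingAnalyzer extends Analyzer {
      @Override
      protected TokenStreamComponents createComponents(String fieldName) {
        // full analysis chain used at index time
        Tokenizer source = new WhitespaceTokenizer();
        return new TokenStreamComponents(source, new LowerCaseFilter(source));
      }

      @Override
      protected TokenStream normalize(String fieldName, TokenStream in) {
        // keep only the filters that make sense for a partial word
        return new LowerCaseFilter(in);
      }
    }
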
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/core/src/test/org/apache/lucene/analysis/standard/TestStandardAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/analysis/standard/TestStandardAnalyzer.java b/lucene/core/src/test/org/apache/lucene/analysis/standard/TestStandardAnalyzer.java
index 6c6ddc8..2cc9274 100644
--- a/lucene/core/src/test/org/apache/lucene/analysis/standard/TestStandardAnalyzer.java
+++ b/lucene/core/src/test/org/apache/lucene/analysis/standard/TestStandardAnalyzer.java
@@ -27,6 +27,7 @@ import org.apache.lucene.analysis.BaseTokenStreamTestCase;
 import org.apache.lucene.analysis.MockGraphTokenFilter;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
+import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.TestUtil;
 
 public class TestStandardAnalyzer extends BaseTokenStreamTestCase {
@@ -387,4 +388,9 @@ public class TestStandardAnalyzer extends BaseTokenStreamTestCase {
     checkRandomData(random, analyzer, 100*RANDOM_MULTIPLIER, 8192);
     analyzer.close();
   }
+
+  public void testNormalize() {
+    Analyzer a = new StandardAnalyzer();
+    assertEquals(new BytesRef("\"\\à3[]()! cz@"), a.normalize("dummy", "\"\\À3[]()! Cz@"));
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/queryparser/src/java/org/apache/lucene/queryparser/analyzing/AnalyzingQueryParser.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/analyzing/AnalyzingQueryParser.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/analyzing/AnalyzingQueryParser.java
deleted file mode 100644
index 49690fe..0000000
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/analyzing/AnalyzingQueryParser.java
+++ /dev/null
@@ -1,202 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.queryparser.analyzing;
-
-import java.io.IOException;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
-import org.apache.lucene.queryparser.classic.ParseException;
-import org.apache.lucene.search.Query;
-
-/**
- * Overrides Lucene's default QueryParser so that Fuzzy-, Prefix-, Range-, and WildcardQuerys
- * are also passed through the given analyzer, but wildcard characters <code>*</code> and
- * <code>?</code> don't get removed from the search terms.
- * 
- * <p><b>Warning:</b> This class should only be used with analyzers that do not use stopwords
- * or that add tokens. Also, several stemming analyzers are inappropriate: for example, GermanAnalyzer 
- * will turn <code>H&auml;user</code> into <code>hau</code>, but <code>H?user</code> will 
- * become <code>h?user</code> when using this parser and thus no match would be found (i.e.
- * using this parser will be no improvement over QueryParser in such cases). 
- */
-public class AnalyzingQueryParser extends org.apache.lucene.queryparser.classic.QueryParser {
-  // gobble escaped chars or find a wildcard character 
-  private final Pattern wildcardPattern = Pattern.compile("(\\.)|([?*]+)");
-  public AnalyzingQueryParser(String field, Analyzer analyzer) {
-    super(field, analyzer);
-    setAnalyzeRangeTerms(true);
-  }
-
-  /**
-   * Called when parser parses an input term that contains one or more wildcard
-   * characters (like <code>*</code>), but is not a prefix term (one that has
-   * just a single <code>*</code> character at the end).
-   * <p>
-   * Example: will be called for <code>H?user</code> or for <code>H*user</code>.
-   * <p>
-   * Depending on analyzer and settings, a wildcard term may (most probably will)
-   * be lower-cased automatically. It <b>will</b> go through the default Analyzer.
-   * <p>
-   * Overrides super class, by passing terms through analyzer.
-   *
-   * @param  field   Name of the field query will use.
-   * @param  termStr Term that contains one or more wildcard
-   *                 characters (? or *), but is not simple prefix term
-   *
-   * @return Resulting {@link Query} built for the term
-   */
-  @Override
-  protected Query getWildcardQuery(String field, String termStr) throws ParseException {
-
-    if (termStr == null){
-      //can't imagine this would ever happen
-      throw new ParseException("Passed null value as term to getWildcardQuery");
-    }
-    if ( ! getAllowLeadingWildcard() && (termStr.startsWith("*") || termStr.startsWith("?"))) {
-      throw new ParseException("'*' or '?' not allowed as first character in WildcardQuery"
-                              + " unless getAllowLeadingWildcard() returns true");
-    }
-    
-    Matcher wildcardMatcher = wildcardPattern.matcher(termStr);
-    StringBuilder sb = new StringBuilder();
-    int last = 0;
-  
-    while (wildcardMatcher.find()){
-      // continue if escaped char
-      if (wildcardMatcher.group(1) != null){
-        continue;
-      }
-     
-      if (wildcardMatcher.start() > 0){
-        String chunk = termStr.substring(last, wildcardMatcher.start());
-        String analyzed = analyzeSingleChunk(field, termStr, chunk);
-        sb.append(analyzed);
-      }
-      //append the wildcard character
-      sb.append(wildcardMatcher.group(2));
-     
-      last = wildcardMatcher.end();
-    }
-    if (last < termStr.length()){
-      sb.append(analyzeSingleChunk(field, termStr, termStr.substring(last)));
-    }
-    return super.getWildcardQuery(field, sb.toString());
-  }
-  
-  /**
-   * Called when parser parses an input term
-   * that uses prefix notation; that is, contains a single '*' wildcard
-   * character as its last character. Since this is a special case
-   * of generic wildcard term, and such a query can be optimized easily,
-   * this usually results in a different query object.
-   * <p>
-   * Depending on analyzer and settings, a prefix term may (most probably will)
-   * be lower-cased automatically. It <b>will</b> go through the default Analyzer.
-   * <p>
-   * Overrides super class, by passing terms through analyzer.
-   *
-   * @param  field   Name of the field query will use.
-   * @param  termStr Term to use for building term for the query
-   *                 (<b>without</b> trailing '*' character!)
-   *
-   * @return Resulting {@link Query} built for the term
-   */
-  @Override
-  protected Query getPrefixQuery(String field, String termStr) throws ParseException {
-    String analyzed = analyzeSingleChunk(field, termStr, termStr);
-    return super.getPrefixQuery(field, analyzed);
-  }
-
-  /**
-   * Called when parser parses an input term that has the fuzzy suffix (~) appended.
-   * <p>
-   * Depending on analyzer and settings, a fuzzy term may (most probably will)
-   * be lower-cased automatically. It <b>will</b> go through the default Analyzer.
-   * <p>
-   * Overrides super class, by passing terms through analyzer.
-   *
-   * @param field Name of the field query will use.
-   * @param termStr Term to use for building term for the query
-   *
-   * @return Resulting {@link Query} built for the term
-   */
-  @Override
-  protected Query getFuzzyQuery(String field, String termStr, float minSimilarity)
-      throws ParseException {
-   
-    String analyzed = analyzeSingleChunk(field, termStr, termStr);
-    return super.getFuzzyQuery(field, analyzed, minSimilarity);
-  }
-
-  /**
-   * Returns the analyzed form for the given chunk
-   * 
-   * If the analyzer produces more than one output token from the given chunk,
-   * a ParseException is thrown.
-   *
-   * @param field The target field
-   * @param termStr The full term from which the given chunk is excerpted
-   * @param chunk The portion of the given termStr to be analyzed
-   * @return The result of analyzing the given chunk
-   * @throws ParseException when analysis returns other than one output token
-   */
-  protected String analyzeSingleChunk(String field, String termStr, String chunk) throws ParseException{
-    String analyzed = null;
-    try (TokenStream stream = getAnalyzer().tokenStream(field, chunk)) {
-      stream.reset();
-      CharTermAttribute termAtt = stream.getAttribute(CharTermAttribute.class);
-      // get first and hopefully only output token
-      if (stream.incrementToken()) {
-        analyzed = termAtt.toString();
-        
-        // try to increment again, there should only be one output token
-        StringBuilder multipleOutputs = null;
-        while (stream.incrementToken()) {
-          if (null == multipleOutputs) {
-            multipleOutputs = new StringBuilder();
-            multipleOutputs.append('"');
-            multipleOutputs.append(analyzed);
-            multipleOutputs.append('"');
-          }
-          multipleOutputs.append(',');
-          multipleOutputs.append('"');
-          multipleOutputs.append(termAtt.toString());
-          multipleOutputs.append('"');
-        }
-        stream.end();
-        if (null != multipleOutputs) {
-          throw new ParseException(
-              String.format(getLocale(),
-                  "Analyzer created multiple terms for \"%s\": %s", chunk, multipleOutputs.toString()));
-        }
-      } else {
-        // nothing returned by analyzer.  Was it a stop word and the user accidentally
-        // used an analyzer with stop words?
-        stream.end();
-        throw new ParseException(String.format(getLocale(), "Analyzer returned nothing for \"%s\"", chunk));
-      }
-    } catch (IOException e){
-      throw new ParseException(
-          String.format(getLocale(), "IO error while trying to analyze single term: \"%s\"", termStr));
-    }
-    return analyzed;
-  }
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/queryparser/src/java/org/apache/lucene/queryparser/analyzing/package-info.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/analyzing/package-info.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/analyzing/package-info.java
deleted file mode 100644
index 77397b4..0000000
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/analyzing/package-info.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
- 
-/** 
- * QueryParser that passes Fuzzy-, Prefix-, Range-, and WildcardQuerys through the given analyzer.
- */
-package org.apache.lucene.queryparser.analyzing;
-

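With the classic QueryParser now passing prefix, wildcard, fuzzy and range terms through Analyzer#normalize (see the QueryParserBase diff later in this message set), the dedicated AnalyzingQueryParser is removed. A minimal sketch of the equivalent behaviour with the classic parser; the field name, sample input and expected output are illustrative only:

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.queryparser.classic.QueryParser;
    import org.apache.lucene.search.Query;

    public class ClassicParserSketch {
      public static void main(String[] args) throws Exception {
        QueryParser parser = new QueryParser("title", new StandardAnalyzer());
        // the prefix term is normalized (lower-cased by StandardAnalyzer) before the query is built
        Query q = parser.parse("Lucene*");
        System.out.println(q); // expected something like title:lucene*
      }
    }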

[50/51] [abbrv] lucene-solr:apiv2: SOLR-8029: merge master into apiv2

Posted by sa...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/49a09217/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
----------------------------------------------------------------------
diff --cc solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
index cad1fad,8a5483a..33756f4
--- a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
@@@ -89,86 -85,98 +89,98 @@@ public class BasicAuthIntegrationTest e
      cloudSolrClient.setDefaultCollection(null);
  
      NamedList<Object> rsp;
-     HttpClient cl = HttpClientUtil.createClient(null);
-     String baseUrl = getRandomReplica(zkStateReader.getClusterState().getCollection(defaultCollName), random()).getStr(BASE_URL_PROP);
-     verifySecurityStatus(cl, baseUrl + authcPrefix, "/errorMessages", null, 20);
-     zkClient.setData("/security.json", STD_CONF.replaceAll("'", "\"").getBytes(UTF_8), true);
-     verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication/class", "solr.BasicAuthPlugin", 20);
- 
-     boolean found = false;
-     for (JettySolrRunner jettySolrRunner : miniCluster.getJettySolrRunners()) {
-       if(baseUrl.contains(String.valueOf(jettySolrRunner.getLocalPort()))){
-         found = true;
-         jettySolrRunner.stop();
-         jettySolrRunner.start();
-         verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication/class", "solr.BasicAuthPlugin", 20);
-         break;
-       }
-     }
- 
-     assertTrue("No server found to restart , looking for : "+baseUrl , found);
- 
-     String command = "{\n" +
-         "'set-user': {'harry':'HarryIsCool'}\n" +
-         "}";
- 
-     GenericSolrRequest genericReq = new GenericSolrRequest(SolrRequest.METHOD.POST, authcPrefix, new ModifiableSolrParams());
-     genericReq.setContentStreams(Collections.singletonList(new ContentStreamBase.ByteArrayStream(command.getBytes(UTF_8), "")));
+     HttpClient cl = null;
      try {
-       rsp = cloudSolrClient.request(genericReq);
-       fail("Should have failed with a 401,  got :" + rsp);
-     } catch (HttpSolrClient.RemoteSolrException e) {
-     }
-     command = "{\n" +
-         "'set-user': {'harry':'HarryIsUberCool'}\n" +
-         "}";
- 
-     HttpPost httpPost = new HttpPost(baseUrl + authcPrefix);
-     setBasicAuthHeader(httpPost, "solr", "SolrRocks");
-     httpPost.setEntity(new ByteArrayEntity(command.getBytes(UTF_8)));
-     httpPost.addHeader("Content-Type", "application/json; charset=UTF-8");
-     verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication.enabled", "true", 20);
-     HttpResponse r = cl.execute(httpPost);
-     int statusCode = r.getStatusLine().getStatusCode();
-     Utils.consumeFully(r.getEntity());
-     assertEquals("proper_cred sent, but access denied", 200, statusCode);
-     baseUrl = getRandomReplica(zkStateReader.getClusterState().getCollection(defaultCollName), random()).getStr(BASE_URL_PROP);
- 
-     verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication/credentials/harry", NOT_NULL_PREDICATE, 20);
-     command = "{\n" +
-         "'set-user-role': {'harry':'admin'}\n" +
-         "}";
- 
-     executeCommand(baseUrl + authzPrefix, cl,command, "solr", "SolrRocks");
- 
-     baseUrl = getRandomReplica(zkStateReader.getClusterState().getCollection(defaultCollName), random()).getStr(BASE_URL_PROP);
-     verifySecurityStatus(cl, baseUrl + authzPrefix, "authorization/user-role/harry", NOT_NULL_PREDICATE, 20);
- 
-     executeCommand(baseUrl + authzPrefix, cl, Utils.toJSONString(singletonMap("set-permission", Utils.makeMap
-         ("collection", "x",
-             "path", "/update/*",
-             "role", "dev"))), "harry", "HarryIsUberCool" );
- 
-     verifySecurityStatus(cl, baseUrl + authzPrefix, "authorization/permissions[1]/collection", "x", 20);
+       cl = HttpClientUtil.createClient(null);
+       String baseUrl = getRandomReplica(zkStateReader.getClusterState().getCollection(defaultCollName), random()).getStr(BASE_URL_PROP);
+       verifySecurityStatus(cl, baseUrl + authcPrefix, "/errorMessages", null, 20);
+       zkClient.setData("/security.json", STD_CONF.replaceAll("'", "\"").getBytes(UTF_8), true);
+       verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication/class", "solr.BasicAuthPlugin", 20);
+ 
+       boolean found = false;
+       for (JettySolrRunner jettySolrRunner : miniCluster.getJettySolrRunners()) {
+         if(baseUrl.contains(String.valueOf(jettySolrRunner.getLocalPort()))){
+           found = true;
+           jettySolrRunner.stop();
+           jettySolrRunner.start();
+           verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication/class", "solr.BasicAuthPlugin", 20);
+           break;
+         }
+       }
  
-     executeCommand(baseUrl + authzPrefix, cl,Utils.toJSONString(singletonMap("set-permission", Utils.makeMap
-         ("name", "collection-admin-edit", "role", "admin"))), "harry", "HarryIsUberCool"  );
-     verifySecurityStatus(cl, baseUrl + authzPrefix, "authorization/permissions[2]/name", "collection-admin-edit", 20);
+       assertTrue("No server found to restart , looking for : "+baseUrl , found);
  
-     CollectionAdminRequest.Reload reload = new CollectionAdminRequest.Reload();
-     reload.setCollectionName(defaultCollName);
+       String command = "{\n" +
+           "'set-user': {'harry':'HarryIsCool'}\n" +
+           "}";
  
-     try (HttpSolrClient solrClient = getHttpSolrClient(baseUrl)) {
+       GenericSolrRequest genericReq = new GenericSolrRequest(SolrRequest.METHOD.POST, authcPrefix, new ModifiableSolrParams());
+       genericReq.setContentStreams(Collections.singletonList(new ContentStreamBase.ByteArrayStream(command.getBytes(UTF_8), "")));
        try {
-         rsp = solrClient.request(reload);
-         fail("must have failed");
 -        cloudSolrClient.request(genericReq);
 -        fail("Should have failed with a 401");
++        rsp = cloudSolrClient.request(genericReq);
++        fail("Should have failed with a 401,  got :" + rsp);
        } catch (HttpSolrClient.RemoteSolrException e) {
- 
        }
-       reload.setMethod(SolrRequest.METHOD.POST);
+       command = "{\n" +
+           "'set-user': {'harry':'HarryIsUberCool'}\n" +
+           "}";
+ 
+       HttpPost httpPost = new HttpPost(baseUrl + authcPrefix);
+       setBasicAuthHeader(httpPost, "solr", "SolrRocks");
+       httpPost.setEntity(new ByteArrayEntity(command.getBytes(UTF_8)));
+       httpPost.addHeader("Content-Type", "application/json; charset=UTF-8");
+       verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication.enabled", "true", 20);
+       HttpResponse r = cl.execute(httpPost);
+       int statusCode = r.getStatusLine().getStatusCode();
+       Utils.consumeFully(r.getEntity());
+       assertEquals("proper_cred sent, but access denied", 200, statusCode);
+       baseUrl = getRandomReplica(zkStateReader.getClusterState().getCollection(defaultCollName), random()).getStr(BASE_URL_PROP);
+ 
+       verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication/credentials/harry", NOT_NULL_PREDICATE, 20);
+       command = "{\n" +
+           "'set-user-role': {'harry':'admin'}\n" +
+           "}";
+ 
+       executeCommand(baseUrl + authzPrefix, cl,command, "solr", "SolrRocks");
+ 
+       baseUrl = getRandomReplica(zkStateReader.getClusterState().getCollection(defaultCollName), random()).getStr(BASE_URL_PROP);
+       verifySecurityStatus(cl, baseUrl + authzPrefix, "authorization/user-role/harry", NOT_NULL_PREDICATE, 20);
+ 
+       executeCommand(baseUrl + authzPrefix, cl, Utils.toJSONString(singletonMap("set-permission", Utils.makeMap
+           ("collection", "x",
+               "path", "/update/*",
+               "role", "dev"))), "harry", "HarryIsUberCool" );
+ 
+       verifySecurityStatus(cl, baseUrl + authzPrefix, "authorization/permissions[1]/collection", "x", 20);
+ 
+       executeCommand(baseUrl + authzPrefix, cl,Utils.toJSONString(singletonMap("set-permission", Utils.makeMap
+           ("name", "collection-admin-edit", "role", "admin"))), "harry", "HarryIsUberCool"  );
+       verifySecurityStatus(cl, baseUrl + authzPrefix, "authorization/permissions[2]/name", "collection-admin-edit", 20);
+ 
+       CollectionAdminRequest.Reload reload = CollectionAdminRequest.reloadCollection(defaultCollName);
+ 
+       try (HttpSolrClient solrClient = getHttpSolrClient(baseUrl)) {
+         try {
+           rsp = solrClient.request(reload);
+           fail("must have failed");
+         } catch (HttpSolrClient.RemoteSolrException e) {
+ 
+         }
+         reload.setMethod(SolrRequest.METHOD.POST);
+         try {
+           rsp = solrClient.request(reload);
+           fail("must have failed");
+         } catch (HttpSolrClient.RemoteSolrException e) {
+ 
+         }
+       }
+       cloudSolrClient.request(CollectionAdminRequest.reloadCollection(defaultCollName)
+           .setBasicAuthCredentials("harry", "HarryIsUberCool"));
+ 
        try {
-         rsp = solrClient.request(reload);
-         fail("must have failed");
+         cloudSolrClient.request(CollectionAdminRequest.reloadCollection(defaultCollName)
+             .setBasicAuthCredentials("harry", "Cool12345"));
+         fail("This should not succeed");
        } catch (HttpSolrClient.RemoteSolrException e) {
  
        }

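The merged test now creates the HttpClient inside a try block so it can be released even when an assertion fails; the matching cleanup sits outside the hunk shown above. A minimal sketch of that pattern, assuming HttpClientUtil.close is the SolrJ helper that releases the client:

    import org.apache.http.client.HttpClient;
    import org.apache.solr.client.solrj.impl.HttpClientUtil;

    public class ClientCleanupSketch {
      void run() throws Exception {
        HttpClient cl = null;
        try {
          cl = HttpClientUtil.createClient(null);
          // ... issue requests and verify the security configuration ...
        } finally {
          if (cl != null) {
            HttpClientUtil.close(cl); // assumed helper; releases the underlying connection manager
          }
        }
      }
    }
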
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/49a09217/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/49a09217/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
----------------------------------------------------------------------


[09/51] [abbrv] lucene-solr:apiv2: LUCENE-7355: Add Analyzer#normalize() and use it in query parsers.

Posted by sa...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParserBase.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParserBase.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParserBase.java
index cdfa477..fbe08a9 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParserBase.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParserBase.java
@@ -16,14 +16,13 @@
  */
 package org.apache.lucene.queryparser.classic;
 
-import java.io.IOException;
 import java.io.StringReader;
 import java.text.DateFormat;
 import java.util.*;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
 import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
 import org.apache.lucene.document.DateTools;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.queryparser.classic.QueryParser.Operator;
@@ -32,6 +31,7 @@ import org.apache.lucene.search.*;
 import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.BooleanQuery.TooManyClauses;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.QueryBuilder;
 import org.apache.lucene.util.automaton.RegExp;
 
@@ -41,9 +41,6 @@ import static org.apache.lucene.util.automaton.Operations.DEFAULT_MAX_DETERMINIZ
  * and acts to separate the majority of the Java code from the .jj grammar file. 
  */
 public abstract class QueryParserBase extends QueryBuilder implements CommonQueryParserConfiguration {
-  
-  /** Do not catch this exception in your code, it means you are using methods that you should no longer use. */
-  public static class MethodRemovedUseAnother extends Throwable {}
 
   static final int CONJ_NONE   = 0;
   static final int CONJ_AND    = 1;
@@ -63,7 +60,6 @@ public abstract class QueryParserBase extends QueryBuilder implements CommonQuer
   /** The actual operator that parser uses to combine query terms */
   Operator operator = OR_OPERATOR;
 
-  boolean lowercaseExpandedTerms = true;
   MultiTermQuery.RewriteMethod multiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_REWRITE;
   boolean allowLeadingWildcard = false;
 
@@ -79,10 +75,6 @@ public abstract class QueryParserBase extends QueryBuilder implements CommonQuer
   // maps field names to date resolutions
   Map<String,DateTools.Resolution> fieldToDateResolution = null;
 
-  //Whether or not to analyze range terms when constructing RangeQuerys
-  // (For example, analyzing terms into collation keys for locale-sensitive RangeQuery)
-  boolean analyzeRangeTerms = false;
-
   boolean autoGeneratePhraseQueries;
   int maxDeterminizedStates = DEFAULT_MAX_DETERMINIZED_STATES;
 
@@ -253,24 +245,7 @@ public abstract class QueryParserBase extends QueryBuilder implements CommonQuer
     return operator;
   }
 
-
-  /**
-   * Whether terms of wildcard, prefix, fuzzy and range queries are to be automatically
-   * lower-cased or not.  Default is <code>true</code>.
-   */
-  @Override
-  public void setLowercaseExpandedTerms(boolean lowercaseExpandedTerms) {
-    this.lowercaseExpandedTerms = lowercaseExpandedTerms;
-  }
-
-  /**
-   * @see #setLowercaseExpandedTerms(boolean)
-   */
-  @Override
-  public boolean getLowercaseExpandedTerms() {
-    return lowercaseExpandedTerms;
-  }
-
+  
   /**
    * By default QueryParser uses {@link org.apache.lucene.search.MultiTermQuery#CONSTANT_SCORE_REWRITE}
    * when creating a {@link PrefixQuery}, {@link WildcardQuery} or {@link TermRangeQuery}. This implementation is generally preferable because it
@@ -379,24 +354,6 @@ public abstract class QueryParserBase extends QueryBuilder implements CommonQuer
   }
 
   /**
-   * Set whether or not to analyze range terms when constructing {@link TermRangeQuery}s.
-   * For example, setting this to true can enable analyzing terms into 
-   * collation keys for locale-sensitive {@link TermRangeQuery}.
-   * 
-   * @param analyzeRangeTerms whether or not terms should be analyzed for RangeQuerys
-   */
-  public void setAnalyzeRangeTerms(boolean analyzeRangeTerms) {
-    this.analyzeRangeTerms = analyzeRangeTerms;
-  }
-
-  /**
-   * @return whether or not to analyze range terms when constructing {@link TermRangeQuery}s.
-   */
-  public boolean getAnalyzeRangeTerms() {
-    return analyzeRangeTerms;
-  }
-
-  /**
    * @param maxDeterminizedStates the maximum number of states that
    *   determinizing a regexp query can result in.  If the query results in any
    *   more states a TooComplexToDeterminizeException is thrown.
@@ -558,12 +515,6 @@ public abstract class QueryParserBase extends QueryBuilder implements CommonQuer
                                 boolean startInclusive,
                                 boolean endInclusive) throws ParseException
   {
-    if (lowercaseExpandedTerms) {
-      part1 = part1==null ? null : part1.toLowerCase(locale);
-      part2 = part2==null ? null : part2.toLowerCase(locale);
-    }
-
-
     DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, locale);
     df.setLenient(true);
     DateTools.Resolution resolution = getDateResolution(field);
@@ -640,31 +591,6 @@ public abstract class QueryParserBase extends QueryBuilder implements CommonQuer
     return new FuzzyQuery(term,numEdits,prefixLength);
   }
 
-  // TODO: Should this be protected instead?
-  private BytesRef analyzeMultitermTerm(String field, String part) {
-    return analyzeMultitermTerm(field, part, getAnalyzer());
-  }
-
-  protected BytesRef analyzeMultitermTerm(String field, String part, Analyzer analyzerIn) {
-    if (analyzerIn == null) analyzerIn = getAnalyzer();
-
-    try (TokenStream source = analyzerIn.tokenStream(field, part)) {
-      source.reset();
-      
-      TermToBytesRefAttribute termAtt = source.getAttribute(TermToBytesRefAttribute.class);
-
-      if (!source.incrementToken())
-        throw new IllegalArgumentException("analyzer returned no terms for multiTerm term: " + part);
-      BytesRef bytes = BytesRef.deepCopyOf(termAtt.getBytesRef());
-      if (source.incrementToken())
-        throw new IllegalArgumentException("analyzer returned too many terms for multiTerm term: " + part);
-      source.end();
-      return bytes;
-    } catch (IOException e) {
-      throw new RuntimeException("Error analyzing multiTerm term: " + part, e);
-    }
-  }
-
   /**
    * Builds a new {@link TermRangeQuery} instance
    * @param field Field
@@ -681,13 +607,13 @@ public abstract class QueryParserBase extends QueryBuilder implements CommonQuer
     if (part1 == null) {
       start = null;
     } else {
-      start = analyzeRangeTerms ? analyzeMultitermTerm(field, part1) : new BytesRef(part1);
+      start = getAnalyzer().normalize(field, part1);
     }
      
     if (part2 == null) {
       end = null;
     } else {
-      end = analyzeRangeTerms ? analyzeMultitermTerm(field, part2) : new BytesRef(part2);
+      end = getAnalyzer().normalize(field, part2);
     }
       
     final TermRangeQuery query = new TermRangeQuery(field, start, end, startInclusive, endInclusive);
@@ -767,13 +693,38 @@ public abstract class QueryParserBase extends QueryBuilder implements CommonQuer
     }
     if (!allowLeadingWildcard && (termStr.startsWith("*") || termStr.startsWith("?")))
       throw new ParseException("'*' or '?' not allowed as first character in WildcardQuery");
-    if (lowercaseExpandedTerms) {
-      termStr = termStr.toLowerCase(locale);
-    }
-    Term t = new Term(field, termStr);
+
+    Term t = new Term(field, analyzeWildcard(field, termStr));
     return newWildcardQuery(t);
   }
 
+  private static final Pattern WILDCARD_PATTERN = Pattern.compile("(\\\\.)|([?*]+)");
+
+  private BytesRef analyzeWildcard(String field, String termStr) {
+    // best effort to not pass the wildcard characters and escaped characters through #normalize
+    Matcher wildcardMatcher = WILDCARD_PATTERN.matcher(termStr);
+    BytesRefBuilder sb = new BytesRefBuilder();
+    int last = 0;
+
+    while (wildcardMatcher.find()){
+      if (wildcardMatcher.start() > 0) {
+        String chunk = termStr.substring(last, wildcardMatcher.start());
+        BytesRef normalized = getAnalyzer().normalize(field, chunk);
+        sb.append(normalized);
+      }
+      //append the matched group - without normalizing
+      sb.append(new BytesRef(wildcardMatcher.group()));
+
+      last = wildcardMatcher.end();
+    }
+    if (last < termStr.length()){
+      String chunk = termStr.substring(last);
+      BytesRef normalized = getAnalyzer().normalize(field, chunk);
+      sb.append(normalized);
+    }
+    return sb.toBytesRef();
+  }
+
   /**
    * Factory method for generating a query. Called when parser
    * parses an input term token that contains a regular expression
@@ -796,10 +747,11 @@ public abstract class QueryParserBase extends QueryBuilder implements CommonQuer
    */
   protected Query getRegexpQuery(String field, String termStr) throws ParseException
   {
-    if (lowercaseExpandedTerms) {
-      termStr = termStr.toLowerCase(locale);
-    }
-    Term t = new Term(field, termStr);
+    // We need to pass the whole string to #normalize, which will not work with
+    // custom attribute factories for the binary term impl, and may not work
+    // with some analyzers
+    BytesRef term = getAnalyzer().normalize(field, termStr);
+    Term t = new Term(field, term);
     return newRegexpQuery(t);
   }
 
@@ -830,10 +782,8 @@ public abstract class QueryParserBase extends QueryBuilder implements CommonQuer
   {
     if (!allowLeadingWildcard && termStr.startsWith("*"))
       throw new ParseException("'*' not allowed as first character in PrefixQuery");
-    if (lowercaseExpandedTerms) {
-      termStr = termStr.toLowerCase(locale);
-    }
-    Term t = new Term(field, termStr);
+    BytesRef term = getAnalyzer().normalize(field, termStr);
+    Term t = new Term(field, term);
     return newPrefixQuery(t);
   }
 
@@ -850,10 +800,8 @@ public abstract class QueryParserBase extends QueryBuilder implements CommonQuer
    */
   protected Query getFuzzyQuery(String field, String termStr, float minSimilarity) throws ParseException
   {
-    if (lowercaseExpandedTerms) {
-      termStr = termStr.toLowerCase(locale);
-    }
-    Term t = new Term(field, termStr);
+    BytesRef term = getAnalyzer().normalize(field, termStr);
+    Term t = new Term(field, term);
     return newFuzzyQuery(t, minSimilarity, fuzzyPrefixLength);
   }
 

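The new analyzeWildcard above splits the term on WILDCARD_PATTERN so that only the literal chunks go through Analyzer#normalize, while wildcard runs and escaped characters are appended verbatim. A standalone sketch of that chunking; the sample input is illustrative only:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class WildcardChunkSketch {
      // same pattern as above: group(1) = an escaped character, group(2) = a run of wildcards
      private static final Pattern WILDCARD_PATTERN = Pattern.compile("(\\\\.)|([?*]+)");

      public static void main(String[] args) {
        String termStr = "Wild?card*Term";
        Matcher m = WILDCARD_PATTERN.matcher(termStr);
        int last = 0;
        while (m.find()) {
          if (m.start() > 0) {
            // this literal chunk would be handed to Analyzer#normalize
            System.out.println("normalize: " + termStr.substring(last, m.start()));
          }
          // wildcards and escaped characters are kept as-is
          System.out.println("keep:      " + m.group());
          last = m.end();
        }
        if (last < termStr.length()) {
          System.out.println("normalize: " + termStr.substring(last));
        }
      }
    }
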
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/queryparser/src/java/org/apache/lucene/queryparser/complexPhrase/ComplexPhraseQueryParser.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/complexPhrase/ComplexPhraseQueryParser.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/complexPhrase/ComplexPhraseQueryParser.java
index ac808d7..1a7e5e1 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/complexPhrase/ComplexPhraseQueryParser.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/complexPhrase/ComplexPhraseQueryParser.java
@@ -33,9 +33,9 @@ import org.apache.lucene.search.BoostQuery;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.MultiTermQuery;
+import org.apache.lucene.search.MultiTermQuery.RewriteMethod;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.TermRangeQuery;
 import org.apache.lucene.search.spans.SpanBoostQuery;
 import org.apache.lucene.search.spans.SpanNearQuery;
 import org.apache.lucene.search.spans.SpanNotQuery;
@@ -186,14 +186,15 @@ public class ComplexPhraseQueryParser extends QueryParser {
   @Override
   protected Query newRangeQuery(String field, String part1, String part2,
       boolean startInclusive, boolean endInclusive) {
-    if (isPass2ResolvingPhrases) {
-      // Must use old-style RangeQuery in order to produce a BooleanQuery
-      // that can be turned into SpanOr clause
-      TermRangeQuery rangeQuery = TermRangeQuery.newStringRange(field, part1, part2, startInclusive, endInclusive);
-      rangeQuery.setRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_REWRITE);
-      return rangeQuery;
+    RewriteMethod originalRewriteMethod = getMultiTermRewriteMethod();
+    try {
+      if (isPass2ResolvingPhrases) {
+        setMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_REWRITE);
+      }
+      return super.newRangeQuery(field, part1, part2, startInclusive, endInclusive);
+    } finally {
+      setMultiTermRewriteMethod(originalRewriteMethod);
     }
-    return super.newRangeQuery(field, part1, part2, startInclusive, endInclusive);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/CommonQueryParserConfiguration.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/CommonQueryParserConfiguration.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/CommonQueryParserConfiguration.java
index 55e43cd..c44e9e0 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/CommonQueryParserConfiguration.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/CommonQueryParserConfiguration.java
@@ -32,18 +32,6 @@ import org.apache.lucene.search.MultiTermQuery;
 public interface CommonQueryParserConfiguration {
   
   /**
-   * Whether terms of multi-term queries (e.g., wildcard,
-   * prefix, fuzzy and range) should be automatically
-   * lower-cased or not.  Default is <code>true</code>.
-   */
-  public void setLowercaseExpandedTerms(boolean lowercaseExpandedTerms);
-  
-  /**
-   * @see #setLowercaseExpandedTerms(boolean)
-   */
-  public boolean getLowercaseExpandedTerms();
-  
-  /**
    * Set to <code>true</code> to allow leading wildcard characters.
    * <p>
    * When set, <code>*</code> or <code>?</code> are allowed as the first

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/StandardQueryParser.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/StandardQueryParser.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/StandardQueryParser.java
index 2cd8084..32cbd02 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/StandardQueryParser.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/StandardQueryParser.java
@@ -190,36 +190,6 @@ public class StandardQueryParser extends QueryParserHelper implements CommonQuer
    * Default: false.
    */
   @Override
-  public void setLowercaseExpandedTerms(boolean lowercaseExpandedTerms) {
-    getQueryConfigHandler().set(ConfigurationKeys.LOWERCASE_EXPANDED_TERMS, lowercaseExpandedTerms);
-  }
-  
-  /**
-   * @see #setLowercaseExpandedTerms(boolean)
-   */
-  @Override
-  public boolean getLowercaseExpandedTerms() {
-    Boolean lowercaseExpandedTerms = getQueryConfigHandler().get(ConfigurationKeys.LOWERCASE_EXPANDED_TERMS);
-    
-    if (lowercaseExpandedTerms == null) {
-      return true;
-      
-    } else {
-      return lowercaseExpandedTerms;
-    }
-    
-  }
-  
-  /**
-   * Set to <code>true</code> to allow leading wildcard characters.
-   * <p>
-   * When set, <code>*</code> or <code>?</code> are allowed as the first
-   * character of a PrefixQuery and WildcardQuery. Note that this can produce
-   * very slow queries on big indexes.
-   * <p>
-   * Default: false.
-   */
-  @Override
   public void setAllowLeadingWildcard(boolean allowLeadingWildcard) {
     getQueryConfigHandler().set(ConfigurationKeys.ALLOW_LEADING_WILDCARD, allowLeadingWildcard);
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/StandardQueryConfigHandler.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/StandardQueryConfigHandler.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/StandardQueryConfigHandler.java
index bba95ee..5c53d02 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/StandardQueryConfigHandler.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/StandardQueryConfigHandler.java
@@ -55,14 +55,6 @@ public class StandardQueryConfigHandler extends QueryConfigHandler {
      * @see StandardQueryParser#getEnablePositionIncrements()
      */
     final public static ConfigurationKey<Boolean> ENABLE_POSITION_INCREMENTS = ConfigurationKey.newInstance();
-    
-    /**
-     * Key used to set whether expanded terms should be lower-cased
-     * 
-     * @see StandardQueryParser#setLowercaseExpandedTerms(boolean)
-     * @see StandardQueryParser#getLowercaseExpandedTerms()
-     */
-    final public static ConfigurationKey<Boolean> LOWERCASE_EXPANDED_TERMS = ConfigurationKey.newInstance();
 
     /**
      * Key used to set whether leading wildcards are supported
@@ -223,7 +215,6 @@ public class StandardQueryConfigHandler extends QueryConfigHandler {
     set(ConfigurationKeys.ANALYZER, null); //default value 2.4
     set(ConfigurationKeys.DEFAULT_OPERATOR, Operator.OR);
     set(ConfigurationKeys.PHRASE_SLOP, 0); //default value 2.4
-    set(ConfigurationKeys.LOWERCASE_EXPANDED_TERMS, true); //default value 2.4
     set(ConfigurationKeys.ENABLE_POSITION_INCREMENTS, false); //default value 2.4
     set(ConfigurationKeys.FIELD_BOOST_MAP, new LinkedHashMap<String, Float>());
     set(ConfigurationKeys.FUZZY_CONFIG, new FuzzyConfig());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/FuzzyQueryNodeProcessor.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/FuzzyQueryNodeProcessor.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/FuzzyQueryNodeProcessor.java
index 0b8a9a7..9479fcf 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/FuzzyQueryNodeProcessor.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/FuzzyQueryNodeProcessor.java
@@ -18,6 +18,7 @@ package org.apache.lucene.queryparser.flexible.standard.processors;
 
 import java.util.List;
 
+import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
 import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler;
 import org.apache.lucene.queryparser.flexible.core.nodes.FuzzyQueryNode;
@@ -55,9 +56,17 @@ public class FuzzyQueryNodeProcessor extends QueryNodeProcessorImpl {
       FuzzyQueryNode fuzzyNode = (FuzzyQueryNode) node;
       QueryConfigHandler config = getQueryConfigHandler();
 
+      Analyzer analyzer = getQueryConfigHandler().get(ConfigurationKeys.ANALYZER);
+      if (analyzer != null) {
+        // because we call utf8ToString, this will only work with the default TermToBytesRefAttribute
+        String text = fuzzyNode.getTextAsString();
+        text = analyzer.normalize(fuzzyNode.getFieldAsString(), text).utf8ToString();
+        fuzzyNode.setText(text);
+      }
+
       FuzzyConfig fuzzyConfig = null;
       
-      if (config != null && (fuzzyConfig = config.get(ConfigurationKeys.FUZZY_CONFIG)) != null) {
+      if ((fuzzyConfig = config.get(ConfigurationKeys.FUZZY_CONFIG)) != null) {
         fuzzyNode.setPrefixLength(fuzzyConfig.getPrefixLength());
 
         if (fuzzyNode.getSimilarity() < 0) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/LowercaseExpandedTermsQueryNodeProcessor.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/LowercaseExpandedTermsQueryNodeProcessor.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/LowercaseExpandedTermsQueryNodeProcessor.java
deleted file mode 100644
index 3bb2075..0000000
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/LowercaseExpandedTermsQueryNodeProcessor.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.queryparser.flexible.standard.processors;
-
-import java.util.List;
-import java.util.Locale;
-
-import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
-import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler;
-import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode;
-import org.apache.lucene.queryparser.flexible.core.nodes.FuzzyQueryNode;
-import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode;
-import org.apache.lucene.queryparser.flexible.core.nodes.RangeQueryNode;
-import org.apache.lucene.queryparser.flexible.core.nodes.TextableQueryNode;
-import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl;
-import org.apache.lucene.queryparser.flexible.core.util.UnescapedCharSequence;
-import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.ConfigurationKeys;
-import org.apache.lucene.queryparser.flexible.standard.nodes.RegexpQueryNode;
-import org.apache.lucene.queryparser.flexible.standard.nodes.WildcardQueryNode;
-
-/**
- * This processor verifies if 
- * {@link ConfigurationKeys#LOWERCASE_EXPANDED_TERMS} is defined in the
- * {@link QueryConfigHandler}. If it is and the expanded terms should be
- * lower-cased, it looks for every {@link WildcardQueryNode},
- * {@link FuzzyQueryNode} and children of a {@link RangeQueryNode} and lower-case its
- * term.
- * 
- * @see ConfigurationKeys#LOWERCASE_EXPANDED_TERMS
- */
-public class LowercaseExpandedTermsQueryNodeProcessor extends
-    QueryNodeProcessorImpl {
-
-  public LowercaseExpandedTermsQueryNodeProcessor() {
-  }
-
-  @Override
-  public QueryNode process(QueryNode queryTree) throws QueryNodeException {
-    Boolean lowercaseExpandedTerms = getQueryConfigHandler().get(ConfigurationKeys.LOWERCASE_EXPANDED_TERMS);
-
-    if (lowercaseExpandedTerms != null && lowercaseExpandedTerms) {
-      return super.process(queryTree);
-    }
-
-    return queryTree;
-
-  }
-
-  @Override
-  protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException {
-    
-    Locale locale = getQueryConfigHandler().get(ConfigurationKeys.LOCALE);
-    if (locale == null) {
-      locale = Locale.getDefault();
-    }
-
-    if (node instanceof WildcardQueryNode
-        || node instanceof FuzzyQueryNode
-        || (node instanceof FieldQueryNode && node.getParent() instanceof RangeQueryNode)
-        || node instanceof RegexpQueryNode) {
-
-      TextableQueryNode txtNode = (TextableQueryNode) node;
-      CharSequence text = txtNode.getText();
-      txtNode.setText(text != null ? UnescapedCharSequence.toLowerCase(text, locale) : null);
-    }
-
-    return node;
-
-  }
-
-  @Override
-  protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException {
-
-    return node;
-
-  }
-
-  @Override
-  protected List<QueryNode> setChildrenOrder(List<QueryNode> children)
-      throws QueryNodeException {
-
-    return children;
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/RegexpQueryNodeProcessor.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/RegexpQueryNodeProcessor.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/RegexpQueryNodeProcessor.java
new file mode 100644
index 0000000..652de87
--- /dev/null
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/RegexpQueryNodeProcessor.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.queryparser.flexible.standard.processors;
+
+import java.util.List;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
+import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode;
+import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl;
+import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.ConfigurationKeys;
+import org.apache.lucene.queryparser.flexible.standard.nodes.RegexpQueryNode;
+
+/** Processor for Regexp queries. */
+public class RegexpQueryNodeProcessor extends QueryNodeProcessorImpl {
+
+  @Override
+  protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException {
+    return node;
+  }
+
+  @Override
+  protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException {
+    if (node instanceof RegexpQueryNode) {
+      RegexpQueryNode regexpNode = (RegexpQueryNode) node;
+      Analyzer analyzer = getQueryConfigHandler().get(ConfigurationKeys.ANALYZER);
+      if (analyzer != null) {
+        String text = regexpNode.getText().toString();
+        // because we call utf8ToString, this will only work with the default TermToBytesRefAttribute
+        text = analyzer.normalize(regexpNode.getFieldAsString(), text).utf8ToString();
+        regexpNode.setText(text);
+      }
+    }
+    return node;
+  }
+
+  @Override
+  protected List<QueryNode> setChildrenOrder(List<QueryNode> children) throws QueryNodeException {
+    return children;
+  }
+
+}

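The pipeline change below registers this processor, so the flexible StandardQueryParser normalizes regexp terms through the field analyzer instead of blindly lower-casing them. A minimal sketch; the field name and expected output are illustrative only:

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.queryparser.flexible.standard.StandardQueryParser;
    import org.apache.lucene.search.Query;

    public class FlexibleRegexpSketch {
      public static void main(String[] args) throws Exception {
        StandardQueryParser parser = new StandardQueryParser(new StandardAnalyzer());
        // the regexp text is passed through Analyzer#normalize (lower-cased by StandardAnalyzer)
        Query q = parser.parse("/Foo[0-9]+/", "title");
        System.out.println(q); // expected something like title:/foo[0-9]+/
      }
    }
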
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/StandardQueryNodeProcessorPipeline.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/StandardQueryNodeProcessorPipeline.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/StandardQueryNodeProcessorPipeline.java
index 38a9a47..5b681b4 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/StandardQueryNodeProcessorPipeline.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/StandardQueryNodeProcessorPipeline.java
@@ -47,16 +47,16 @@ public class StandardQueryNodeProcessorPipeline extends
   public StandardQueryNodeProcessorPipeline(QueryConfigHandler queryConfig) {
     super(queryConfig);
 
-    add(new WildcardQueryNodeProcessor());    
+    add(new WildcardQueryNodeProcessor());   
     add(new MultiFieldQueryNodeProcessor());
     add(new FuzzyQueryNodeProcessor());
+    add(new RegexpQueryNodeProcessor());
     add(new MatchAllDocsQueryNodeProcessor());
     add(new OpenRangeQueryNodeProcessor());
     add(new LegacyNumericQueryNodeProcessor());
     add(new LegacyNumericRangeQueryNodeProcessor());
     add(new PointQueryNodeProcessor());
     add(new PointRangeQueryNodeProcessor());
-    add(new LowercaseExpandedTermsQueryNodeProcessor());
     add(new TermRangeQueryNodeProcessor());
     add(new AllowLeadingWildcardProcessor());    
     add(new AnalyzerQueryNodeProcessor());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/TermRangeQueryNodeProcessor.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/TermRangeQueryNodeProcessor.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/TermRangeQueryNodeProcessor.java
index f9a4583..557c605 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/TermRangeQueryNodeProcessor.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/TermRangeQueryNodeProcessor.java
@@ -23,6 +23,7 @@ import java.util.List;
 import java.util.Locale;
 import java.util.TimeZone;
 
+import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.document.DateTools;
 import org.apache.lucene.document.DateTools.Resolution;
 import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
@@ -134,7 +135,15 @@ public class TermRangeQueryNodeProcessor extends QueryNodeProcessorImpl {
         }
         
       } catch (Exception e) {
-        // do nothing
+        // not a date
+        Analyzer analyzer = getQueryConfigHandler().get(ConfigurationKeys.ANALYZER);
+        if (analyzer != null) {
+          // because we call utf8ToString, this will only work with the default TermToBytesRefAttribute
+          part1 = analyzer.normalize(lower.getFieldAsString(), part1).utf8ToString();
+          part2 = analyzer.normalize(lower.getFieldAsString(), part2).utf8ToString();
+          lower.setText(part1);
+          upper.setText(part2);
+        }
       }
       
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/WildcardQueryNodeProcessor.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/WildcardQueryNodeProcessor.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/WildcardQueryNodeProcessor.java
index 7182575..39eb0df 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/WildcardQueryNodeProcessor.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/WildcardQueryNodeProcessor.java
@@ -17,7 +17,10 @@
 package org.apache.lucene.queryparser.flexible.standard.processors;
 
 import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
+import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
 import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode;
 import org.apache.lucene.queryparser.flexible.core.nodes.FuzzyQueryNode;
@@ -25,11 +28,13 @@ import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode;
 import org.apache.lucene.queryparser.flexible.core.nodes.QuotedFieldQueryNode;
 import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl;
 import org.apache.lucene.queryparser.flexible.core.util.UnescapedCharSequence;
+import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.ConfigurationKeys;
 import org.apache.lucene.queryparser.flexible.standard.nodes.PrefixWildcardQueryNode;
 import org.apache.lucene.queryparser.flexible.standard.nodes.TermRangeQueryNode;
 import org.apache.lucene.queryparser.flexible.standard.nodes.WildcardQueryNode;
 import org.apache.lucene.queryparser.flexible.standard.parser.StandardSyntaxParser;
 import org.apache.lucene.search.PrefixQuery;
+import org.apache.lucene.util.BytesRef;
 
 /**
  * The {@link StandardSyntaxParser} creates {@link PrefixWildcardQueryNode} nodes which
@@ -43,6 +48,39 @@ import org.apache.lucene.search.PrefixQuery;
  */
 public class WildcardQueryNodeProcessor extends QueryNodeProcessorImpl {
 
+  private static final Pattern WILDCARD_PATTERN = Pattern.compile("(\\.)|([?*]+)");
+
+  // because we call utf8ToString, this will only work with the default TermToBytesRefAttribute
+  private static String analyzeWildcard(Analyzer a, String field, String wildcard) {
+    // best effort to not pass the wildcard characters through #normalize
+    Matcher wildcardMatcher = WILDCARD_PATTERN.matcher(wildcard);
+    StringBuilder sb = new StringBuilder();
+    int last = 0;
+
+    while (wildcardMatcher.find()){
+      // continue if escaped char
+      if (wildcardMatcher.group(1) != null){
+        continue;
+      }
+
+      if (wildcardMatcher.start() > 0){
+        String chunk = wildcard.substring(last, wildcardMatcher.start());
+        BytesRef normalized = a.normalize(field, chunk);
+        sb.append(normalized.utf8ToString());
+      }
+      //append the wildcard character
+      sb.append(wildcardMatcher.group(2));
+
+      last = wildcardMatcher.end();
+    }
+    if (last < wildcard.length()){
+      String chunk = wildcard.substring(last);
+      BytesRef normalized = a.normalize(field, chunk);
+      sb.append(normalized.utf8ToString());
+    }
+    return sb.toString();
+  }
+
   public WildcardQueryNodeProcessor() {
     // empty constructor
   }
@@ -67,15 +105,19 @@ public class WildcardQueryNodeProcessor extends QueryNodeProcessorImpl {
       
       // Code below simulates the old lucene parser behavior for wildcards
       
-      if (isPrefixWildcard(text)) {        
-        PrefixWildcardQueryNode prefixWildcardQN = new PrefixWildcardQueryNode(fqn);
-        return prefixWildcardQN;
-        
-      } else if (isWildcard(text)){
-        WildcardQueryNode wildcardQN = new WildcardQueryNode(fqn);
-        return wildcardQN;
+      
+      if (isWildcard(text)) {
+        Analyzer analyzer = getQueryConfigHandler().get(ConfigurationKeys.ANALYZER);
+        if (analyzer != null) {
+          text = analyzeWildcard(analyzer, fqn.getFieldAsString(), text.toString());
+        }
+        if (isPrefixWildcard(text)) {
+          return new PrefixWildcardQueryNode(fqn.getField(), text, fqn.getBegin(), fqn.getEnd());
+        } else {
+          return new WildcardQueryNode(fqn.getField(), text, fqn.getBegin(), fqn.getEnd());
+        }
       }
-             
+
     }
 
     return node;
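
A self-contained sketch of the chunking idea used by analyzeWildcard() above: the text between wildcard runs is normalized while the wildcard characters themselves pass through untouched. String.toLowerCase() stands in for Analyzer#normalize and escape handling is omitted, so this only approximates the processor's behavior:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class WildcardChunkingSketch {
  // Simplified: split on runs of '?' and '*'; escaped characters are not handled here.
  private static final Pattern WILDCARDS = Pattern.compile("[?*]+");

  // String.toLowerCase() stands in for Analyzer#normalize(field, chunk).
  static String normalizeWildcard(String wildcard) {
    Matcher m = WILDCARDS.matcher(wildcard);
    StringBuilder sb = new StringBuilder();
    int last = 0;
    while (m.find()) {
      // normalize the chunk before the wildcard run ...
      sb.append(wildcard.substring(last, m.start()).toLowerCase());
      // ... and append the wildcard characters unchanged
      sb.append(m.group());
      last = m.end();
    }
    // trailing chunk after the last wildcard run, if any
    sb.append(wildcard.substring(last).toLowerCase());
    return sb.toString();
  }

  public static void main(String[] args) {
    System.out.println(normalizeWildcard("Te?m*gerM")); // prints te?m*germ
  }
}

Running it prints te?m*germ, which lines up with the wildcard expectations in the parser tests further down in this mail.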

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/queryparser/src/java/org/apache/lucene/queryparser/simple/SimpleQueryParser.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/simple/SimpleQueryParser.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/simple/SimpleQueryParser.java
index 3f9d9a4..a417d1b 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/simple/SimpleQueryParser.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/simple/SimpleQueryParser.java
@@ -26,6 +26,7 @@ import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.PrefixQuery;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.QueryBuilder;
 import org.apache.lucene.util.automaton.LevenshteinAutomata;
 
@@ -548,7 +549,9 @@ public class SimpleQueryParser extends QueryBuilder {
   protected Query newFuzzyQuery(String text, int fuzziness) {
     BooleanQuery.Builder bq = new BooleanQuery.Builder();
     for (Map.Entry<String,Float> entry : weights.entrySet()) {
-      Query q = new FuzzyQuery(new Term(entry.getKey(), text), fuzziness);
+      final String fieldName = entry.getKey();
+      final BytesRef term = getAnalyzer().normalize(fieldName, text);
+      Query q = new FuzzyQuery(new Term(fieldName, term), fuzziness);
       float boost = entry.getValue();
       if (boost != 1f) {
         q = new BoostQuery(q, boost);
@@ -582,7 +585,9 @@ public class SimpleQueryParser extends QueryBuilder {
   protected Query newPrefixQuery(String text) {
     BooleanQuery.Builder bq = new BooleanQuery.Builder();
     for (Map.Entry<String,Float> entry : weights.entrySet()) {
-      Query q = new PrefixQuery(new Term(entry.getKey(), text));
+      final String fieldName = entry.getKey();
+      final BytesRef term = getAnalyzer().normalize(fieldName, text);
+      Query q = new PrefixQuery(new Term(fieldName, term));
       float boost = entry.getValue();
       if (boost != 1f) {
         q = new BoostQuery(q, boost);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/queryparser/src/test/org/apache/lucene/queryparser/analyzing/TestAnalyzingQueryParser.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/analyzing/TestAnalyzingQueryParser.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/analyzing/TestAnalyzingQueryParser.java
deleted file mode 100644
index bf5f69f..0000000
--- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/analyzing/TestAnalyzingQueryParser.java
+++ /dev/null
@@ -1,268 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.queryparser.analyzing;
-
-import java.io.IOException;
-import java.util.Map;
-import java.util.TreeMap;
-
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.analysis.MockBytesAnalyzer;
-import org.apache.lucene.analysis.MockTokenFilter;
-import org.apache.lucene.analysis.MockTokenizer;
-import org.apache.lucene.analysis.TokenFilter;
-import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.Tokenizer;
-import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.FieldType;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexOptions;
-import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.queryparser.classic.ParseException;
-import org.apache.lucene.queryparser.classic.QueryParser;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.LuceneTestCase;
-
-/**
- */
-public class TestAnalyzingQueryParser extends LuceneTestCase {
-  private final static String FIELD = "field";
-   
-  private Analyzer a;
-
-  private String[] wildcardInput;
-  private String[] wildcardExpected;
-  private String[] prefixInput;
-  private String[] prefixExpected;
-  private String[] rangeInput;
-  private String[] rangeExpected;
-  private String[] fuzzyInput;
-  private String[] fuzzyExpected;
-
-  private Map<String, String> wildcardEscapeHits = new TreeMap<>();
-  private Map<String, String> wildcardEscapeMisses = new TreeMap<>();
-
-  @Override
-  public void setUp() throws Exception {
-    super.setUp();
-    wildcardInput = new String[] { "*bersetzung über*ung",
-        "Mötley Cr\u00fce Mötl?* Crü?", "Renée Zellweger Ren?? Zellw?ger" };
-    wildcardExpected = new String[] { "*bersetzung uber*ung", "motley crue motl?* cru?",
-        "renee zellweger ren?? zellw?ger" };
-
-    prefixInput = new String[] { "übersetzung übersetz*",
-        "Mötley Crüe Mötl* crü*", "René? Zellw*" };
-    prefixExpected = new String[] { "ubersetzung ubersetz*", "motley crue motl* cru*",
-        "rene? zellw*" };
-
-    rangeInput = new String[] { "[aa TO bb]", "{Anaïs TO Zoé}" };
-    rangeExpected = new String[] { "[aa TO bb]", "{anais TO zoe}" };
-
-    fuzzyInput = new String[] { "Übersetzung Übersetzung~0.9",
-        "Mötley Crüe Mötley~0.75 Crüe~0.5",
-        "Renée Zellweger Renée~0.9 Zellweger~" };
-    fuzzyExpected = new String[] { "ubersetzung ubersetzung~1",
-        "motley crue motley~1 crue~2", "renee zellweger renee~0 zellweger~2" };
-
-    wildcardEscapeHits.put("m�*tley", "moatley");
-
-    // need to have at least one genuine wildcard to trigger the wildcard analysis
-    // hence the * before the y
-    wildcardEscapeHits.put("m�\\*tl*y", "mo*tley");
-
-    // escaped backslash then true wildcard
-    wildcardEscapeHits.put("m�\\\\*tley", "mo\\atley");
-    
-    // escaped wildcard then true wildcard
-    wildcardEscapeHits.put("m�\\??ley", "mo?tley");
-
-    // the first is an escaped * which should yield a miss
-    wildcardEscapeMisses.put("mö\\*tl*y", "moatley");
-      
-    a = new ASCIIAnalyzer();
-  }
-
-  public void testSingleChunkExceptions() {
-    String termStr = "the*tre";
-      
-    Analyzer stopsAnalyzer = new MockAnalyzer
-        (random(), MockTokenizer.WHITESPACE, true, MockTokenFilter.ENGLISH_STOPSET);
-
-    ParseException expected = expectThrows(ParseException.class, () -> {
-      parseWithAnalyzingQueryParser(termStr, stopsAnalyzer, true);
-    });
-    assertTrue(expected.getMessage().contains("returned nothing"));
-     
-    AnalyzingQueryParser qp = new AnalyzingQueryParser(FIELD, a);
-    expected = expectThrows(ParseException.class, () -> {
-      qp.analyzeSingleChunk(FIELD, "", "not a single chunk");
-    });
-    assertTrue(expected.getMessage().contains("multiple terms"));
-  }
-   
-  public void testWildcardAlone() throws ParseException {
-    //seems like crazy edge case, but can be useful in concordance 
-    expectThrows(ParseException.class, () -> {
-      getAnalyzedQuery("*", a, false);
-    });
-      
-    String qString = parseWithAnalyzingQueryParser("*", a, true);
-    assertEquals("Every word", "*", qString);
-  }
-  public void testWildCardEscapes() throws ParseException, IOException {
-
-    for (Map.Entry<String, String> entry : wildcardEscapeHits.entrySet()){
-      Query q = getAnalyzedQuery(entry.getKey(), a, false);
-      assertEquals("WildcardEscapeHits: " + entry.getKey(), true, isAHit(q, entry.getValue(), a));
-    }
-    for (Map.Entry<String, String> entry : wildcardEscapeMisses.entrySet()){
-      Query q = getAnalyzedQuery(entry.getKey(), a, false);
-      assertEquals("WildcardEscapeMisses: " + entry.getKey(), false, isAHit(q, entry.getValue(), a));
-    }
-
-  }
-  public void testWildCardQueryNoLeadingAllowed() {
-    expectThrows(ParseException.class, () -> {
-      parseWithAnalyzingQueryParser(wildcardInput[0], a, false);
-    });
-  }
-
-  public void testWildCardQuery() throws ParseException {
-    for (int i = 0; i < wildcardInput.length; i++) {
-      assertEquals("Testing wildcards with analyzer " + a.getClass() + ", input string: "
-          + wildcardInput[i], wildcardExpected[i], parseWithAnalyzingQueryParser(wildcardInput[i], a, true));
-    }
-  }
-
-
-  public void testPrefixQuery() throws ParseException {
-    for (int i = 0; i < prefixInput.length; i++) {
-      assertEquals("Testing prefixes with analyzer " + a.getClass() + ", input string: "
-          + prefixInput[i], prefixExpected[i], parseWithAnalyzingQueryParser(prefixInput[i], a, false));
-    }
-  }
-
-  public void testRangeQuery() throws ParseException {
-    for (int i = 0; i < rangeInput.length; i++) {
-      assertEquals("Testing ranges with analyzer " + a.getClass() + ", input string: "
-          + rangeInput[i], rangeExpected[i], parseWithAnalyzingQueryParser(rangeInput[i], a, false));
-    }
-  }
-
-  public void testFuzzyQuery() throws ParseException {
-    for (int i = 0; i < fuzzyInput.length; i++) {
-      assertEquals("Testing fuzzys with analyzer " + a.getClass() + ", input string: "
-          + fuzzyInput[i], fuzzyExpected[i], parseWithAnalyzingQueryParser(fuzzyInput[i], a, false));
-    }
-  }
-
-
-  private String parseWithAnalyzingQueryParser(String s, Analyzer a, boolean allowLeadingWildcard) throws ParseException {
-    Query q = getAnalyzedQuery(s, a, allowLeadingWildcard);
-    return q.toString(FIELD);
-  }
-
-  private Query getAnalyzedQuery(String s, Analyzer a, boolean allowLeadingWildcard) throws ParseException {
-    AnalyzingQueryParser qp = new AnalyzingQueryParser(FIELD, a);
-    qp.setAllowLeadingWildcard(allowLeadingWildcard);
-    org.apache.lucene.search.Query q = qp.parse(s);
-    return q;
-  }
-
-  final static class FoldingFilter extends TokenFilter {
-    final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
-
-    public FoldingFilter(TokenStream input) {
-      super(input);
-    }
-
-    @Override
-    public boolean incrementToken() throws IOException {
-      if (input.incrementToken()) {
-        char term[] = termAtt.buffer();
-        for (int i = 0; i < term.length; i++)
-          switch(term[i]) {
-            case 'ü':
-              term[i] = 'u'; 
-              break;
-            case 'ö': 
-              term[i] = 'o'; 
-              break;
-            case 'é': 
-              term[i] = 'e'; 
-              break;
-            case 'ï': 
-              term[i] = 'i'; 
-              break;
-          }
-        return true;
-      } else {
-        return false;
-      }
-    }
-  }
-
-  final static class ASCIIAnalyzer extends Analyzer {
-    @Override
-    public TokenStreamComponents createComponents(String fieldName) {
-      Tokenizer result = new MockTokenizer(MockTokenizer.WHITESPACE, true);
-      return new TokenStreamComponents(result, new FoldingFilter(result));
-    }
-  }
-   
-
-  // LUCENE-4176
-  public void testByteTerms() throws Exception {
-    String s = "\u0e40\u0e02";
-    Analyzer analyzer = new MockBytesAnalyzer();
-    QueryParser qp = new AnalyzingQueryParser(FIELD, analyzer);
-    Query q = qp.parse("[\u0e40\u0e02 TO \u0e40\u0e02]");
-    assertEquals(true, isAHit(q, s, analyzer));
-  }
-   
-  
-  private boolean isAHit(Query q, String content, Analyzer analyzer) throws IOException{
-    Directory ramDir = newDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(random(), ramDir, analyzer);
-    Document doc = new Document();
-    FieldType fieldType = new FieldType();
-    fieldType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
-    fieldType.setTokenized(true);
-    fieldType.setStored(true);
-    Field field = new Field(FIELD, content, fieldType);
-    doc.add(field);
-    writer.addDocument(doc);
-    writer.close();
-    DirectoryReader ir = DirectoryReader.open(ramDir);
-    IndexSearcher is = new IndexSearcher(ir);
-      
-    int hits = is.search(q, 10).totalHits;
-    ir.close();
-    ramDir.close();
-    if (hits == 1){
-      return true;
-    } else {
-      return false;
-    }
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestQueryParser.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestQueryParser.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestQueryParser.java
index 7a98800..de90e29 100644
--- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestQueryParser.java
+++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestQueryParser.java
@@ -18,6 +18,8 @@ package org.apache.lucene.queryparser.classic;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockBytesAnalyzer;
+import org.apache.lucene.analysis.MockLowerCaseFilter;
 import org.apache.lucene.analysis.MockSynonymAnalyzer;
 import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenFilter;
@@ -25,7 +27,13 @@ import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.DateTools.Resolution;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.queryparser.classic.QueryParser.Operator;
 import org.apache.lucene.queryparser.flexible.standard.CommonQueryParserConfiguration;
@@ -33,11 +41,14 @@ import org.apache.lucene.queryparser.util.QueryParserTestBase;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.BoostQuery;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.MultiPhraseQuery;
 import org.apache.lucene.search.PhraseQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.SynonymQuery;
 import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
 
 import java.io.IOException;
@@ -48,6 +59,7 @@ import java.io.IOException;
 public class TestQueryParser extends QueryParserTestBase {
 
   protected boolean splitOnWhitespace = QueryParser.DEFAULT_SPLIT_ON_WHITESPACE;
+  private static final String FIELD = "field";
 
   public static class QPTestParser extends QueryParser {
     public QPTestParser(String f, Analyzer a) {
@@ -115,14 +127,6 @@ public class TestQueryParser extends QueryParserTestBase {
   }
   
   @Override
-  public void setAnalyzeRangeTerms(CommonQueryParserConfiguration cqpC,
-      boolean value) {
-    assert (cqpC instanceof QueryParser);
-    QueryParser qp = (QueryParser) cqpC;
-    qp.setAnalyzeRangeTerms(value);
-  }
-  
-  @Override
   public void setAutoGeneratePhraseQueries(CommonQueryParserConfiguration cqpC,
       boolean value) {
     assert (cqpC instanceof QueryParser);
@@ -200,7 +204,7 @@ public class TestQueryParser extends QueryParserTestBase {
   @Override
   public void testStarParsing() throws Exception {
     final int[] type = new int[1];
-    QueryParser qp = new QueryParser("field",
+    QueryParser qp = new QueryParser(FIELD,
         new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) {
       @Override
       protected Query getWildcardQuery(String field, String termStr) {
@@ -285,7 +289,7 @@ public class TestQueryParser extends QueryParserTestBase {
     Analyzer morePrecise = new Analyzer2();
     
     public SmartQueryParser() {
-      super("field", new Analyzer1());
+      super(FIELD, new Analyzer1());
     }
     
     @Override
@@ -299,9 +303,9 @@ public class TestQueryParser extends QueryParserTestBase {
   @Override
   public void testNewFieldQuery() throws Exception {
     /** ordinary behavior, synonyms form uncoordinated boolean query */
-    QueryParser dumb = new QueryParser("field",
+    QueryParser dumb = new QueryParser(FIELD,
         new Analyzer1());
-    Query expanded = new SynonymQuery(new Term("field", "dogs"), new Term("field", "dog"));
+    Query expanded = new SynonymQuery(new Term(FIELD, "dogs"), new Term(FIELD, "dog"));
     assertEquals(expanded, dumb.parse("\"dogs\""));
     /** even with the phrase operator the behavior is the same */
     assertEquals(expanded, dumb.parse("dogs"));
@@ -312,14 +316,14 @@ public class TestQueryParser extends QueryParserTestBase {
     QueryParser smart = new SmartQueryParser();
     assertEquals(expanded, smart.parse("dogs"));
     
-    Query unexpanded = new TermQuery(new Term("field", "dogs"));
+    Query unexpanded = new TermQuery(new Term(FIELD, "dogs"));
     assertEquals(unexpanded, smart.parse("\"dogs\""));
   }
 
   /** simple synonyms test */
   public void testSynonyms() throws Exception {
-    Query expected = new SynonymQuery(new Term("field", "dogs"), new Term("field", "dog"));
-    QueryParser qp = new QueryParser("field", new MockSynonymAnalyzer());
+    Query expected = new SynonymQuery(new Term(FIELD, "dogs"), new Term(FIELD, "dog"));
+    QueryParser qp = new QueryParser(FIELD, new MockSynonymAnalyzer());
     assertEquals(expected, qp.parse("dogs"));
     assertEquals(expected, qp.parse("\"dogs\""));
     qp.setDefaultOperator(Operator.AND);
@@ -333,9 +337,9 @@ public class TestQueryParser extends QueryParserTestBase {
   /** forms multiphrase query */
   public void testSynonymsPhrase() throws Exception {
     MultiPhraseQuery.Builder expectedQBuilder = new MultiPhraseQuery.Builder();
-    expectedQBuilder.add(new Term("field", "old"));
-    expectedQBuilder.add(new Term[] { new Term("field", "dogs"), new Term("field", "dog") });
-    QueryParser qp = new QueryParser("field", new MockSynonymAnalyzer());
+    expectedQBuilder.add(new Term(FIELD, "old"));
+    expectedQBuilder.add(new Term[] { new Term(FIELD, "dogs"), new Term(FIELD, "dog") });
+    QueryParser qp = new QueryParser(FIELD, new MockSynonymAnalyzer());
     assertEquals(expectedQBuilder.build(), qp.parse("\"old dogs\""));
     qp.setDefaultOperator(Operator.AND);
     assertEquals(expectedQBuilder.build(), qp.parse("\"old dogs\""));
@@ -387,8 +391,8 @@ public class TestQueryParser extends QueryParserTestBase {
   
   /** simple CJK synonym test */
   public void testCJKSynonym() throws Exception {
-    Query expected = new SynonymQuery(new Term("field", "\u56fd"), new Term("field", "\u570b"));
-    QueryParser qp = new QueryParser("field", new MockCJKSynonymAnalyzer());
+    Query expected = new SynonymQuery(new Term(FIELD, "\u56fd"), new Term(FIELD, "\u570b"));
+    QueryParser qp = new QueryParser(FIELD, new MockCJKSynonymAnalyzer());
     assertEquals(expected, qp.parse("\u56fd"));
     qp.setDefaultOperator(Operator.AND);
     assertEquals(expected, qp.parse("\u56fd"));
@@ -399,11 +403,11 @@ public class TestQueryParser extends QueryParserTestBase {
   /** synonyms with default OR operator */
   public void testCJKSynonymsOR() throws Exception {
     BooleanQuery.Builder expectedB = new BooleanQuery.Builder();
-    expectedB.add(new TermQuery(new Term("field", "\u4e2d")), BooleanClause.Occur.SHOULD);
-    Query inner = new SynonymQuery(new Term("field", "\u56fd"), new Term("field", "\u570b"));
+    expectedB.add(new TermQuery(new Term(FIELD, "\u4e2d")), BooleanClause.Occur.SHOULD);
+    Query inner = new SynonymQuery(new Term(FIELD, "\u56fd"), new Term(FIELD, "\u570b"));
     expectedB.add(inner, BooleanClause.Occur.SHOULD);
     Query expected = expectedB.build();
-    QueryParser qp = new QueryParser("field", new MockCJKSynonymAnalyzer());
+    QueryParser qp = new QueryParser(FIELD, new MockCJKSynonymAnalyzer());
     assertEquals(expected, qp.parse("\u4e2d\u56fd"));
     expected = new BoostQuery(expected, 2f);
     assertEquals(expected, qp.parse("\u4e2d\u56fd^2"));
@@ -412,13 +416,13 @@ public class TestQueryParser extends QueryParserTestBase {
   /** more complex synonyms with default OR operator */
   public void testCJKSynonymsOR2() throws Exception {
     BooleanQuery.Builder expectedB = new BooleanQuery.Builder();
-    expectedB.add(new TermQuery(new Term("field", "\u4e2d")), BooleanClause.Occur.SHOULD);
-    SynonymQuery inner = new SynonymQuery(new Term("field", "\u56fd"), new Term("field", "\u570b"));
+    expectedB.add(new TermQuery(new Term(FIELD, "\u4e2d")), BooleanClause.Occur.SHOULD);
+    SynonymQuery inner = new SynonymQuery(new Term(FIELD, "\u56fd"), new Term(FIELD, "\u570b"));
     expectedB.add(inner, BooleanClause.Occur.SHOULD);
-    SynonymQuery inner2 = new SynonymQuery(new Term("field", "\u56fd"), new Term("field", "\u570b"));
+    SynonymQuery inner2 = new SynonymQuery(new Term(FIELD, "\u56fd"), new Term(FIELD, "\u570b"));
     expectedB.add(inner2, BooleanClause.Occur.SHOULD);
     Query expected = expectedB.build();
-    QueryParser qp = new QueryParser("field", new MockCJKSynonymAnalyzer());
+    QueryParser qp = new QueryParser(FIELD, new MockCJKSynonymAnalyzer());
     assertEquals(expected, qp.parse("\u4e2d\u56fd\u56fd"));
     expected = new BoostQuery(expected, 2f);
     assertEquals(expected, qp.parse("\u4e2d\u56fd\u56fd^2"));
@@ -427,11 +431,11 @@ public class TestQueryParser extends QueryParserTestBase {
   /** synonyms with default AND operator */
   public void testCJKSynonymsAND() throws Exception {
     BooleanQuery.Builder expectedB = new BooleanQuery.Builder();
-    expectedB.add(new TermQuery(new Term("field", "\u4e2d")), BooleanClause.Occur.MUST);
-    Query inner = new SynonymQuery(new Term("field", "\u56fd"), new Term("field", "\u570b"));
+    expectedB.add(new TermQuery(new Term(FIELD, "\u4e2d")), BooleanClause.Occur.MUST);
+    Query inner = new SynonymQuery(new Term(FIELD, "\u56fd"), new Term(FIELD, "\u570b"));
     expectedB.add(inner, BooleanClause.Occur.MUST);
     Query expected = expectedB.build();
-    QueryParser qp = new QueryParser("field", new MockCJKSynonymAnalyzer());
+    QueryParser qp = new QueryParser(FIELD, new MockCJKSynonymAnalyzer());
     qp.setDefaultOperator(Operator.AND);
     assertEquals(expected, qp.parse("\u4e2d\u56fd"));
     expected = new BoostQuery(expected, 2f);
@@ -441,13 +445,13 @@ public class TestQueryParser extends QueryParserTestBase {
   /** more complex synonyms with default AND operator */
   public void testCJKSynonymsAND2() throws Exception {
     BooleanQuery.Builder expectedB = new BooleanQuery.Builder();
-    expectedB.add(new TermQuery(new Term("field", "\u4e2d")), BooleanClause.Occur.MUST);
-    Query inner = new SynonymQuery(new Term("field", "\u56fd"), new Term("field", "\u570b"));
+    expectedB.add(new TermQuery(new Term(FIELD, "\u4e2d")), BooleanClause.Occur.MUST);
+    Query inner = new SynonymQuery(new Term(FIELD, "\u56fd"), new Term(FIELD, "\u570b"));
     expectedB.add(inner, BooleanClause.Occur.MUST);
-    Query inner2 = new SynonymQuery(new Term("field", "\u56fd"), new Term("field", "\u570b"));
+    Query inner2 = new SynonymQuery(new Term(FIELD, "\u56fd"), new Term(FIELD, "\u570b"));
     expectedB.add(inner2, BooleanClause.Occur.MUST);
     Query expected = expectedB.build();
-    QueryParser qp = new QueryParser("field", new MockCJKSynonymAnalyzer());
+    QueryParser qp = new QueryParser(FIELD, new MockCJKSynonymAnalyzer());
     qp.setDefaultOperator(Operator.AND);
     assertEquals(expected, qp.parse("\u4e2d\u56fd\u56fd"));
     expected = new BoostQuery(expected, 2f);
@@ -457,9 +461,9 @@ public class TestQueryParser extends QueryParserTestBase {
   /** forms multiphrase query */
   public void testCJKSynonymsPhrase() throws Exception {
     MultiPhraseQuery.Builder expectedQBuilder = new MultiPhraseQuery.Builder();
-    expectedQBuilder.add(new Term("field", "\u4e2d"));
-    expectedQBuilder.add(new Term[] { new Term("field", "\u56fd"), new Term("field", "\u570b")});
-    QueryParser qp = new QueryParser("field", new MockCJKSynonymAnalyzer());
+    expectedQBuilder.add(new Term(FIELD, "\u4e2d"));
+    expectedQBuilder.add(new Term[] { new Term(FIELD, "\u56fd"), new Term(FIELD, "\u570b")});
+    QueryParser qp = new QueryParser(FIELD, new MockCJKSynonymAnalyzer());
     qp.setDefaultOperator(Operator.AND);
     assertEquals(expectedQBuilder.build(), qp.parse("\"\u4e2d\u56fd\""));
     Query expected = new BoostQuery(expectedQBuilder.build(), 2f);
@@ -471,7 +475,7 @@ public class TestQueryParser extends QueryParserTestBase {
 
   /** LUCENE-6677: make sure wildcard query respects maxDeterminizedStates. */
   public void testWildcardMaxDeterminizedStates() throws Exception {
-    QueryParser qp = new QueryParser("field", new MockAnalyzer(random()));
+    QueryParser qp = new QueryParser(FIELD, new MockAnalyzer(random()));
     qp.setMaxDeterminizedStates(10);
     expectThrows(TooComplexToDeterminizeException.class, () -> {
       qp.parse("a*aaaaaaa");
@@ -703,4 +707,163 @@ public class TestQueryParser extends QueryParserTestBase {
     assertQueryEquals("guinea pig", new MockSynonymAnalyzer(), "Synonym(cavy guinea) pig");
     splitOnWhitespace = oldSplitOnWhitespace;
   }
-}
\ No newline at end of file
+   
+  public void testWildcardAlone() throws ParseException {
+    //seems like crazy edge case, but can be useful in concordance 
+    QueryParser parser = new QueryParser(FIELD, new ASCIIAnalyzer());
+    parser.setAllowLeadingWildcard(false);
+    expectThrows(ParseException.class, () -> {
+      parser.parse("*");
+    });
+
+    QueryParser parser2 = new QueryParser("*", new ASCIIAnalyzer());
+    parser2.setAllowLeadingWildcard(false);
+    assertEquals(new MatchAllDocsQuery(), parser2.parse("*"));
+  }
+
+  public void testWildCardEscapes() throws ParseException, IOException {
+    Analyzer a = new ASCIIAnalyzer();
+    QueryParser parser = new QueryParser(FIELD, a);
+    assertTrue(isAHit(parser.parse("m�*tley"), "moatley", a));
+    // need to have at least one genuine wildcard to trigger the wildcard analysis
+    // hence the * before the y
+    assertTrue(isAHit(parser.parse("m�\\*tl*y"), "mo*tley", a));
+    // escaped backslash then true wildcard
+    assertTrue(isAHit(parser.parse("m�\\\\*tley"), "mo\\atley", a));
+    // escaped wildcard then true wildcard
+    assertTrue(isAHit(parser.parse("m�\\??ley"), "mo?tley", a));
+
+    // the first is an escaped * which should yield a miss
+    assertFalse(isAHit(parser.parse("m�\\*tl*y"), "moatley", a));
+  }
+
+  public void testWildcardDoesNotNormalizeEscapedChars() throws Exception {
+    Analyzer asciiAnalyzer = new ASCIIAnalyzer();
+    Analyzer keywordAnalyzer = new MockAnalyzer(random());
+    QueryParser parser = new QueryParser(FIELD, asciiAnalyzer);
+
+    assertTrue(isAHit(parser.parse("e*e"), "�tude", asciiAnalyzer));
+    assertTrue(isAHit(parser.parse("�*e"), "etude", asciiAnalyzer));
+    assertFalse(isAHit(parser.parse("\\�*e"), "etude", asciiAnalyzer));
+    assertTrue(isAHit(parser.parse("\\�*e"), "�tude", keywordAnalyzer));
+  }
+
+  public void testWildCardQuery() throws ParseException {
+    Analyzer a = new ASCIIAnalyzer();
+    QueryParser parser = new QueryParser(FIELD, a);
+    parser.setAllowLeadingWildcard(true);
+    assertEquals("*bersetzung uber*ung", parser.parse("*bersetzung �ber*ung").toString(FIELD));
+    parser.setAllowLeadingWildcard(false);
+    assertEquals("motley crue motl?* cru?", parser.parse("M�tley Cr\u00fce M�tl?* Cr�?").toString(FIELD));
+    assertEquals("renee zellweger ren?? zellw?ger", parser.parse("Ren�e Zellweger Ren?? Zellw?ger").toString(FIELD));
+  }
+
+
+  public void testPrefixQuery() throws ParseException {
+    Analyzer a = new ASCIIAnalyzer();
+    QueryParser parser = new QueryParser(FIELD, a);
+    assertEquals("ubersetzung ubersetz*", parser.parse("�bersetzung �bersetz*").toString(FIELD));
+    assertEquals("motley crue motl* cru*", parser.parse("M�tley Cr�e M�tl* cr�*").toString(FIELD));
+    assertEquals("rene? zellw*", parser.parse("Ren�? Zellw*").toString(FIELD));
+  }
+
+  public void testRangeQuery() throws ParseException {
+    Analyzer a = new ASCIIAnalyzer();
+    QueryParser parser = new QueryParser(FIELD, a);
+    assertEquals("[aa TO bb]", parser.parse("[aa TO bb]").toString(FIELD));
+    assertEquals("{anais TO zoe}", parser.parse("{Ana�s TO Zo�}").toString(FIELD));
+  }
+
+  public void testFuzzyQuery() throws ParseException {
+    Analyzer a = new ASCIIAnalyzer();
+    QueryParser parser = new QueryParser(FIELD, a);
+    assertEquals("ubersetzung ubersetzung~1", parser.parse("�bersetzung �bersetzung~0.9").toString(FIELD));
+    assertEquals("motley crue motley~1 crue~2", parser.parse("M�tley Cr�e M�tley~0.75 Cr�e~0.5").toString(FIELD));
+    assertEquals("renee zellweger renee~0 zellweger~2", parser.parse("Ren�e Zellweger Ren�e~0.9 Zellweger~").toString(FIELD));
+  }
+
+  final static class FoldingFilter extends TokenFilter {
+    final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
+
+    public FoldingFilter(TokenStream input) {
+      super(input);
+    }
+
+    @Override
+    public boolean incrementToken() throws IOException {
+      if (input.incrementToken()) {
+        char term[] = termAtt.buffer();
+        for (int i = 0; i < term.length; i++)
+          switch(term[i]) {
+            case 'ü':
+              term[i] = 'u'; 
+              break;
+            case 'ö': 
+              term[i] = 'o'; 
+              break;
+            case 'é': 
+              term[i] = 'e'; 
+              break;
+            case 'ï': 
+              term[i] = 'i'; 
+              break;
+          }
+        return true;
+      } else {
+        return false;
+      }
+    }
+  }
+
+  final static class ASCIIAnalyzer extends Analyzer {
+    @Override
+    public TokenStreamComponents createComponents(String fieldName) {
+      Tokenizer result = new MockTokenizer(MockTokenizer.WHITESPACE, true);
+      return new TokenStreamComponents(result, new FoldingFilter(result));
+    }
+    @Override
+    protected TokenStream normalize(String fieldName, TokenStream in) {
+      return new FoldingFilter(new MockLowerCaseFilter(in));
+    }
+  }
+
+  // LUCENE-4176
+  public void testByteTerms() throws Exception {
+    String s = "\u0e40\u0e02";
+    Analyzer analyzer = new MockBytesAnalyzer();
+    QueryParser qp = new QueryParser(FIELD, analyzer);
+
+    assertTrue(isAHit(qp.parse("[\u0e40\u0e02 TO \u0e40\u0e02]"), s, analyzer));
+    assertTrue(isAHit(qp.parse("\u0e40\u0e02~1"), s, analyzer));
+    assertTrue(isAHit(qp.parse("\u0e40\u0e02*"), s, analyzer));
+    assertTrue(isAHit(qp.parse("\u0e40*"), s, analyzer));
+    assertTrue(isAHit(qp.parse("\u0e40??"), s, analyzer));
+  }
+   
+  
+  private boolean isAHit(Query q, String content, Analyzer analyzer) throws IOException{
+    Directory ramDir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), ramDir, analyzer);
+    Document doc = new Document();
+    FieldType fieldType = new FieldType();
+    fieldType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
+    fieldType.setTokenized(true);
+    fieldType.setStored(true);
+    Field field = new Field(FIELD, content, fieldType);
+    doc.add(field);
+    writer.addDocument(doc);
+    writer.close();
+    DirectoryReader ir = DirectoryReader.open(ramDir);
+    IndexSearcher is = new IndexSearcher(ir);
+      
+    int hits = is.search(q, 10).totalHits;
+    ir.close();
+    ramDir.close();
+    if (hits == 1){
+      return true;
+    } else {
+      return false;
+    }
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/precedence/TestPrecedenceQueryParser.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/precedence/TestPrecedenceQueryParser.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/precedence/TestPrecedenceQueryParser.java
index 88e8b9b..d2deaa6 100644
--- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/precedence/TestPrecedenceQueryParser.java
+++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/precedence/TestPrecedenceQueryParser.java
@@ -181,20 +181,7 @@ public class TestPrecedenceQueryParser extends LuceneTestCase {
     }
   }
 
-  public void assertWildcardQueryEquals(String query, boolean lowercase,
-      String result) throws Exception {
-    PrecedenceQueryParser qp = getParser(null);
-    qp.setLowercaseExpandedTerms(lowercase);
-    Query q = qp.parse(query, "field");
-    String s = q.toString("field");
-    if (!s.equals(result)) {
-      fail("WildcardQuery /" + query + "/ yielded /" + s + "/, expecting /"
-          + result + "/");
-    }
-  }
-
-  public void assertWildcardQueryEquals(String query, String result)
-      throws Exception {
+  public void assertWildcardQueryEquals(String query, String result) throws Exception {
     PrecedenceQueryParser qp = getParser(null);
     Query q = qp.parse(query, "field");
     String s = q.toString("field");
@@ -339,36 +326,23 @@ public class TestPrecedenceQueryParser extends LuceneTestCase {
      */
     // First prefix queries:
     // by default, convert to lowercase:
-    assertWildcardQueryEquals("Term*", true, "term*");
+    assertWildcardQueryEquals("Term*", "term*");
     // explicitly set lowercase:
-    assertWildcardQueryEquals("term*", true, "term*");
-    assertWildcardQueryEquals("Term*", true, "term*");
-    assertWildcardQueryEquals("TERM*", true, "term*");
-    // explicitly disable lowercase conversion:
-    assertWildcardQueryEquals("term*", false, "term*");
-    assertWildcardQueryEquals("Term*", false, "Term*");
-    assertWildcardQueryEquals("TERM*", false, "TERM*");
+    assertWildcardQueryEquals("term*", "term*");
+    assertWildcardQueryEquals("Term*", "term*");
+    assertWildcardQueryEquals("TERM*", "term*");
     // Then 'full' wildcard queries:
     // by default, convert to lowercase:
     assertWildcardQueryEquals("Te?m", "te?m");
     // explicitly set lowercase:
-    assertWildcardQueryEquals("te?m", true, "te?m");
-    assertWildcardQueryEquals("Te?m", true, "te?m");
-    assertWildcardQueryEquals("TE?M", true, "te?m");
-    assertWildcardQueryEquals("Te?m*gerM", true, "te?m*germ");
-    // explicitly disable lowercase conversion:
-    assertWildcardQueryEquals("te?m", false, "te?m");
-    assertWildcardQueryEquals("Te?m", false, "Te?m");
-    assertWildcardQueryEquals("TE?M", false, "TE?M");
-    assertWildcardQueryEquals("Te?m*gerM", false, "Te?m*gerM");
+    assertWildcardQueryEquals("te?m", "te?m");
+    assertWildcardQueryEquals("Te?m", "te?m");
+    assertWildcardQueryEquals("TE?M", "te?m");
+    assertWildcardQueryEquals("Te?m*gerM", "te?m*germ");
     // Fuzzy queries:
     assertWildcardQueryEquals("Term~", "term~2");
-    assertWildcardQueryEquals("Term~", true, "term~2");
-    assertWildcardQueryEquals("Term~", false, "Term~2");
     // Range queries:
     assertWildcardQueryEquals("[A TO C]", "[a TO c]");
-    assertWildcardQueryEquals("[A TO C]", true, "[a TO c]");
-    assertWildcardQueryEquals("[A TO C]", false, "[A TO C]");
   }
 
   public void testQPA() throws Exception {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestQPHelper.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestQPHelper.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestQPHelper.java
index 91b799d..2d5ee43 100644
--- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestQPHelper.java
+++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestQPHelper.java
@@ -288,10 +288,9 @@ public class TestQPHelper extends LuceneTestCase {
     }
   }
 
-  public void assertWildcardQueryEquals(String query, boolean lowercase,
+  public void assertWildcardQueryEquals(String query,
       String result, boolean allowLeadingWildcard) throws Exception {
     StandardQueryParser qp = getParser(null);
-    qp.setLowercaseExpandedTerms(lowercase);
     qp.setAllowLeadingWildcard(allowLeadingWildcard);
     Query q = qp.parse(query, "field");
     String s = q.toString("field");
@@ -301,20 +300,9 @@ public class TestQPHelper extends LuceneTestCase {
     }
   }
 
-  public void assertWildcardQueryEquals(String query, boolean lowercase,
+  public void assertWildcardQueryEquals(String query,
       String result) throws Exception {
-    assertWildcardQueryEquals(query, lowercase, result, false);
-  }
-
-  public void assertWildcardQueryEquals(String query, String result)
-      throws Exception {
-    StandardQueryParser qp = getParser(null);
-    Query q = qp.parse(query, "field");
-    String s = q.toString("field");
-    if (!s.equals(result)) {
-      fail("WildcardQuery /" + query + "/ yielded /" + s + "/, expecting /"
-          + result + "/");
-    }
+    assertWildcardQueryEquals(query, result, false);
   }
 
   public Query getQueryDOA(String query, Analyzer a) throws Exception {
@@ -597,32 +585,21 @@ public class TestQPHelper extends LuceneTestCase {
      */
     // First prefix queries:
     // by default, convert to lowercase:
-    assertWildcardQueryEquals("Term*", true, "term*");
+    assertWildcardQueryEquals("Term*", "term*");
     // explicitly set lowercase:
-    assertWildcardQueryEquals("term*", true, "term*");
-    assertWildcardQueryEquals("Term*", true, "term*");
-    assertWildcardQueryEquals("TERM*", true, "term*");
-    // explicitly disable lowercase conversion:
-    assertWildcardQueryEquals("term*", false, "term*");
-    assertWildcardQueryEquals("Term*", false, "Term*");
-    assertWildcardQueryEquals("TERM*", false, "TERM*");
+    assertWildcardQueryEquals("term*", "term*");
+    assertWildcardQueryEquals("Term*", "term*");
+    assertWildcardQueryEquals("TERM*", "term*");
     // Then 'full' wildcard queries:
     // by default, convert to lowercase:
     assertWildcardQueryEquals("Te?m", "te?m");
     // explicitly set lowercase:
-    assertWildcardQueryEquals("te?m", true, "te?m");
-    assertWildcardQueryEquals("Te?m", true, "te?m");
-    assertWildcardQueryEquals("TE?M", true, "te?m");
-    assertWildcardQueryEquals("Te?m*gerM", true, "te?m*germ");
-    // explicitly disable lowercase conversion:
-    assertWildcardQueryEquals("te?m", false, "te?m");
-    assertWildcardQueryEquals("Te?m", false, "Te?m");
-    assertWildcardQueryEquals("TE?M", false, "TE?M");
-    assertWildcardQueryEquals("Te?m*gerM", false, "Te?m*gerM");
+    assertWildcardQueryEquals("te?m", "te?m");
+    assertWildcardQueryEquals("Te?m", "te?m");
+    assertWildcardQueryEquals("TE?M", "te?m");
+    assertWildcardQueryEquals("Te?m*gerM", "te?m*germ");
     // Fuzzy queries:
     assertWildcardQueryEquals("Term~", "term~2");
-    assertWildcardQueryEquals("Term~", true, "term~2");
-    assertWildcardQueryEquals("Term~", false, "Term~2");
     // Range queries:
 
     // TODO: implement this on QueryParser
@@ -630,20 +607,18 @@ public class TestQPHelper extends LuceneTestCase {
     // C]': Lexical error at line 1, column 1. Encountered: "[" (91), after
     // : ""
     assertWildcardQueryEquals("[A TO C]", "[a TO c]");
-    assertWildcardQueryEquals("[A TO C]", true, "[a TO c]");
-    assertWildcardQueryEquals("[A TO C]", false, "[A TO C]");
     // Test suffix queries: first disallow
     expectThrows(QueryNodeException.class, () -> {
-      assertWildcardQueryEquals("*Term", true, "*term");
+      assertWildcardQueryEquals("*Term", "*term");
     });
 
     expectThrows(QueryNodeException.class, () -> {
-      assertWildcardQueryEquals("?Term", true, "?term");
+      assertWildcardQueryEquals("?Term", "?term");
     });
 
     // Test suffix queries: then allow
-    assertWildcardQueryEquals("*Term", true, "*term", true);
-    assertWildcardQueryEquals("?Term", true, "?term", true);
+    assertWildcardQueryEquals("*Term", "*term", true);
+    assertWildcardQueryEquals("?Term", "?term", true);
   }
 
   public void testLeadingWildcardType() throws Exception {
@@ -1159,10 +1134,10 @@ public class TestQPHelper extends LuceneTestCase {
   
   public void testRegexps() throws Exception {
     StandardQueryParser qp = new StandardQueryParser();
+    qp.setAnalyzer(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, true));
     final String df = "field" ;
     RegexpQuery q = new RegexpQuery(new Term("field", "[a-z][123]"));
     assertEquals(q, qp.parse("/[a-z][123]/", df));
-    qp.setLowercaseExpandedTerms(true);
     assertEquals(q, qp.parse("/[A-Z][123]/", df));
     assertEquals(new BoostQuery(q, 0.5f), qp.parse("/[A-Z][123]/^0.5", df));
     qp.setMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_REWRITE);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestStandardQP.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestStandardQP.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestStandardQP.java
index f678796..7e50eeb 100644
--- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestStandardQP.java
+++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestStandardQP.java
@@ -88,12 +88,6 @@ public class TestStandardQP extends QueryParserTestBase {
   }
   
   @Override
-  public void setAnalyzeRangeTerms(CommonQueryParserConfiguration cqpC,
-      boolean value) {
-    throw new UnsupportedOperationException();
-  }
-  
-  @Override
   public void setAutoGeneratePhraseQueries(CommonQueryParserConfiguration cqpC,
       boolean value) {
     throw new UnsupportedOperationException();
@@ -149,15 +143,6 @@ public class TestStandardQP extends QueryParserTestBase {
     WildcardQuery q = new WildcardQuery(new Term("field", "foo?ba?r"));//TODO not correct!!
     assertEquals(q, getQuery("foo\\?ba?r", qp));
   }
-
-  
-  @Override
-  public void testCollatedRange() throws Exception {
-    expectThrows(UnsupportedOperationException.class, () -> {
-      setAnalyzeRangeTerms(getParser(null), true);
-      super.testCollatedRange();
-    });
-  }
   
   @Override
   public void testAutoGeneratePhraseQueriesOn() throws Exception {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/queryparser/src/test/org/apache/lucene/queryparser/util/QueryParserTestBase.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/util/QueryParserTestBase.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/util/QueryParserTestBase.java
index d58f660..2170193 100644
--- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/util/QueryParserTestBase.java
+++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/util/QueryParserTestBase.java
@@ -145,8 +145,6 @@ public abstract class QueryParserTestBase extends LuceneTestCase {
 
   public abstract void setDefaultOperatorAND(CommonQueryParserConfiguration cqpC);
 
-  public abstract void setAnalyzeRangeTerms(CommonQueryParserConfiguration cqpC, boolean value);
-
   public abstract void setAutoGeneratePhraseQueries(CommonQueryParserConfiguration cqpC, boolean value);
 
   public abstract void setDateResolution(CommonQueryParserConfiguration cqpC, CharSequence field, DateTools.Resolution value);
@@ -203,10 +201,9 @@ public abstract class QueryParserTestBase extends LuceneTestCase {
     }
   }
 
-  public void assertWildcardQueryEquals(String query, boolean lowercase, String result, boolean allowLeadingWildcard)
+  public void assertWildcardQueryEquals(String query, String result, boolean allowLeadingWildcard)
     throws Exception {
     CommonQueryParserConfiguration cqpC = getParserConfig(null);
-    cqpC.setLowercaseExpandedTerms(lowercase);
     cqpC.setAllowLeadingWildcard(allowLeadingWildcard);
     Query q = getQuery(query, cqpC);
     String s = q.toString("field");
@@ -216,18 +213,9 @@ public abstract class QueryParserTestBase extends LuceneTestCase {
     }
   }
 
-  public void assertWildcardQueryEquals(String query, boolean lowercase, String result)
+  public void assertWildcardQueryEquals(String query, String result)
     throws Exception {
-    assertWildcardQueryEquals(query, lowercase, result, false);
-  }
-
-  public void assertWildcardQueryEquals(String query, String result) throws Exception {
-    Query q = getQuery(query);
-    String s = q.toString("field");
-    if (!s.equals(result)) {
-      fail("WildcardQuery /" + query + "/ yielded /" + s + "/, expecting /"
-          + result + "/");
-    }
+    assertWildcardQueryEquals(query, result, false);
   }
 
   public Query getQueryDOA(String query, Analyzer a)
@@ -473,39 +461,26 @@ public abstract class QueryParserTestBase extends LuceneTestCase {
    */
 // First prefix queries:
     // by default, convert to lowercase:
-    assertWildcardQueryEquals("Term*", true, "term*");
+    assertWildcardQueryEquals("Term*", "term*");
     // explicitly set lowercase:
-    assertWildcardQueryEquals("term*", true, "term*");
-    assertWildcardQueryEquals("Term*", true, "term*");
-    assertWildcardQueryEquals("TERM*", true, "term*");
-    // explicitly disable lowercase conversion:
-    assertWildcardQueryEquals("term*", false, "term*");
-    assertWildcardQueryEquals("Term*", false, "Term*");
-    assertWildcardQueryEquals("TERM*", false, "TERM*");
+    assertWildcardQueryEquals("term*", "term*");
+    assertWildcardQueryEquals("Term*", "term*");
+    assertWildcardQueryEquals("TERM*", "term*");
 // Then 'full' wildcard queries:
     // by default, convert to lowercase:
     assertWildcardQueryEquals("Te?m", "te?m");
     // explicitly set lowercase:
-    assertWildcardQueryEquals("te?m", true, "te?m");
-    assertWildcardQueryEquals("Te?m", true, "te?m");
-    assertWildcardQueryEquals("TE?M", true, "te?m");
-    assertWildcardQueryEquals("Te?m*gerM", true, "te?m*germ");
-    // explicitly disable lowercase conversion:
-    assertWildcardQueryEquals("te?m", false, "te?m");
-    assertWildcardQueryEquals("Te?m", false, "Te?m");
-    assertWildcardQueryEquals("TE?M", false, "TE?M");
-    assertWildcardQueryEquals("Te?m*gerM", false, "Te?m*gerM");
+    assertWildcardQueryEquals("te?m", "te?m");
+    assertWildcardQueryEquals("Te?m", "te?m");
+    assertWildcardQueryEquals("TE?M", "te?m");
+    assertWildcardQueryEquals("Te?m*gerM", "te?m*germ");
 //  Fuzzy queries:
     assertWildcardQueryEquals("Term~", "term~2");
-    assertWildcardQueryEquals("Term~", true, "term~2");
-    assertWildcardQueryEquals("Term~", false, "Term~2");
 //  Range queries:
     assertWildcardQueryEquals("[A TO C]", "[a TO c]");
-    assertWildcardQueryEquals("[A TO C]", true, "[a TO c]");
-    assertWildcardQueryEquals("[A TO C]", false, "[A TO C]");
     // Test suffix queries: first disallow
     try {
-      assertWildcardQueryEquals("*Term", true, "*term");
+      assertWildcardQueryEquals("*Term", "*term", false);
     } catch(Exception pe) {
       // expected exception
       if(!isQueryParserException(pe)){
@@ -513,7 +488,7 @@ public abstract class QueryParserTestBase extends LuceneTestCase {
       }
     }
     try {
-      assertWildcardQueryEquals("?Term", true, "?term");
+      assertWildcardQueryEquals("?Term", "?term");
       fail();
     } catch(Exception pe) {
       // expected exception
@@ -522,8 +497,8 @@ public abstract class QueryParserTestBase extends LuceneTestCase {
       }
     }
     // Test suffix queries: then allow
-    assertWildcardQueryEquals("*Term", true, "*term", true);
-    assertWildcardQueryEquals("?Term", true, "?term", true);
+    assertWildcardQueryEquals("*Term", "*term", true);
+    assertWildcardQueryEquals("?Term", "?term", true);
   }
   
   public void testLeadingWildcardType() throws Exception {
@@ -982,10 +957,9 @@ public abstract class QueryParserTestBase extends LuceneTestCase {
   }
   
   public void testRegexps() throws Exception {
-    CommonQueryParserConfiguration qp = getParserConfig( new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false));
+    CommonQueryParserConfiguration qp = getParserConfig( new MockAnalyzer(random(), MockTokenizer.WHITESPACE, true));
     RegexpQuery q = new RegexpQuery(new Term("field", "[a-z][123]"));
     assertEquals(q, getQuery("/[a-z][123]/",qp));
-    qp.setLowercaseExpandedTerms(true);
     assertEquals(q, getQuery("/[A-Z][123]/",qp));
     assertEquals(new BoostQuery(q, 0.5f), getQuery("/[A-Z][123]/^0.5",qp));
     qp.setMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_REWRITE);
@@ -1169,11 +1143,14 @@ public abstract class QueryParserTestBase extends LuceneTestCase {
       Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, true);
       return new TokenStreamComponents(tokenizer, new MockCollationFilter(tokenizer));
     }
+    @Override
+    protected TokenStream normalize(String fieldName, TokenStream in) {
+      return new MockCollationFilter(new LowerCaseFilter(in));
+    }
   }
   
   public void testCollatedRange() throws Exception {
     CommonQueryParserConfiguration qp = getParserConfig(new MockCollationAnalyzer());
-    setAnalyzeRangeTerms(qp, true);
     Query expected = TermRangeQuery.newStringRange(getDefaultField(), "collatedabc", "collateddef", true, true);
     Query actual = getQuery("[abc TO def]", qp);
     assertEquals(expected, actual);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java
index c57a8bc..0bb623f 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java
@@ -883,7 +883,10 @@ public abstract class BaseTokenStreamTestCase extends LuceneTestCase {
       assertTokenStreamContents(ts, 
                                 tokens.toArray(new String[tokens.size()]));
     }
-    
+
+    a.normalize("dummy", text);
+    // TODO: what can we do besides testing that the above method does not throw?
+
     if (field != null) {
       reader = new StringReader(text);
       random = new Random(seed);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/test-framework/src/java/org/apache/lucene/analysis/MockAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/analysis/MockAnalyzer.java b/lucene/test-framework/src/java/org/apache/lucene/analysis/MockAnalyzer.java
index e87bf45..bbeffe9 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/analysis/MockAnalyzer.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/analysis/MockAnalyzer.java
@@ -92,7 +92,16 @@ public final class MockAnalyzer extends Analyzer {
     MockTokenFilter filt = new MockTokenFilter(tokenizer, filter);
     return new TokenStreamComponents(tokenizer, maybePayload(filt, fieldName));
   }
-  
+
+  @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = in;
+    if (lowerCase) {
+      result = new MockLowerCaseFilter(result);
+    }
+    return result;
+  }
+
   private synchronized TokenFilter maybePayload(TokenFilter stream, String fieldName) {
     Integer val = previousMappings.get(fieldName);
     if (val == null) {
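
The sketch below is illustrative only and not part of this patch. It shows how the new normalize() hook is consumed from the outside: Analyzer.normalize(String, String) runs the normalization chain (here the MockLowerCaseFilter added above) and returns the normalized term bytes, which is what query parsers now rely on for wildcard, regexp and range terms instead of the removed setLowercaseExpandedTerms/setAnalyzeRangeTerms switches. MockAnalyzer lives in the lucene-test-framework module; the field name and input string are arbitrary.

import java.util.Random;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.util.BytesRef;

public class NormalizeSketch {
  public static void main(String[] args) {
    // MockAnalyzer lower-cases by default, so the normalize() chain added above applies.
    Analyzer analyzer = new MockAnalyzer(new Random(42));
    // Query parsers call this instead of lower-casing expanded terms themselves.
    BytesRef term = analyzer.normalize("field", "FooBar");
    System.out.println(term.utf8ToString()); // expected: "foobar"
  }
}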


[37/51] [abbrv] lucene-solr:apiv2: LUCENE-7385: Improve/fix assert messages in SpanScorer.

Posted by sa...@apache.org.
LUCENE-7385: Improve/fix assert messages in SpanScorer.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/efef37bb
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/efef37bb
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/efef37bb

Branch: refs/heads/apiv2
Commit: efef37bb6795ed3be33056025a88d2cd4b848604
Parents: 4123b3b
Author: David Smiley <ds...@apache.org>
Authored: Mon Jul 18 22:04:26 2016 -0400
Committer: David Smiley <ds...@apache.org>
Committed: Mon Jul 18 22:04:26 2016 -0400

----------------------------------------------------------------------
 lucene/CHANGES.txt                                        |  2 ++
 .../java/org/apache/lucene/search/spans/SpanScorer.java   | 10 +++++-----
 2 files changed, 7 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/efef37bb/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 6c62aab..4ac3169 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -131,6 +131,8 @@ Optimizations
 * LUCENE-7371: Point values are now better compressed using run-length
   encoding. (Adrien Grand)
 
+* LUCENE-7385: Improve/fix assert messages in SpanScorer. (David Smiley)
+
 Other
 
 * LUCENE-4787: Fixed some highlighting javadocs. (Michael Dodsworth via Adrien

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/efef37bb/lucene/core/src/java/org/apache/lucene/search/spans/SpanScorer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/spans/SpanScorer.java b/lucene/core/src/java/org/apache/lucene/search/spans/SpanScorer.java
index 4841ddc..508a2b3 100644
--- a/lucene/core/src/java/org/apache/lucene/search/spans/SpanScorer.java
+++ b/lucene/core/src/java/org/apache/lucene/search/spans/SpanScorer.java
@@ -87,13 +87,13 @@ public class SpanScorer extends Scorer {
 
     spans.doStartCurrentDoc();
 
-    assert spans.startPosition() == -1 : "incorrect initial start position, " + this.toString();
-    assert spans.endPosition() == -1 : "incorrect initial end position, " + this.toString();
+    assert spans.startPosition() == -1 : "incorrect initial start position, " + spans;
+    assert spans.endPosition() == -1 : "incorrect initial end position, " + spans;
     int prevStartPos = -1;
     int prevEndPos = -1;
 
     int startPos = spans.nextStartPosition();
-    assert startPos != Spans.NO_MORE_POSITIONS : "initial startPos NO_MORE_POSITIONS, " + this.toString();
+    assert startPos != Spans.NO_MORE_POSITIONS : "initial startPos NO_MORE_POSITIONS, " + spans;
     do {
       assert startPos >= prevStartPos;
       int endPos = spans.endPosition();
@@ -113,8 +113,8 @@ public class SpanScorer extends Scorer {
       startPos = spans.nextStartPosition();
     } while (startPos != Spans.NO_MORE_POSITIONS);
 
-    assert spans.startPosition() == Spans.NO_MORE_POSITIONS : "incorrect final start position, " + this.toString();
-    assert spans.endPosition() == Spans.NO_MORE_POSITIONS : "incorrect final end position, " + this.toString();
+    assert spans.startPosition() == Spans.NO_MORE_POSITIONS : "incorrect final start position, " + spans;
+    assert spans.endPosition() == Spans.NO_MORE_POSITIONS : "incorrect final end position, " + spans;
   }
 
   /**
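
As a side note (not part of the patch): the detail expression after ':' in a Java assert is evaluated only when the condition fails and assertions are enabled, so putting the Spans state into the message adds nothing on the passing path while making the failing case far easier to diagnose. A minimal standalone illustration:

public class AssertMessageSketch {
  public static void main(String[] args) {
    int startPosition = -1;
    // The detail expression after ':' is only evaluated when the condition is false
    // and assertions are enabled (run with: java -ea AssertMessageSketch).
    assert startPosition == -1 : "incorrect initial start position, startPosition=" + startPosition;
    System.out.println("assertion passed");
  }
}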


[43/51] [abbrv] lucene-solr:apiv2: LUCENE-7384: Tweak SpanWeight.buildSimWeight to reuse the existing similarity.

Posted by sa...@apache.org.
LUCENE-7384: Tweak SpanWeight.buildSimWeight to reuse the existing similarity.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/180f9562
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/180f9562
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/180f9562

Branch: refs/heads/apiv2
Commit: 180f9562aa9c1e271d8dce48ac5695d0612bf808
Parents: abb81e4
Author: David Smiley <ds...@apache.org>
Authored: Tue Jul 19 12:45:28 2016 -0400
Committer: David Smiley <ds...@apache.org>
Committed: Tue Jul 19 12:45:28 2016 -0400

----------------------------------------------------------------------
 .../core/src/java/org/apache/lucene/search/spans/SpanWeight.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/180f9562/lucene/core/src/java/org/apache/lucene/search/spans/SpanWeight.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/spans/SpanWeight.java b/lucene/core/src/java/org/apache/lucene/search/spans/SpanWeight.java
index c0b231e..4d08172 100644
--- a/lucene/core/src/java/org/apache/lucene/search/spans/SpanWeight.java
+++ b/lucene/core/src/java/org/apache/lucene/search/spans/SpanWeight.java
@@ -99,7 +99,7 @@ public abstract class SpanWeight extends Weight {
       i++;
     }
     CollectionStatistics collectionStats = searcher.collectionStatistics(query.getField());
-    return searcher.getSimilarity(true).computeWeight(boost, collectionStats, termStats);
+    return similarity.computeWeight(boost, collectionStats, termStats);
   }
 
   /**
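
For context, the one-line change relies on the Weight having already captured the Similarity at construction time, so helpers such as buildSimWeight can reuse that field instead of asking the IndexSearcher again. Below is a compilable sketch of that caching pattern; the class and constructor here are hypothetical, not Lucene's (SpanWeight itself already keeps such a field).

import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.similarities.Similarity;

// Hypothetical sketch of a Weight that resolves its Similarity exactly once.
abstract class CachingSimilarityWeight extends Weight {
  protected final Similarity similarity;

  protected CachingSimilarityWeight(Query query, IndexSearcher searcher, boolean needsScores) {
    super(query);
    // Resolved once here; helpers reuse this field rather than calling
    // searcher.getSimilarity(true) again later.
    this.similarity = searcher.getSimilarity(needsScores);
  }
}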


[39/51] [abbrv] lucene-solr:apiv2: SOLR-9275: fix NPE in SolrCoreParser.init

Posted by sa...@apache.org.
SOLR-9275: fix NPE in SolrCoreParser.init


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/832dacff
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/832dacff
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/832dacff

Branch: refs/heads/apiv2
Commit: 832dacffc8dddfef07456624660118a593f176bd
Parents: 5c4b717
Author: Christine Poerschke <cp...@apache.org>
Authored: Tue Jul 19 11:28:57 2016 +0100
Committer: Christine Poerschke <cp...@apache.org>
Committed: Tue Jul 19 11:28:57 2016 +0100

----------------------------------------------------------------------
 solr/core/src/java/org/apache/solr/search/SolrCoreParser.java | 3 +++
 1 file changed, 3 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/832dacff/solr/core/src/java/org/apache/solr/search/SolrCoreParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/SolrCoreParser.java b/solr/core/src/java/org/apache/solr/search/SolrCoreParser.java
index 1e0e5bd..4857b75 100755
--- a/solr/core/src/java/org/apache/solr/search/SolrCoreParser.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrCoreParser.java
@@ -43,6 +43,9 @@ public class SolrCoreParser extends CoreParser implements NamedListInitializedPl
 
   @Override
   public void init(NamedList initArgs) {
+    if (initArgs == null || initArgs.size() == 0) {
+      return;
+    }
     final SolrResourceLoader loader;
     if (req == null) {
       loader = new SolrResourceLoader();
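
The guard simply returns before any configuration is read when init() receives null or empty arguments, which is where the NPE surfaced. A minimal sketch of the same defensive pattern for an arbitrary NamedListInitializedPlugin follows; the class name and the "queryParsers" key are illustrative only, not part of this commit.

import org.apache.solr.common.util.NamedList;
import org.apache.solr.util.plugin.NamedListInitializedPlugin;

public class GuardedPlugin implements NamedListInitializedPlugin {
  @Override
  public void init(NamedList args) {
    if (args == null || args.size() == 0) {
      return; // nothing configured; avoid dereferencing args further
    }
    Object queryParsers = args.get("queryParsers"); // illustrative key
    // ... continue initialization with the supplied arguments ...
  }
}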


[30/51] [abbrv] lucene-solr:apiv2: Update JGit to keep up-to-date with recent GIT versions

Posted by sa...@apache.org.
Update JGit to keep up-to-date with recent GIT versions


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/621527d1
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/621527d1
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/621527d1

Branch: refs/heads/apiv2
Commit: 621527d1a2cb952e180f8ecc4b99d3ac618c6e2b
Parents: c3c1f8d
Author: Uwe Schindler <us...@apache.org>
Authored: Sun Jul 17 12:50:23 2016 +0200
Committer: Uwe Schindler <us...@apache.org>
Committed: Sun Jul 17 12:50:23 2016 +0200

----------------------------------------------------------------------
 build.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/621527d1/build.xml
----------------------------------------------------------------------
diff --git a/build.xml b/build.xml
index 5cc7bd8..53b278f 100644
--- a/build.xml
+++ b/build.xml
@@ -20,7 +20,7 @@
 <project name="lucene-solr" default="-projecthelp" basedir=".">
   <import file="lucene/common-build.xml"/>
 
-  <property name="jgit-version" value="4.2.0.201601211800-r"/>
+  <property name="jgit-version" value="4.4.1.201607150455-r"/>
   
   <property name="tests.heap-dump-dir" location="heapdumps"/>
   


[28/51] [abbrv] lucene-solr:apiv2: SOLR-9240: Added testcase with text field in the fl for topic

Posted by sa...@apache.org.
SOLR-9240: Added testcase with text field in the fl for topic


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c3c1f8d6
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c3c1f8d6
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c3c1f8d6

Branch: refs/heads/apiv2
Commit: c3c1f8d6e6cb57cb30e736d5ff0387400729d216
Parents: fc3894e
Author: jbernste <jb...@apache.org>
Authored: Tue Jul 12 11:36:05 2016 -0400
Committer: jbernste <jb...@apache.org>
Committed: Sat Jul 16 22:36:30 2016 -0400

----------------------------------------------------------------------
 .../solrj/io/stream/StreamExpressionTest.java   | 79 +++++++++++++++++---
 1 file changed, 69 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c3c1f8d6/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
index 4af565a..f2446f3 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
@@ -2533,16 +2533,16 @@ public class StreamExpressionTest extends SolrCloudTestCase {
   public void testParallelTopicStream() throws Exception {
 
     new UpdateRequest()
-        .add(id, "0", "a_s", "hello", "a_i", "0", "a_f", "1")
-        .add(id, "2", "a_s", "hello", "a_i", "2", "a_f", "2")
-        .add(id, "3", "a_s", "hello", "a_i", "3", "a_f", "3")
-        .add(id, "4", "a_s", "hello", "a_i", "4", "a_f", "4")
-        .add(id, "1", "a_s", "hello", "a_i", "1", "a_f", "5")
-        .add(id, "5", "a_s", "hello", "a_i", "10", "a_f", "6")
-        .add(id, "6", "a_s", "hello", "a_i", "11", "a_f", "7")
-        .add(id, "7", "a_s", "hello", "a_i", "12", "a_f", "8")
-        .add(id, "8", "a_s", "hello", "a_i", "13", "a_f", "9")
-        .add(id, "9", "a_s", "hello", "a_i", "14", "a_f", "10")
+        .add(id, "0", "a_s", "hello", "a_i", "0", "a_f", "1", "subject", "ha ha bla blah0")
+        .add(id, "2", "a_s", "hello", "a_i", "2", "a_f", "2", "subject", "ha ha bla blah2")
+        .add(id, "3", "a_s", "hello", "a_i", "3", "a_f", "3", "subject", "ha ha bla blah3")
+        .add(id, "4", "a_s", "hello", "a_i", "4", "a_f", "4", "subject", "ha ha bla blah4")
+        .add(id, "1", "a_s", "hello", "a_i", "1", "a_f", "5", "subject", "ha ha bla blah5")
+        .add(id, "5", "a_s", "hello", "a_i", "10", "a_f", "6","subject", "ha ha bla blah6")
+        .add(id, "6", "a_s", "hello", "a_i", "11", "a_f", "7","subject", "ha ha bla blah7")
+        .add(id, "7", "a_s", "hello", "a_i", "12", "a_f", "8", "subject", "ha ha bla blah8")
+        .add(id, "8", "a_s", "hello", "a_i", "13", "a_f", "9", "subject", "ha ha bla blah9")
+        .add(id, "9", "a_s", "hello", "a_i", "14", "a_f", "10", "subject", "ha ha bla blah10")
         .commit(cluster.getSolrClient(), COLLECTION);
 
     StreamFactory factory = new StreamFactory()
@@ -2653,6 +2653,37 @@ public class StreamExpressionTest extends SolrCloudTestCase {
       context.setSolrClientCache(cache);
       stream.setStreamContext(context);
       assertTopicRun(stream, "12","13");
+
+      //Test text extraction
+
+      expression = StreamExpressionParser.parse("parallel(collection1, " +
+          "workers=\"2\", " +
+          "sort=\"_version_ asc\"," +
+          "topic(collection1, " +
+          "collection1, " +
+          "q=\"subject:bla\", " +
+          "fl=\"subject\", " +
+          "id=\"3000000\", " +
+          "initialCheckpoint=\"0\", " +
+          "partitionKeys=\"id\"))");
+
+      stream = factory.constructStream(expression);
+      context = new StreamContext();
+      context.setSolrClientCache(cache);
+      stream.setStreamContext(context);
+
+      assertTopicSubject(stream, "ha ha bla blah0",
+          "ha ha bla blah1",
+          "ha ha bla blah2",
+          "ha ha bla blah3",
+          "ha ha bla blah4",
+          "ha ha bla blah5",
+          "ha ha bla blah6",
+          "ha ha bla blah7",
+          "ha ha bla blah8",
+          "ha ha bla blah9",
+          "ha ha bla blah10");
+
     } finally {
       cache.close();
     }
@@ -3314,4 +3345,32 @@ public class StreamExpressionTest extends SolrCloudTestCase {
       throw new Exception("Wrong count in topic run:"+count);
     }
   }
+
+  private void assertTopicSubject(TupleStream stream, String... textArray) throws Exception {
+    long version = -1;
+    int count = 0;
+    List<String> texts = new ArrayList();
+    for(String text : textArray) {
+      texts.add(text);
+    }
+
+    try {
+      stream.open();
+      while (true) {
+        Tuple tuple = stream.read();
+        if (tuple.EOF) {
+          break;
+        } else {
+          ++count;
+          String subject = tuple.getString("subject");
+          if (!texts.contains(subject)) {
+            throw new Exception("Expecting subject in topic run not found:" + subject);
+          }
+        }
+      }
+    } finally {
+      stream.close();
+    }
+  }
+
 }
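
A small aside on the new assertTopicSubject helper (illustrative only, not part of the patch): the expected-subject bookkeeping could equivalently use a Set for constant-time membership checks, as in this standalone sketch.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class SubjectCheckSketch {
  public static void main(String[] args) {
    String[] textArray = {"ha ha bla blah0", "ha ha bla blah1"};
    // Same membership check as the helper above, but backed by a Set.
    Set<String> texts = new HashSet<>(Arrays.asList(textArray));
    String subject = "ha ha bla blah0";
    if (!texts.contains(subject)) {
      throw new IllegalStateException("Expecting subject in topic run not found: " + subject);
    }
    System.out.println("subject found");
  }
}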


[48/51] [abbrv] lucene-solr:apiv2: SOLR-8995: Use lambda to simplify CollectionsHandler

Posted by sa...@apache.org.
SOLR-8995: Use lambda to simplify CollectionsHandler


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/fb4de6ad
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/fb4de6ad
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/fb4de6ad

Branch: refs/heads/apiv2
Commit: fb4de6adb1bd8b7b97999a98d8464e33ef9e965e
Parents: 9aa639d
Author: Noble Paul <no...@apache.org>
Authored: Wed Jul 20 22:00:55 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Wed Jul 20 22:00:55 2016 +0530

----------------------------------------------------------------------
 .../solr/handler/admin/CollectionsHandler.java  | 897 +++++++++----------
 1 file changed, 399 insertions(+), 498 deletions(-)
----------------------------------------------------------------------
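
The refactor replaces per-constant anonymous enum bodies with lambdas targeting a small functional interface (CollectionOp, whose execute(req, rsp, h) signature appears in the hunks below). Here is a generic, self-contained sketch of that enum-plus-lambda pattern; all names are hypothetical, not Solr's.

import java.util.Map;

public class EnumLambdaSketch {
  @FunctionalInterface
  interface Op {
    Map<String, Object> execute(Map<String, String> params) throws Exception;
  }

  enum Operation implements Op {
    // Each constant supplies its body as a lambda instead of an anonymous subclass.
    LIST(params -> java.util.Collections.emptyMap()),
    DELETE(params -> {
      Map<String, Object> out = new java.util.HashMap<>();
      out.put("name", params.get("name"));
      return out;
    });

    private final Op op;

    Operation(Op op) {
      this.op = op;
    }

    @Override
    public Map<String, Object> execute(Map<String, String> params) throws Exception {
      return op.execute(params);
    }
  }

  public static void main(String[] args) throws Exception {
    System.out.println(Operation.DELETE.execute(java.util.Collections.singletonMap("name", "c1")));
  }
}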


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/fb4de6ad/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
index 874e68c..cb72790 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
@@ -37,7 +37,6 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.solr.client.solrj.SolrResponse;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.impl.HttpSolrClient.Builder;
-import org.apache.solr.client.solrj.request.CoreAdminRequest;
 import org.apache.solr.client.solrj.request.CoreAdminRequest.RequestSyncShard;
 import org.apache.solr.client.solrj.response.RequestStatusState;
 import org.apache.solr.client.solrj.util.SolrIdentifierValidator;
@@ -204,7 +203,7 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
       log.info("Invoked Collection Action :{} with params {} and sendToOCPQueue={}", action.toLower(), req.getParamString(), operation.sendToOCPQueue);
 
       SolrResponse response = null;
-      Map<String, Object> props = operation.call(req, rsp, this);
+      Map<String, Object> props = operation.execute(req, rsp, this);
       String asyncId = req.getParams().get(ASYNC);
       if (props != null) {
         if (asyncId != null) {
@@ -335,7 +334,34 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
 
   public static final String SYSTEM_COLL = ".system";
 
-  enum CollectionOperation {
+  private static void createSysConfigSet(CoreContainer coreContainer) throws KeeperException, InterruptedException {
+    SolrZkClient zk = coreContainer.getZkController().getZkStateReader().getZkClient();
+    ZkCmdExecutor cmdExecutor = new ZkCmdExecutor(zk.getZkClientTimeout());
+    cmdExecutor.ensureExists(ZkStateReader.CONFIGS_ZKNODE, zk);
+    cmdExecutor.ensureExists(ZkStateReader.CONFIGS_ZKNODE + "/" + SYSTEM_COLL, zk);
+
+    try {
+      String path = ZkStateReader.CONFIGS_ZKNODE + "/" + SYSTEM_COLL + "/schema.xml";
+      byte[] data = IOUtils.toByteArray(Thread.currentThread().getContextClassLoader().getResourceAsStream("SystemCollectionSchema.xml"));
+      cmdExecutor.ensureExists(path, data, CreateMode.PERSISTENT, zk);
+      path = ZkStateReader.CONFIGS_ZKNODE + "/" + SYSTEM_COLL + "/solrconfig.xml";
+      data = IOUtils.toByteArray(Thread.currentThread().getContextClassLoader().getResourceAsStream("SystemCollectionSolrConfig.xml"));
+      cmdExecutor.ensureExists(path, data, CreateMode.PERSISTENT, zk);
+    } catch (IOException e) {
+      throw new SolrException(ErrorCode.SERVER_ERROR, e);
+    }
+
+
+  }
+
+  private static void addStatusToResponse(NamedList<Object> results, RequestStatusState state, String msg) {
+    SimpleOrderedMap<String> status = new SimpleOrderedMap<>();
+    status.add("state", state.getKey());
+    status.add("msg", msg);
+    results.add("status", status);
+  }
+
+  enum CollectionOperation implements CollectionOp {
     /**
      * very simple currently, you can pass a template collection, and the new collection is created on
      * every node the template collection is on
@@ -343,284 +369,194 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
      * we might also want to think about error handling (add the request to a zk queue and involve overseer?)
      * as well as specific replicas= options
      */
-    CREATE_OP(CREATE) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h)
-          throws KeeperException, InterruptedException {
-        Map<String, Object> props = req.getParams().required().getAll(null, NAME);
-        props.put("fromApi", "true");
-        req.getParams().getAll(props,
-            REPLICATION_FACTOR,
-            COLL_CONF,
-            NUM_SLICES,
-            MAX_SHARDS_PER_NODE,
-            CREATE_NODE_SET, CREATE_NODE_SET_SHUFFLE,
-            SHARDS_PROP,
-            STATE_FORMAT,
-            AUTO_ADD_REPLICAS,
-            RULE,
-            SNITCH);
-
-        if (props.get(STATE_FORMAT) == null) {
-          props.put(STATE_FORMAT, "2");
-        }
-        addMapObject(props, RULE);
-        addMapObject(props, SNITCH);
-        verifyRuleParams(h.coreContainer, props);
-        final String collectionName = SolrIdentifierValidator.validateCollectionName((String)props.get(NAME));
-        final String shardsParam = (String) props.get(SHARDS_PROP);
-        if (StringUtils.isNotEmpty(shardsParam)) {
-          verifyShardsParam(shardsParam);
-        }
-        if (SYSTEM_COLL.equals(collectionName)) {
-          //We must always create a .system collection with only a single shard
-          props.put(NUM_SLICES, 1);
-          props.remove(SHARDS_PROP);
-          createSysConfigSet(h.coreContainer);
-
-        }
-        copyPropertiesWithPrefix(req.getParams(), props, COLL_PROP_PREFIX);
-        return copyPropertiesWithPrefix(req.getParams(), props, "router.");
-
-      }
-
-      private void createSysConfigSet(CoreContainer coreContainer) throws KeeperException, InterruptedException {
-        SolrZkClient zk = coreContainer.getZkController().getZkStateReader().getZkClient();
-        ZkCmdExecutor cmdExecutor = new ZkCmdExecutor(zk.getZkClientTimeout());
-        cmdExecutor.ensureExists(ZkStateReader.CONFIGS_ZKNODE, zk);
-        cmdExecutor.ensureExists(ZkStateReader.CONFIGS_ZKNODE + "/" + SYSTEM_COLL, zk);
-
-        try {
-          String path = ZkStateReader.CONFIGS_ZKNODE + "/" + SYSTEM_COLL + "/schema.xml";
-          byte[] data = IOUtils.toByteArray(Thread.currentThread().getContextClassLoader().getResourceAsStream("SystemCollectionSchema.xml"));
-          cmdExecutor.ensureExists(path, data, CreateMode.PERSISTENT, zk);
-          path = ZkStateReader.CONFIGS_ZKNODE + "/" + SYSTEM_COLL + "/solrconfig.xml";
-          data = IOUtils.toByteArray(Thread.currentThread().getContextClassLoader().getResourceAsStream("SystemCollectionSolrConfig.xml"));
-          cmdExecutor.ensureExists(path, data, CreateMode.PERSISTENT, zk);
-        } catch (IOException e) {
-          throw new SolrException(ErrorCode.SERVER_ERROR, e);
-
-        }
-
-
-      }
-    },
-    DELETE_OP(DELETE) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler)
-          throws Exception {
-        return req.getParams().required().getAll(null, NAME);
-      }
-    },
-    RELOAD_OP(RELOAD) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler)
-          throws Exception {
-        return req.getParams().required().getAll(null, NAME);
-      }
-    },
-    SYNCSHARD_OP(SYNCSHARD) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h)
-          throws Exception {
-        String collection = req.getParams().required().get("collection");
-        String shard = req.getParams().required().get("shard");
-
-        ClusterState clusterState = h.coreContainer.getZkController().getClusterState();
-
-        DocCollection docCollection = clusterState.getCollection(collection);
-        ZkNodeProps leaderProps = docCollection.getLeader(shard);
-        ZkCoreNodeProps nodeProps = new ZkCoreNodeProps(leaderProps);
-
-        try (HttpSolrClient client = new Builder(nodeProps.getBaseUrl()).build()) {
-          client.setConnectionTimeout(15000);
-          client.setSoTimeout(60000);
-          RequestSyncShard reqSyncShard = new CoreAdminRequest.RequestSyncShard();
-          reqSyncShard.setCollection(collection);
-          reqSyncShard.setShard(shard);
-          reqSyncShard.setCoreName(nodeProps.getCoreName());
-          client.request(reqSyncShard);
-        }
-        return null;
-      }
-
-    },
-    CREATEALIAS_OP(CREATEALIAS) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler)
-          throws Exception {
-        final String aliasName = SolrIdentifierValidator.validateAliasName(req.getParams().get(NAME));
-        return req.getParams().required().getAll(null, NAME, "collections");
-      }
-    },
-    DELETEALIAS_OP(DELETEALIAS) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler)
-          throws Exception {
-        return req.getParams().required().getAll(null, NAME);
-      }
-
-    },
-    SPLITSHARD_OP(SPLITSHARD, DEFAULT_COLLECTION_OP_TIMEOUT * 5, true) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h)
-          throws Exception {
-        String name = req.getParams().required().get(COLLECTION_PROP);
-        // TODO : add support for multiple shards
-        String shard = req.getParams().get(SHARD_ID_PROP);
-        String rangesStr = req.getParams().get(CoreAdminParams.RANGES);
-        String splitKey = req.getParams().get("split.key");
-
-        if (splitKey == null && shard == null) {
-          throw new SolrException(ErrorCode.BAD_REQUEST, "At least one of shard, or split.key should be specified.");
-        }
-        if (splitKey != null && shard != null) {
-          throw new SolrException(ErrorCode.BAD_REQUEST,
-              "Only one of 'shard' or 'split.key' should be specified");
-        }
-        if (splitKey != null && rangesStr != null) {
-          throw new SolrException(ErrorCode.BAD_REQUEST,
-              "Only one of 'ranges' or 'split.key' should be specified");
-        }
-
-        Map<String, Object> map = req.getParams().getAll(null,
-            COLLECTION_PROP,
-            SHARD_ID_PROP,
-            "split.key",
-            CoreAdminParams.RANGES);
-        return copyPropertiesWithPrefix(req.getParams(), map, COLL_PROP_PREFIX);
-      }
-    },
-    DELETESHARD_OP(DELETESHARD) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler) throws Exception {
-        Map<String, Object> map = req.getParams().required().getAll(null,
-            COLLECTION_PROP,
-            SHARD_ID_PROP);
-        req.getParams().getAll(map,
-            DELETE_INDEX,
-            DELETE_DATA_DIR,
-            DELETE_INSTANCE_DIR);
-        return map;
-      }
-    },
-    FORCELEADER_OP(FORCELEADER) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler) throws Exception {
-        forceLeaderElection(req, handler);
-        return null;
-      }
-    },
-    CREATESHARD_OP(CREATESHARD) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler) throws Exception {
-        Map<String, Object> map = req.getParams().required().getAll(null,
-            COLLECTION_PROP,
-            SHARD_ID_PROP);
-        ClusterState clusterState = handler.coreContainer.getZkController().getClusterState();
-        final String newShardName = SolrIdentifierValidator.validateShardName(req.getParams().get(SHARD_ID_PROP));
-        if (!ImplicitDocRouter.NAME.equals(((Map) clusterState.getCollection(req.getParams().get(COLLECTION_PROP)).get(DOC_ROUTER)).get(NAME)))
-          throw new SolrException(ErrorCode.BAD_REQUEST, "shards can be added only to 'implicit' collections");
-        req.getParams().getAll(map,
-            REPLICATION_FACTOR,
-            CREATE_NODE_SET);
-        return copyPropertiesWithPrefix(req.getParams(), map, COLL_PROP_PREFIX);
-      }
-    },
-    DELETEREPLICA_OP(DELETEREPLICA) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler) throws Exception {
-        Map<String, Object> map = req.getParams().required().getAll(null,
-            COLLECTION_PROP,
-            SHARD_ID_PROP,
-            REPLICA_PROP);
-
-        req.getParams().getAll(map,
-            DELETE_INDEX,
-            DELETE_DATA_DIR,
-            DELETE_INSTANCE_DIR);
-
-        return req.getParams().getAll(map, ONLY_IF_DOWN);
-      }
-    },
-    MIGRATE_OP(MIGRATE) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
-        Map<String, Object> map = req.getParams().required().getAll(null, COLLECTION_PROP, "split.key", "target.collection");
-        return req.getParams().getAll(map, "forward.timeout");
-      }
-    },
-    ADDROLE_OP(ADDROLE) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler) throws Exception {
-        Map<String, Object> map = req.getParams().required().getAll(null, "role", "node");
-        if (!KNOWN_ROLES.contains(map.get("role")))
-          throw new SolrException(ErrorCode.BAD_REQUEST, "Unknown role. Supported roles are ," + KNOWN_ROLES);
-        return map;
-      }
-    },
-    REMOVEROLE_OP(REMOVEROLE) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
-        Map<String, Object> map = req.getParams().required().getAll(null, "role", "node");
-        if (!KNOWN_ROLES.contains(map.get("role")))
-          throw new SolrException(ErrorCode.BAD_REQUEST, "Unknown role. Supported roles are ," + KNOWN_ROLES);
-        return map;
-      }
-    },
-    CLUSTERPROP_OP(CLUSTERPROP) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
-        String name = req.getParams().required().get(NAME);
-        String val = req.getParams().get(VALUE_LONG);
-        ClusterProperties cp = new ClusterProperties(h.coreContainer.getZkController().getZkClient());
-        cp.setClusterProperty(name, val);
-        return null;
-      }
-    },
-    REQUESTSTATUS_OP(REQUESTSTATUS) {
-      @SuppressWarnings("unchecked")
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
-        req.getParams().required().check(REQUESTID);
-
-        final CoreContainer coreContainer = h.coreContainer;
-        final String requestId = req.getParams().get(REQUESTID);
-        final ZkController zkController = coreContainer.getZkController();
-
-        final NamedList<Object> results = new NamedList<>();
-        if (zkController.getOverseerCompletedMap().contains(requestId)) {
-          final byte[] mapEntry = zkController.getOverseerCompletedMap().get(requestId);
-          rsp.getValues().addAll(SolrResponse.deserialize(mapEntry).getResponse());
-          addStatusToResponse(results, COMPLETED, "found [" + requestId + "] in completed tasks");
-        } else if (zkController.getOverseerFailureMap().contains(requestId)) {
-          final byte[] mapEntry = zkController.getOverseerFailureMap().get(requestId);
-          rsp.getValues().addAll(SolrResponse.deserialize(mapEntry).getResponse());
-          addStatusToResponse(results, FAILED, "found [" + requestId + "] in failed tasks");
-        } else if (zkController.getOverseerRunningMap().contains(requestId)) {
-          addStatusToResponse(results, RUNNING, "found [" + requestId + "] in running tasks");
-        } else if (h.overseerCollectionQueueContains(requestId)) {
-          addStatusToResponse(results, SUBMITTED, "found [" + requestId + "] in submitted tasks");
-        } else {
-          addStatusToResponse(results, NOT_FOUND, "Did not find [" + requestId + "] in any tasks queue");
-        }
-
-        final SolrResponse response = new OverseerSolrResponse(results);
-        rsp.getValues().addAll(response.getResponse());
-        return null;
+    CREATE_OP(CREATE, (req, rsp, h) -> {
+      Map<String, Object> props = req.getParams().required().getAll(null, NAME);
+      props.put("fromApi", "true");
+      req.getParams().getAll(props,
+          REPLICATION_FACTOR,
+          COLL_CONF,
+          NUM_SLICES,
+          MAX_SHARDS_PER_NODE,
+          CREATE_NODE_SET, CREATE_NODE_SET_SHUFFLE,
+          SHARDS_PROP,
+          STATE_FORMAT,
+          AUTO_ADD_REPLICAS,
+          RULE,
+          SNITCH);
+
+      if (props.get(STATE_FORMAT) == null) {
+        props.put(STATE_FORMAT, "2");
+      }
+      addMapObject(props, RULE);
+      addMapObject(props, SNITCH);
+      verifyRuleParams(h.coreContainer, props);
+      final String collectionName = SolrIdentifierValidator.validateCollectionName((String) props.get(NAME));
+      final String shardsParam = (String) props.get(SHARDS_PROP);
+      if (StringUtils.isNotEmpty(shardsParam)) {
+        verifyShardsParam(shardsParam);
+      }
+      if (SYSTEM_COLL.equals(collectionName)) {
+        //We must always create a .system collection with only a single shard
+        props.put(NUM_SLICES, 1);
+        props.remove(SHARDS_PROP);
+        createSysConfigSet(h.coreContainer);
+
+      }
+      copyPropertiesWithPrefix(req.getParams(), props, COLL_PROP_PREFIX);
+      return copyPropertiesWithPrefix(req.getParams(), props, "router.");
+
+    }),
+    DELETE_OP(DELETE, (req, rsp, h) -> req.getParams().required().getAll(null, NAME)),
+
+    RELOAD_OP(RELOAD, (req, rsp, h) -> req.getParams().required().getAll(null, NAME)),
+
+    SYNCSHARD_OP(SYNCSHARD, (req, rsp, h) -> {
+      String collection = req.getParams().required().get("collection");
+      String shard = req.getParams().required().get("shard");
+
+      ClusterState clusterState = h.coreContainer.getZkController().getClusterState();
+
+      DocCollection docCollection = clusterState.getCollection(collection);
+      ZkNodeProps leaderProps = docCollection.getLeader(shard);
+      ZkCoreNodeProps nodeProps = new ZkCoreNodeProps(leaderProps);
+
+      try (HttpSolrClient client = new Builder(nodeProps.getBaseUrl()).build()) {
+        client.setConnectionTimeout(15000);
+        client.setSoTimeout(60000);
+        RequestSyncShard reqSyncShard = new RequestSyncShard();
+        reqSyncShard.setCollection(collection);
+        reqSyncShard.setShard(shard);
+        reqSyncShard.setCoreName(nodeProps.getCoreName());
+        client.request(reqSyncShard);
+      }
+      return null;
+    }),
+    CREATEALIAS_OP(CREATEALIAS, (req, rsp, h) -> {
+      final String aliasName = SolrIdentifierValidator.validateAliasName(req.getParams().get(NAME));
+      return req.getParams().required().getAll(null, NAME, "collections");
+    }),
+    DELETEALIAS_OP(DELETEALIAS, (req, rsp, h) -> req.getParams().required().getAll(null, NAME)),
+    SPLITSHARD_OP(SPLITSHARD, DEFAULT_COLLECTION_OP_TIMEOUT * 5, true, (req, rsp, h) -> {
+      String name = req.getParams().required().get(COLLECTION_PROP);
+      // TODO : add support for multiple shards
+      String shard = req.getParams().get(SHARD_ID_PROP);
+      String rangesStr = req.getParams().get(CoreAdminParams.RANGES);
+      String splitKey = req.getParams().get("split.key");
+
+      if (splitKey == null && shard == null) {
+        throw new SolrException(ErrorCode.BAD_REQUEST, "At least one of shard, or split.key should be specified.");
+      }
+      if (splitKey != null && shard != null) {
+        throw new SolrException(ErrorCode.BAD_REQUEST,
+            "Only one of 'shard' or 'split.key' should be specified");
+      }
+      if (splitKey != null && rangesStr != null) {
+        throw new SolrException(ErrorCode.BAD_REQUEST,
+            "Only one of 'ranges' or 'split.key' should be specified");
+      }
+
+      Map<String, Object> map = req.getParams().getAll(null,
+          COLLECTION_PROP,
+          SHARD_ID_PROP,
+          "split.key",
+          CoreAdminParams.RANGES);
+      return copyPropertiesWithPrefix(req.getParams(), map, COLL_PROP_PREFIX);
+    }),
+    DELETESHARD_OP(DELETESHARD, (req, rsp, h) -> {
+      Map<String, Object> map = req.getParams().required().getAll(null,
+          COLLECTION_PROP,
+          SHARD_ID_PROP);
+      req.getParams().getAll(map,
+          DELETE_INDEX,
+          DELETE_DATA_DIR,
+          DELETE_INSTANCE_DIR);
+      return map;
+    }),
+    FORCELEADER_OP(FORCELEADER, (req, rsp, h) -> {
+      forceLeaderElection(req, h);
+      return null;
+    }),
+    CREATESHARD_OP(CREATESHARD, (req, rsp, h) -> {
+      Map<String, Object> map = req.getParams().required().getAll(null,
+          COLLECTION_PROP,
+          SHARD_ID_PROP);
+      ClusterState clusterState = h.coreContainer.getZkController().getClusterState();
+      final String newShardName = SolrIdentifierValidator.validateShardName(req.getParams().get(SHARD_ID_PROP));
+      if (!ImplicitDocRouter.NAME.equals(((Map) clusterState.getCollection(req.getParams().get(COLLECTION_PROP)).get(DOC_ROUTER)).get(NAME)))
+        throw new SolrException(ErrorCode.BAD_REQUEST, "shards can be added only to 'implicit' collections");
+      req.getParams().getAll(map,
+          REPLICATION_FACTOR,
+          CREATE_NODE_SET);
+      return copyPropertiesWithPrefix(req.getParams(), map, COLL_PROP_PREFIX);
+    }),
+    DELETEREPLICA_OP(DELETEREPLICA, (req, rsp, h) -> {
+      Map<String, Object> map = req.getParams().required().getAll(null,
+          COLLECTION_PROP,
+          SHARD_ID_PROP,
+          REPLICA_PROP);
+
+      req.getParams().getAll(map,
+          DELETE_INDEX,
+          DELETE_DATA_DIR,
+          DELETE_INSTANCE_DIR);
+
+      return req.getParams().getAll(map, ONLY_IF_DOWN);
+    }),
+    MIGRATE_OP(MIGRATE, (req, rsp, h) -> {
+      Map<String, Object> map = req.getParams().required().getAll(null, COLLECTION_PROP, "split.key", "target.collection");
+      return req.getParams().getAll(map, "forward.timeout");
+    }),
+    ADDROLE_OP(ADDROLE, (req, rsp, h) -> {
+      Map<String, Object> map = req.getParams().required().getAll(null, "role", "node");
+      if (!KNOWN_ROLES.contains(map.get("role")))
+        throw new SolrException(ErrorCode.BAD_REQUEST, "Unknown role. Supported roles are ," + KNOWN_ROLES);
+      return map;
+    }),
+    REMOVEROLE_OP(REMOVEROLE, (req, rsp, h) -> {
+      Map<String, Object> map = req.getParams().required().getAll(null, "role", "node");
+      if (!KNOWN_ROLES.contains(map.get("role")))
+        throw new SolrException(ErrorCode.BAD_REQUEST, "Unknown role. Supported roles are ," + KNOWN_ROLES);
+      return map;
+    }),
+    CLUSTERPROP_OP(CLUSTERPROP, (req, rsp, h) -> {
+      String name = req.getParams().required().get(NAME);
+      String val = req.getParams().get(VALUE_LONG);
+      ClusterProperties cp = new ClusterProperties(h.coreContainer.getZkController().getZkClient());
+      cp.setClusterProperty(name, val);
+      return null;
+    }),
+    REQUESTSTATUS_OP(REQUESTSTATUS, (req, rsp, h) -> {
+      req.getParams().required().check(REQUESTID);
+
+      final CoreContainer coreContainer1 = h.coreContainer;
+      final String requestId = req.getParams().get(REQUESTID);
+      final ZkController zkController = coreContainer1.getZkController();
+
+      final NamedList<Object> results = new NamedList<>();
+      if (zkController.getOverseerCompletedMap().contains(requestId)) {
+        final byte[] mapEntry = zkController.getOverseerCompletedMap().get(requestId);
+        rsp.getValues().addAll(SolrResponse.deserialize(mapEntry).getResponse());
+        addStatusToResponse(results, COMPLETED, "found [" + requestId + "] in completed tasks");
+      } else if (zkController.getOverseerFailureMap().contains(requestId)) {
+        final byte[] mapEntry = zkController.getOverseerFailureMap().get(requestId);
+        rsp.getValues().addAll(SolrResponse.deserialize(mapEntry).getResponse());
+        addStatusToResponse(results, FAILED, "found [" + requestId + "] in failed tasks");
+      } else if (zkController.getOverseerRunningMap().contains(requestId)) {
+        addStatusToResponse(results, RUNNING, "found [" + requestId + "] in running tasks");
+      } else if (h.overseerCollectionQueueContains(requestId)) {
+        addStatusToResponse(results, SUBMITTED, "found [" + requestId + "] in submitted tasks");
+      } else {
+        addStatusToResponse(results, NOT_FOUND, "Did not find [" + requestId + "] in any tasks queue");
       }
 
-      private void addStatusToResponse(NamedList<Object> results, RequestStatusState state, String msg) {
-        SimpleOrderedMap<String> status = new SimpleOrderedMap<>();
-        status.add("state", state.getKey());
-        status.add("msg", msg);
-        results.add("status", status);
-      }
-    },
-    DELETESTATUS_OP(DELETESTATUS) {
+      final SolrResponse response = new OverseerSolrResponse(results);
+      rsp.getValues().addAll(response.getResponse());
+      return null;
+    }),
+    DELETESTATUS_OP(DELETESTATUS, new CollectionOp() {
       @SuppressWarnings("unchecked")
       @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
+      public Map<String, Object> execute(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
         final CoreContainer coreContainer = h.coreContainer;
         final String requestId = req.getParams().get(REQUESTID);
         final ZkController zkController = coreContainer.getZkController();
@@ -652,263 +588,217 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
         }
         return null;
       }
-    },
-    ADDREPLICA_OP(ADDREPLICA) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h)
-          throws Exception {
-        Map<String, Object> props = req.getParams().getAll(null,
-            COLLECTION_PROP,
-            "node",
-            SHARD_ID_PROP,
-            _ROUTE_,
-            CoreAdminParams.NAME,
-            INSTANCE_DIR,
-            DATA_DIR);
-        return copyPropertiesWithPrefix(req.getParams(), props, COLL_PROP_PREFIX);
-      }
-    },
-    OVERSEERSTATUS_OP(OVERSEERSTATUS) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
-        return new LinkedHashMap<>();
-      }
-    },
+    }),
+    ADDREPLICA_OP(ADDREPLICA, (req, rsp, h) -> {
+      Map<String, Object> props = req.getParams().getAll(null,
+          COLLECTION_PROP,
+          "node",
+          SHARD_ID_PROP,
+          _ROUTE_,
+          CoreAdminParams.NAME,
+          INSTANCE_DIR,
+          DATA_DIR);
+      return copyPropertiesWithPrefix(req.getParams(), props, COLL_PROP_PREFIX);
+    }),
+    OVERSEERSTATUS_OP(OVERSEERSTATUS, (req, rsp, h) -> (Map) new LinkedHashMap<>()),
 
     /**
      * Handle list collection request.
      * Do list collection request to zk host
      */
-    LIST_OP(LIST) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler) throws Exception {
-        NamedList<Object> results = new NamedList<>();
-        Map<String, DocCollection> collections = handler.coreContainer.getZkController().getZkStateReader().getClusterState().getCollectionsMap();
-        List<String> collectionList = new ArrayList<>(collections.keySet());
-        results.add("collections", collectionList);
-        SolrResponse response = new OverseerSolrResponse(results);
-        rsp.getValues().addAll(response.getResponse());
-        return null;
-      }
-    },
+    LIST_OP(LIST, (req, rsp, h) -> {
+      NamedList<Object> results = new NamedList<>();
+      Map<String, DocCollection> collections = h.coreContainer.getZkController().getZkStateReader().getClusterState().getCollectionsMap();
+      List<String> collectionList = new ArrayList<>(collections.keySet());
+      results.add("collections", collectionList);
+      SolrResponse response = new OverseerSolrResponse(results);
+      rsp.getValues().addAll(response.getResponse());
+      return null;
+    }),
     /**
      * Handle cluster status request.
      * Can return status per specific collection/shard or per all collections.
      */
-    CLUSTERSTATUS_OP(CLUSTERSTATUS) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler)
-          throws KeeperException, InterruptedException {
-        Map<String, Object> all = req.getParams().getAll(null,
-            COLLECTION_PROP,
-            SHARD_ID_PROP,
-            _ROUTE_);
-        new ClusterStatus(handler.coreContainer.getZkController().getZkStateReader(),
-            new ZkNodeProps(all)).getClusterStatus(rsp.getValues());
-        return null;
-      }
-    },
-    ADDREPLICAPROP_OP(ADDREPLICAPROP) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
-        Map<String, Object> map = req.getParams().required().getAll(null,
-            COLLECTION_PROP,
-            PROPERTY_PROP,
-            SHARD_ID_PROP,
-            REPLICA_PROP,
-            PROPERTY_VALUE_PROP);
-        req.getParams().getAll(map, SHARD_UNIQUE);
-        String property = (String) map.get(PROPERTY_PROP);
-        if (!property.startsWith(COLL_PROP_PREFIX)) {
-          property = COLL_PROP_PREFIX + property;
+    CLUSTERSTATUS_OP(CLUSTERSTATUS, (req, rsp, h) -> {
+      Map<String, Object> all = req.getParams().getAll(null,
+          COLLECTION_PROP,
+          SHARD_ID_PROP,
+          _ROUTE_);
+      new ClusterStatus(h.coreContainer.getZkController().getZkStateReader(),
+          new ZkNodeProps(all)).getClusterStatus(rsp.getValues());
+      return null;
+    }),
+    ADDREPLICAPROP_OP(ADDREPLICAPROP, (req, rsp, h) -> {
+      Map<String, Object> map = req.getParams().required().getAll(null,
+          COLLECTION_PROP,
+          PROPERTY_PROP,
+          SHARD_ID_PROP,
+          REPLICA_PROP,
+          PROPERTY_VALUE_PROP);
+      req.getParams().getAll(map, SHARD_UNIQUE);
+      String property = (String) map.get(PROPERTY_PROP);
+      if (!property.startsWith(COLL_PROP_PREFIX)) {
+        property = COLL_PROP_PREFIX + property;
+      }
+
+      boolean uniquePerSlice = Boolean.parseBoolean((String) map.get(SHARD_UNIQUE));
+
+      // Check if we're trying to set a property with parameters that allow us to set the property on multiple replicas
+      // in a slice on properties that are known to only be one-per-slice and error out if so.
+      if (StringUtils.isNotBlank((String) map.get(SHARD_UNIQUE)) &&
+          SliceMutator.SLICE_UNIQUE_BOOLEAN_PROPERTIES.contains(property.toLowerCase(Locale.ROOT)) &&
+          uniquePerSlice == false) {
+        throw new SolrException(ErrorCode.BAD_REQUEST,
+            "Overseer replica property command received for property " + property +
+                " with the " + SHARD_UNIQUE +
+                " parameter set to something other than 'true'. No action taken.");
+      }
+      return map;
+    }),
+    DELETEREPLICAPROP_OP(DELETEREPLICAPROP, (req, rsp, h) -> {
+      Map<String, Object> map = req.getParams().required().getAll(null,
+          COLLECTION_PROP,
+          PROPERTY_PROP,
+          SHARD_ID_PROP,
+          REPLICA_PROP);
+      return req.getParams().getAll(map, PROPERTY_PROP);
+    }),
+    BALANCESHARDUNIQUE_OP(BALANCESHARDUNIQUE, (req, rsp, h) -> {
+      Map<String, Object> map = req.getParams().required().getAll(null,
+          COLLECTION_PROP,
+          PROPERTY_PROP);
+      Boolean shardUnique = Boolean.parseBoolean(req.getParams().get(SHARD_UNIQUE));
+      String prop = req.getParams().get(PROPERTY_PROP).toLowerCase(Locale.ROOT);
+      if (!StringUtils.startsWith(prop, COLL_PROP_PREFIX)) {
+        prop = COLL_PROP_PREFIX + prop;
+      }
+
+      if (!shardUnique && !SliceMutator.SLICE_UNIQUE_BOOLEAN_PROPERTIES.contains(prop)) {
+        throw new SolrException(ErrorCode.BAD_REQUEST, "Balancing properties amongst replicas in a slice requires that"
+            + " the property be pre-defined as a unique property (e.g. 'preferredLeader') or that 'shardUnique' be set to 'true'. " +
+            " Property: " + prop + " shardUnique: " + Boolean.toString(shardUnique));
+      }
+
+      return req.getParams().getAll(map, ONLY_ACTIVE_NODES, SHARD_UNIQUE);
+    }),
+    REBALANCELEADERS_OP(REBALANCELEADERS, (req, rsp, h) -> {
+      new RebalanceLeaders(req, rsp, h).execute();
+      return null;
+    }),
+    MODIFYCOLLECTION_OP(MODIFYCOLLECTION, (req, rsp, h) -> {
+      Map<String, Object> m = req.getParams().getAll(null, MODIFIABLE_COLL_PROPS);
+      if (m.isEmpty()) throw new SolrException(ErrorCode.BAD_REQUEST,
+          formatString("no supported values provided rule, snitch, maxShardsPerNode, replicationFactor, collection.configName"));
+      req.getParams().required().getAll(m, COLLECTION_PROP);
+      addMapObject(m, RULE);
+      addMapObject(m, SNITCH);
+      for (String prop : MODIFIABLE_COLL_PROPS) DocCollection.verifyProp(m, prop);
+      verifyRuleParams(h.coreContainer, m);
+      return m;
+    }),
+    MIGRATESTATEFORMAT_OP(MIGRATESTATEFORMAT, (req, rsp, h) -> req.getParams().required().getAll(null, COLLECTION_PROP)),
+
+    BACKUP_OP(BACKUP, (req, rsp, h) -> {
+      req.getParams().required().check(NAME, COLLECTION_PROP);
+
+      String collectionName = req.getParams().get(COLLECTION_PROP);
+      ClusterState clusterState = h.coreContainer.getZkController().getClusterState();
+      if (!clusterState.hasCollection(collectionName)) {
+        throw new SolrException(ErrorCode.BAD_REQUEST, "Collection '" + collectionName + "' does not exist, no action taken.");
+      }
+
+      CoreContainer cc = h.coreContainer;
+      String repo = req.getParams().get(CoreAdminParams.BACKUP_REPOSITORY);
+      BackupRepository repository = cc.newBackupRepository(Optional.ofNullable(repo));
+
+      String location = repository.getBackupLocation(req.getParams().get(CoreAdminParams.BACKUP_LOCATION));
+      if (location == null) {
+        //Refresh the cluster property file to make sure the value set for location is the latest
+        h.coreContainer.getZkController().getZkStateReader().forceUpdateClusterProperties();
+
+        // Check if the location is specified in the cluster property.
+        location = h.coreContainer.getZkController().getZkStateReader().getClusterProperty(CoreAdminParams.BACKUP_LOCATION, null);
+        if (location == null) {
+          throw new SolrException(ErrorCode.BAD_REQUEST, "'location' is not specified as a query"
+              + " parameter or as a default repository property or as a cluster property.");
         }
+      }
 
-        boolean uniquePerSlice = Boolean.parseBoolean((String) map.get(SHARD_UNIQUE));
-
-        // Check if we're trying to set a property with parameters that allow us to set the property on multiple replicas
-        // in a slice on properties that are known to only be one-per-slice and error out if so.
-        if (StringUtils.isNotBlank((String) map.get(SHARD_UNIQUE)) &&
-            SliceMutator.SLICE_UNIQUE_BOOLEAN_PROPERTIES.contains(property.toLowerCase(Locale.ROOT)) &&
-            uniquePerSlice == false) {
-          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-              "Overseer replica property command received for property " + property +
-                  " with the " + SHARD_UNIQUE +
-                  " parameter set to something other than 'true'. No action taken.");
+      // Check if the specified location is valid for this repository.
+      URI uri = repository.createURI(location);
+      try {
+        if (!repository.exists(uri)) {
+          throw new SolrException(ErrorCode.SERVER_ERROR, "specified location " + uri + " does not exist.");
         }
-        return map;
-      }
-    },
-    DELETEREPLICAPROP_OP(DELETEREPLICAPROP) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
-        Map<String, Object> map = req.getParams().required().getAll(null,
-            COLLECTION_PROP,
-            PROPERTY_PROP,
-            SHARD_ID_PROP,
-            REPLICA_PROP);
-        return req.getParams().getAll(map, PROPERTY_PROP);
+      } catch (IOException ex) {
+        throw new SolrException(ErrorCode.SERVER_ERROR, "Failed to check the existance of " + uri + ". Is it valid?", ex);
       }
-    },
-    BALANCESHARDUNIQUE_OP(BALANCESHARDUNIQUE) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
-        Map<String, Object> map = req.getParams().required().getAll(null,
-            COLLECTION_PROP,
-            PROPERTY_PROP);
-        Boolean shardUnique = Boolean.parseBoolean(req.getParams().get(SHARD_UNIQUE));
-        String prop = req.getParams().get(PROPERTY_PROP).toLowerCase(Locale.ROOT);
-        if (!StringUtils.startsWith(prop, COLL_PROP_PREFIX)) {
-          prop = COLL_PROP_PREFIX + prop;
-        }
 
-        if (!shardUnique && !SliceMutator.SLICE_UNIQUE_BOOLEAN_PROPERTIES.contains(prop)) {
-          throw new SolrException(ErrorCode.BAD_REQUEST, "Balancing properties amongst replicas in a slice requires that"
-              + " the property be pre-defined as a unique property (e.g. 'preferredLeader') or that 'shardUnique' be set to 'true'. " +
-              " Property: " + prop + " shardUnique: " + Boolean.toString(shardUnique));
-        }
+      Map<String, Object> params = req.getParams().getAll(null, NAME, COLLECTION_PROP);
+      params.put(CoreAdminParams.BACKUP_LOCATION, location);
+      return params;
+    }),
+    RESTORE_OP(RESTORE, (req, rsp, h) -> {
+      req.getParams().required().check(NAME, COLLECTION_PROP);
 
-        return req.getParams().getAll(map, ONLY_ACTIVE_NODES, SHARD_UNIQUE);
+      String collectionName = SolrIdentifierValidator.validateCollectionName(req.getParams().get(COLLECTION_PROP));
+      ClusterState clusterState = h.coreContainer.getZkController().getClusterState();
+      //We always want to restore into an collection name which doesn't  exist yet.
+      if (clusterState.hasCollection(collectionName)) {
+        throw new SolrException(ErrorCode.BAD_REQUEST, "Collection '" + collectionName + "' exists, no action taken.");
       }
-    },
-    REBALANCELEADERS_OP(REBALANCELEADERS) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
-        new RebalanceLeaders(req,rsp,h).execute();
-        return null;
-      }
-    },
-    MODIFYCOLLECTION_OP(MODIFYCOLLECTION) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
-
-        Map<String, Object> m = req.getParams().getAll(null, MODIFIABLE_COLL_PROPS);
-        if (m.isEmpty()) throw new SolrException(ErrorCode.BAD_REQUEST,
-            formatString("no supported values provided rule, snitch, maxShardsPerNode, replicationFactor, collection.configName"));
-        req.getParams().required().getAll(m, COLLECTION_PROP);
-        addMapObject(m, RULE);
-        addMapObject(m, SNITCH);
-        for (String prop : MODIFIABLE_COLL_PROPS) DocCollection.verifyProp(m, prop);
-        verifyRuleParams(h.coreContainer, m);
-        return m;
-      }
-    },
-    MIGRATESTATEFORMAT_OP(MIGRATESTATEFORMAT) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler)
-          throws Exception {
-        return req.getParams().required().getAll(null, COLLECTION_PROP);
-      }
-    },
-    BACKUP_OP(BACKUP) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
-        req.getParams().required().check(NAME, COLLECTION_PROP);
 
-        String collectionName = req.getParams().get(COLLECTION_PROP);
-        ClusterState clusterState = h.coreContainer.getZkController().getClusterState();
-        if (!clusterState.hasCollection(collectionName)) {
-          throw new SolrException(ErrorCode.BAD_REQUEST, "Collection '" + collectionName + "' does not exist, no action taken.");
-        }
+      CoreContainer cc = h.coreContainer;
+      String repo = req.getParams().get(CoreAdminParams.BACKUP_REPOSITORY);
+      BackupRepository repository = cc.newBackupRepository(Optional.ofNullable(repo));
 
-        CoreContainer cc = h.coreContainer;
-        String repo = req.getParams().get(CoreAdminParams.BACKUP_REPOSITORY);
-        BackupRepository repository = cc.newBackupRepository(Optional.ofNullable(repo));
+      String location = repository.getBackupLocation(req.getParams().get(CoreAdminParams.BACKUP_LOCATION));
+      if (location == null) {
+        //Refresh the cluster property file to make sure the value set for location is the latest
+        h.coreContainer.getZkController().getZkStateReader().forceUpdateClusterProperties();
 
-        String location = repository.getBackupLocation(req.getParams().get(CoreAdminParams.BACKUP_LOCATION));
+        // Check if the location is specified in the cluster property.
+        location = h.coreContainer.getZkController().getZkStateReader().getClusterProperty("location", null);
         if (location == null) {
-          //Refresh the cluster property file to make sure the value set for location is the latest
-          h.coreContainer.getZkController().getZkStateReader().forceUpdateClusterProperties();
-
-          // Check if the location is specified in the cluster property.
-          location = h.coreContainer.getZkController().getZkStateReader().getClusterProperty(CoreAdminParams.BACKUP_LOCATION, null);
-          if (location == null) {
-            throw new SolrException(ErrorCode.BAD_REQUEST, "'location' is not specified as a query"
-                + " parameter or as a default repository property or as a cluster property.");
-          }
-        }
-
-        // Check if the specified location is valid for this repository.
-        URI uri = repository.createURI(location);
-        try {
-          if (!repository.exists(uri)) {
-            throw new SolrException(ErrorCode.SERVER_ERROR, "specified location " + uri + " does not exist.");
-          }
-        } catch (IOException ex) {
-          throw new SolrException(ErrorCode.SERVER_ERROR, "Failed to check the existance of " + uri + ". Is it valid?", ex);
+          throw new SolrException(ErrorCode.BAD_REQUEST, "'location' is not specified as a query"
+              + " parameter or as a default repository property or as a cluster property.");
         }
-
-        Map<String, Object> params = req.getParams().getAll(null, NAME, COLLECTION_PROP);
-        params.put(CoreAdminParams.BACKUP_LOCATION, location);
-        return params;
       }
-    },
-    RESTORE_OP(RESTORE) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
-        req.getParams().required().check(NAME, COLLECTION_PROP);
-
-        String collectionName = SolrIdentifierValidator.validateCollectionName(req.getParams().get(COLLECTION_PROP));
-        ClusterState clusterState = h.coreContainer.getZkController().getClusterState();
-        //We always want to restore into an collection name which doesn't  exist yet.
-        if (clusterState.hasCollection(collectionName)) {
-          throw new SolrException(ErrorCode.BAD_REQUEST, "Collection '" + collectionName + "' exists, no action taken.");
-        }
 
-        CoreContainer cc = h.coreContainer;
-        String repo = req.getParams().get(CoreAdminParams.BACKUP_REPOSITORY);
-        BackupRepository repository = cc.newBackupRepository(Optional.ofNullable(repo));
-
-        String location = repository.getBackupLocation(req.getParams().get(CoreAdminParams.BACKUP_LOCATION));
-        if (location == null) {
-          //Refresh the cluster property file to make sure the value set for location is the latest
-          h.coreContainer.getZkController().getZkStateReader().forceUpdateClusterProperties();
-
-          // Check if the location is specified in the cluster property.
-          location = h.coreContainer.getZkController().getZkStateReader().getClusterProperty("location", null);
-          if (location == null) {
-            throw new SolrException(ErrorCode.BAD_REQUEST, "'location' is not specified as a query"
-                + " parameter or as a default repository property or as a cluster property.");
-          }
-        }
-
-        // Check if the specified location is valid for this repository.
-        URI uri = repository.createURI(location);
-        try {
-          if (!repository.exists(uri)) {
-            throw new SolrException(ErrorCode.SERVER_ERROR, "specified location " + uri + " does not exist.");
-          }
-        } catch (IOException ex) {
-          throw new SolrException(ErrorCode.SERVER_ERROR, "Failed to check the existance of " + uri + ". Is it valid?", ex);
+      // Check if the specified location is valid for this repository.
+      URI uri = repository.createURI(location);
+      try {
+        if (!repository.exists(uri)) {
+          throw new SolrException(ErrorCode.SERVER_ERROR, "specified location " + uri + " does not exist.");
         }
-
-        Map<String, Object> params = req.getParams().getAll(null, NAME, COLLECTION_PROP);
-        params.put(CoreAdminParams.BACKUP_LOCATION, location);
-        // from CREATE_OP:
-        req.getParams().getAll(params, COLL_CONF, REPLICATION_FACTOR, MAX_SHARDS_PER_NODE, STATE_FORMAT, AUTO_ADD_REPLICAS);
-        copyPropertiesWithPrefix(req.getParams(), params, COLL_PROP_PREFIX);
-        return params;
-      }
-    };
+      } catch (IOException ex) {
+        throw new SolrException(ErrorCode.SERVER_ERROR, "Failed to check the existence of " + uri + ". Is it valid?", ex);
+      }
+
+      Map<String, Object> params = req.getParams().getAll(null, NAME, COLLECTION_PROP);
+      params.put(CoreAdminParams.BACKUP_LOCATION, location);
+      // from CREATE_OP:
+      req.getParams().getAll(params, COLL_CONF, REPLICATION_FACTOR, MAX_SHARDS_PER_NODE, STATE_FORMAT, AUTO_ADD_REPLICAS);
+      copyPropertiesWithPrefix(req.getParams(), params, COLL_PROP_PREFIX);
+      return params;
+    });
+    public final CollectionOp fun;
     CollectionAction action;
     long timeOut;
     boolean sendToOCPQueue;
 
-    CollectionOperation(CollectionAction action) {
-      this(action, DEFAULT_COLLECTION_OP_TIMEOUT, true);
+    CollectionOperation(CollectionAction action, CollectionOp fun) {
+      this(action, DEFAULT_COLLECTION_OP_TIMEOUT, true, fun);
     }
 
-    CollectionOperation(CollectionAction action, long timeOut, boolean sendToOCPQueue) {
+    CollectionOperation(CollectionAction action, long timeOut, boolean sendToOCPQueue, CollectionOp fun) {
       this.action = action;
       this.timeOut = timeOut;
       this.sendToOCPQueue = sendToOCPQueue;
+      this.fun = fun;
 
     }
 
-    /**
-     * All actions must implement this method. If a non null map is returned , the action name is added to
-     * the map and sent to overseer for processing. If it returns a null, the call returns immediately
-     */
-    abstract Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception;
 
     public static CollectionOperation get(CollectionAction action) {
       for (CollectionOperation op : values()) {
@@ -916,6 +806,12 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
       }
       throw new SolrException(ErrorCode.SERVER_ERROR, "No such action" + action);
     }
+
+    @Override
+    public Map<String, Object> execute(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h)
+        throws Exception {
+      return fun.execute(req, rsp, h);
+    }
   }
 
   private static void forceLeaderElection(SolrQueryRequest req, CollectionsHandler handler) {
@@ -1096,6 +992,11 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
     }
   }
 
+  interface CollectionOp {
+    Map<String, Object> execute(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception;
+    
+  }
+
   public static final List<String> MODIFIABLE_COLL_PROPS = Arrays.asList(
       RULE,
       SNITCH,

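The BACKUP_OP and RESTORE_OP lambdas above resolve the backup 'location' in three steps: an explicit request parameter wins, otherwise the repository's default is used, and finally the cluster property is consulted (after forcing a cluster-properties refresh). Below is a minimal, self-contained Java sketch of just that fallback order; the class and method names are hypothetical and it carries none of Solr's request/repository plumbing.

// Hypothetical sketch of the location fallback used by BACKUP_OP/RESTORE_OP:
// request parameter -> repository default -> cluster property.
public final class BackupLocationSketch {

  static String resolveLocation(String requestParam,
                                String repositoryDefault,
                                String clusterProperty) {
    // 1) explicit location query parameter, if given; 2) repository default otherwise
    String location = requestParam != null ? requestParam : repositoryDefault;
    if (location == null) {
      // 3) cluster-wide property (the real handler refreshes cluster properties first)
      location = clusterProperty;
    }
    if (location == null) {
      throw new IllegalArgumentException("'location' is not specified as a query"
          + " parameter or as a default repository property or as a cluster property.");
    }
    return location;
  }

  public static void main(String[] args) {
    System.out.println(resolveLocation(null, null, "/var/backups")); // prints /var/backups
  }
}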

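For readers following the larger refactor in this file: each CollectionOperation constant previously overrode an abstract call(...) method, and now instead stores a CollectionOp lambda in its 'fun' field and delegates to it from execute(...). A stripped-down sketch of that enum-plus-functional-interface pattern follows; the names and payloads are illustrative only and omit SolrQueryRequest/SolrQueryResponse entirely.

import java.util.Collections;
import java.util.Map;

public class EnumOpSketch {

  // Stand-in for the CollectionOp functional interface introduced in the diff.
  interface Op {
    Map<String, Object> execute(String request) throws Exception;
  }

  // Each constant supplies its behaviour as a constructor argument (a lambda)
  // rather than as a per-constant abstract-method override.
  enum Operation implements Op {
    LIST(req -> Collections.singletonMap("action", "list")),
    STATUS(req -> Collections.singletonMap("request", req));

    private final Op fun;

    Operation(Op fun) {
      this.fun = fun;
    }

    @Override
    public Map<String, Object> execute(String request) throws Exception {
      return fun.execute(request); // delegate to the stored lambda
    }
  }

  public static void main(String[] args) throws Exception {
    System.out.println(Operation.STATUS.execute("collection1")); // {request=collection1}
  }
}

One practical effect of the change, visible in the diff, is that operation-specific logic becomes an ordinary lambda body instead of an anonymous enum subclass, keeping the shared timeout and sendToOCPQueue wiring in a single constructor.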
[51/51] [abbrv] lucene-solr:apiv2: SOLR-8029: merge master into apiv2

Posted by sa...@apache.org.
SOLR-8029: merge master into apiv2


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/49a09217
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/49a09217
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/49a09217

Branch: refs/heads/apiv2
Commit: 49a09217064d6f1578895cf8425946bb2d08338d
Parents: 22f1be6 d4cb52f
Author: Steve Rowe <sa...@apache.org>
Authored: Thu Jul 21 09:34:37 2016 -0400
Committer: Steve Rowe <sa...@apache.org>
Committed: Thu Jul 21 09:36:10 2016 -0400

----------------------------------------------------------------------
 build.xml                                       |   39 +-
 .../dot.settings/org.eclipse.jdt.core.prefs     |    3 +
 .../idea/.idea/copyright/profiles_settings.xml  |    1 -
 .../test-framework/lucene-test-framework.iml    |    1 +
 .../lucene/analysis/common/pom.xml.template     |   13 +
 dev-tools/maven/pom.xml.template                |    4 +-
 .../contrib/analysis-extras/pom.xml.template    |    7 +
 dev-tools/maven/solr/pom.xml.template           |    6 +-
 .../maven/solr/test-framework/pom.xml.template  |    5 +-
 dev-tools/scripts/addBackcompatIndexes.py       |   30 +-
 dev-tools/scripts/addVersion.py                 |   19 +-
 dev-tools/scripts/releasedJirasRegex.py         |   93 +
 dev-tools/scripts/scriptutil.py                 |    7 +-
 lucene/CHANGES.txt                              |  164 +-
 lucene/MIGRATE.txt                              |  103 +-
 lucene/analysis/common/build.xml                |   39 +-
 .../lucene/analysis/ar/ArabicAnalyzer.java      |   20 +-
 .../lucene/analysis/bg/BulgarianAnalyzer.java   |   17 +-
 .../lucene/analysis/br/BrazilianAnalyzer.java   |   17 +-
 .../lucene/analysis/ca/CatalanAnalyzer.java     |   18 +-
 .../charfilter/HTMLStripCharFilter.java         |    5 +-
 .../charfilter/HTMLStripCharFilter.jflex        |    1 +
 .../apache/lucene/analysis/cjk/CJKAnalyzer.java |   15 +-
 .../lucene/analysis/ckb/SoraniAnalyzer.java     |   23 +-
 .../analysis/commongrams/CommonGramsFilter.java |    4 +-
 .../commongrams/CommonGramsFilterFactory.java   |    3 +-
 .../compound/CompoundWordTokenFilterBase.java   |    8 +-
 .../DictionaryCompoundWordTokenFilter.java      |    2 +-
 ...ictionaryCompoundWordTokenFilterFactory.java |    8 +-
 .../HyphenationCompoundWordTokenFilter.java     |    7 +-
 ...phenationCompoundWordTokenFilterFactory.java |   11 +-
 .../lucene/analysis/core/LowerCaseFilter.java   |   50 -
 .../analysis/core/LowerCaseFilterFactory.java   |    2 +-
 .../lucene/analysis/core/SimpleAnalyzer.java    |    7 +
 .../lucene/analysis/core/StopAnalyzer.java      |   31 +-
 .../apache/lucene/analysis/core/StopFilter.java |  111 -
 .../lucene/analysis/core/StopFilterFactory.java |   11 +-
 .../lucene/analysis/core/TypeTokenFilter.java   |    2 +-
 .../lucene/analysis/core/UpperCaseFilter.java   |    3 +-
 .../lucene/analysis/custom/CustomAnalyzer.java  |   28 +-
 .../lucene/analysis/cz/CzechAnalyzer.java       |   25 +-
 .../lucene/analysis/da/DanishAnalyzer.java      |   19 +-
 .../lucene/analysis/de/GermanAnalyzer.java      |   20 +-
 .../lucene/analysis/el/GreekAnalyzer.java       |   13 +-
 .../apache/lucene/analysis/el/GreekStemmer.java |    2 +-
 .../lucene/analysis/en/EnglishAnalyzer.java     |   15 +-
 .../org/apache/lucene/analysis/en/KStemmer.java |    2 +-
 .../lucene/analysis/es/SpanishAnalyzer.java     |   19 +-
 .../lucene/analysis/eu/BasqueAnalyzer.java      |   17 +-
 .../lucene/analysis/fa/PersianAnalyzer.java     |   22 +-
 .../lucene/analysis/fi/FinnishAnalyzer.java     |   19 +-
 .../lucene/analysis/fr/FrenchAnalyzer.java      |   32 +-
 .../lucene/analysis/ga/IrishAnalyzer.java       |   16 +-
 .../lucene/analysis/gl/GalicianAnalyzer.java    |   19 +-
 .../lucene/analysis/hi/HindiAnalyzer.java       |   23 +-
 .../lucene/analysis/hu/HungarianAnalyzer.java   |   19 +-
 .../lucene/analysis/hunspell/Stemmer.java       |    2 +-
 .../lucene/analysis/hy/ArmenianAnalyzer.java    |   17 +-
 .../lucene/analysis/id/IndonesianAnalyzer.java  |   17 +-
 .../lucene/analysis/it/ItalianAnalyzer.java     |   20 +-
 .../lucene/analysis/lt/LithuanianAnalyzer.java  |   17 +-
 .../lucene/analysis/lv/LatvianAnalyzer.java     |   19 +-
 .../lucene/analysis/minhash/MinHashFilter.java  |    5 +-
 .../analysis/minhash/MinHashFilterFactory.java  |    5 +-
 .../miscellaneous/CapitalizationFilter.java     |    2 +-
 .../CapitalizationFilterFactory.java            |    8 +-
 .../miscellaneous/CodepointCountFilter.java     |    2 +-
 .../miscellaneous/DateRecognizerFilter.java     |    2 +-
 .../miscellaneous/FingerprintFilter.java        |    2 +-
 .../analysis/miscellaneous/KeepWordFilter.java  |    4 +-
 .../miscellaneous/KeepWordFilterFactory.java    |    8 +-
 .../KeywordMarkerFilterFactory.java             |    2 +-
 .../analysis/miscellaneous/LengthFilter.java    |    2 +-
 .../RemoveDuplicatesTokenFilter.java            |    2 +-
 .../ScandinavianFoldingFilterFactory.java       |   10 +-
 .../ScandinavianNormalizationFilterFactory.java |   10 +-
 .../miscellaneous/SetKeywordMarkerFilter.java   |    2 +-
 .../miscellaneous/WordDelimiterFilter.java      |   10 +-
 .../WordDelimiterFilterFactory.java             |   16 +-
 .../lucene/analysis/ngram/NGramTokenizer.java   |    2 +-
 .../lucene/analysis/nl/DutchAnalyzer.java       |   29 +-
 .../lucene/analysis/no/NorwegianAnalyzer.java   |   19 +-
 .../lucene/analysis/pt/PortugueseAnalyzer.java  |   19 +-
 .../lucene/analysis/pt/RSLPStemmerBase.java     |    2 +-
 .../query/QueryAutoStopWordAnalyzer.java        |    4 +-
 .../lucene/analysis/ro/RomanianAnalyzer.java    |   17 +-
 .../lucene/analysis/ru/RussianAnalyzer.java     |   23 +-
 .../analysis/snowball/SnowballFilter.java       |    4 +-
 .../snowball/SnowballPorterFilterFactory.java   |    6 +-
 .../analysis/standard/ASCIITLD.jflex-macro      |    2 +-
 .../analysis/standard/ClassicAnalyzer.java      |   15 +-
 .../analysis/standard/ClassicTokenizer.java     |    1 +
 .../standard/ClassicTokenizerImpl.jflex         |    1 +
 .../analysis/standard/StandardAnalyzer.java     |   98 -
 .../analysis/standard/StandardFilter.java       |   38 -
 .../analysis/standard/StandardTokenizer.java    |  201 -
 .../standard/StandardTokenizerImpl.java         |  818 ---
 .../standard/StandardTokenizerImpl.jflex        |  201 -
 .../standard/UAX29URLEmailAnalyzer.java         |   19 +-
 .../standard/UAX29URLEmailTokenizer.java        |    1 +
 .../standard/UAX29URLEmailTokenizerImpl.jflex   |    1 +
 .../lucene/analysis/standard/package-info.java  |   63 -
 .../lucene/analysis/standard/package.html       |   50 +
 .../lucene/analysis/sv/SwedishAnalyzer.java     |   19 +-
 .../analysis/synonym/SynonymFilterFactory.java  |    2 +-
 .../apache/lucene/analysis/th/ThaiAnalyzer.java |   15 +-
 .../lucene/analysis/tr/TurkishAnalyzer.java     |   15 +-
 .../analysis/util/AbstractAnalysisFactory.java  |    4 +-
 .../lucene/analysis/util/CharArrayMap.java      |  669 ---
 .../lucene/analysis/util/CharArraySet.java      |  193 -
 .../lucene/analysis/util/CharTokenizer.java     |   10 +-
 .../lucene/analysis/util/CharacterUtils.java    |  251 -
 .../lucene/analysis/util/ElisionFilter.java     |    2 +-
 .../analysis/util/ElisionFilterFactory.java     |    1 +
 .../analysis/util/FilteringTokenFilter.java     |   76 -
 .../analysis/util/StopwordAnalyzerBase.java     |  138 -
 .../lucene/analysis/util/UnicodeProps.java      |    4 +-
 .../lucene/analysis/util/WordlistLoader.java    |  244 -
 .../wikipedia/WikipediaTokenizerImpl.java       |    1 +
 .../wikipedia/WikipediaTokenizerImpl.jflex      |    1 +
 .../lucene/collation/CollationKeyAnalyzer.java  |    7 +
 .../apache/lucene/collation/package-info.java   |    2 +-
 .../lucene/analysis/ar/TestArabicAnalyzer.java  |    2 +-
 .../analysis/ar/TestArabicStemFilter.java       |    2 +-
 .../analysis/bg/TestBulgarianAnalyzer.java      |    2 +-
 .../analysis/bg/TestBulgarianStemmer.java       |    2 +-
 .../analysis/br/TestBrazilianAnalyzer.java      |    2 +-
 .../lucene/analysis/ca/TestCatalanAnalyzer.java |    2 +-
 .../lucene/analysis/cjk/TestCJKAnalyzer.java    |    4 +-
 .../lucene/analysis/ckb/TestSoraniAnalyzer.java |    2 +-
 .../commongrams/CommonGramsFilterTest.java      |    2 +-
 .../TestCommonGramsFilterFactory.java           |   10 +-
 .../TestCommonGramsQueryFilterFactory.java      |    7 +-
 .../compound/TestCompoundWordTokenFilter.java   |    2 +-
 .../core/TestAllAnalyzersHaveFactories.java     |    4 +
 .../lucene/analysis/core/TestAnalyzers.java     |    5 +
 .../analysis/core/TestBugInSomething.java       |    2 +-
 .../lucene/analysis/core/TestRandomChains.java  |   16 +-
 .../lucene/analysis/core/TestStopAnalyzer.java  |   14 +-
 .../lucene/analysis/core/TestStopFilter.java    |  176 -
 .../analysis/core/TestStopFilterFactory.java    |    2 +-
 .../analysis/custom/TestCustomAnalyzer.java     |  143 +
 .../lucene/analysis/cz/TestCzechAnalyzer.java   |    4 +-
 .../lucene/analysis/cz/TestCzechStemmer.java    |    2 +-
 .../lucene/analysis/da/TestDanishAnalyzer.java  |    2 +-
 .../lucene/analysis/de/TestGermanAnalyzer.java  |    2 +-
 .../analysis/de/TestGermanLightStemFilter.java  |    2 +-
 .../de/TestGermanMinimalStemFilter.java         |    2 +-
 .../analysis/de/TestGermanStemFilter.java       |    4 +-
 .../lucene/analysis/en/TestEnglishAnalyzer.java |    2 +-
 .../analysis/en/TestPorterStemFilter.java       |    8 +-
 .../lucene/analysis/es/TestSpanishAnalyzer.java |    2 +-
 .../lucene/analysis/eu/TestBasqueAnalyzer.java  |    2 +-
 .../lucene/analysis/fa/TestPersianAnalyzer.java |    2 +-
 .../lucene/analysis/fi/TestFinnishAnalyzer.java |    2 +-
 .../analysis/fi/TestFinnishLightStemFilter.java |    2 +-
 .../lucene/analysis/fr/TestFrenchAnalyzer.java  |    2 +-
 .../analysis/fr/TestFrenchLightStemFilter.java  |    2 +-
 .../fr/TestFrenchMinimalStemFilter.java         |    2 +-
 .../lucene/analysis/ga/TestIrishAnalyzer.java   |    2 +-
 .../analysis/gl/TestGalicianAnalyzer.java       |    2 +-
 .../gl/TestGalicianMinimalStemFilter.java       |    2 +-
 .../lucene/analysis/hi/TestHindiAnalyzer.java   |    2 +-
 .../analysis/hu/TestHungarianAnalyzer.java      |    2 +-
 .../hu/TestHungarianLightStemFilter.java        |    2 +-
 .../hunspell/TestHunspellStemFilter.java        |    2 +-
 .../analysis/hy/TestArmenianAnalyzer.java       |    2 +-
 .../analysis/id/TestIndonesianAnalyzer.java     |    2 +-
 .../lucene/analysis/it/TestItalianAnalyzer.java |    2 +-
 .../analysis/lt/TestLithuanianAnalyzer.java     |    2 +-
 .../lucene/analysis/lv/TestLatvianAnalyzer.java |    2 +-
 .../analysis/minhash/MinHashFilterTest.java     |    4 +-
 .../miscellaneous/TestCapitalizationFilter.java |    2 +-
 .../miscellaneous/TestKeepFilterFactory.java    |    4 +-
 .../miscellaneous/TestKeepWordFilter.java       |    2 +-
 .../miscellaneous/TestKeywordMarkerFilter.java  |    4 +-
 .../TestStemmerOverrideFilter.java              |    2 +-
 .../miscellaneous/TestWordDelimiterFilter.java  |   12 +-
 .../lucene/analysis/nl/TestDutchAnalyzer.java   |    6 +-
 .../analysis/no/TestNorwegianAnalyzer.java      |    2 +-
 .../no/TestNorwegianLightStemFilter.java        |    2 +-
 .../no/TestNorwegianMinimalStemFilter.java      |    2 +-
 .../analysis/pt/TestPortugueseAnalyzer.java     |    2 +-
 .../pt/TestPortugueseLightStemFilter.java       |    2 +-
 .../pt/TestPortugueseMinimalStemFilter.java     |    2 +-
 .../analysis/pt/TestPortugueseStemFilter.java   |    6 +-
 .../analysis/ro/TestRomanianAnalyzer.java       |    2 +-
 .../lucene/analysis/ru/TestRussianAnalyzer.java |    4 +-
 .../analysis/ru/TestRussianLightStemFilter.java |    2 +-
 .../shingle/ShingleAnalyzerWrapperTest.java     |    4 +-
 .../analysis/sinks/TestTeeSinkTokenFilter.java  |    4 +-
 .../analysis/standard/TestStandardAnalyzer.java |  390 --
 .../standard/WordBreakTestUnicode_6_3_0.java    | 5537 ------------------
 .../generateJavaUnicodeWordBreakTest.pl         |  232 -
 .../lucene/analysis/sv/TestSwedishAnalyzer.java |    2 +-
 .../analysis/sv/TestSwedishLightStemFilter.java |    2 +-
 .../lucene/analysis/th/TestThaiAnalyzer.java    |    2 +-
 .../lucene/analysis/tr/TestTurkishAnalyzer.java |    2 +-
 .../lucene/analysis/util/TestCharArrayMap.java  |  244 -
 .../lucene/analysis/util/TestCharArraySet.java  |  429 --
 .../analysis/util/TestCharacterUtils.java       |  107 -
 .../lucene/analysis/util/TestElision.java       |    2 +-
 .../util/TestFilesystemResourceLoader.java      |    2 +
 .../analysis/util/TestWordlistLoader.java       |   79 -
 .../tools/groovy/generate-unicode-data.groovy   |    4 +-
 lucene/analysis/icu/src/java/overview.html      |    2 +-
 .../segmentation/TestWithCJKBigramFilter.java   |    4 +-
 .../lucene/analysis/ja/JapaneseAnalyzer.java    |   15 +-
 .../analysis/ja/JapaneseNumberFilter.java       |    4 +-
 .../ja/JapanesePartOfSpeechStopFilter.java      |    4 +-
 .../JapanesePartOfSpeechStopFilterFactory.java  |    2 +-
 .../analysis/ja/TestJapaneseBaseFormFilter.java |    2 +-
 .../ja/TestJapaneseKatakanaStemFilter.java      |    6 +-
 .../analysis/ja/TestJapaneseNumberFilter.java   |    2 +-
 .../analysis/morfologik/MorfologikAnalyzer.java |    6 +
 .../analysis/morfologik/MorfologikFilter.java   |   12 +-
 .../uk/UkrainianMorfologikAnalyzer.java         |  153 +
 .../apache/lucene/analysis/uk/package-info.java |   21 +
 .../apache/lucene/analysis/uk/mapping_uk.txt    |   19 +
 .../org/apache/lucene/analysis/uk/stopwords.txt | 1269 ++++
 .../apache/lucene/analysis/uk/ukrainian.dict    |  Bin 0 -> 1989243 bytes
 .../apache/lucene/analysis/uk/ukrainian.info    |   10 +
 .../morfologik/TestMorfologikAnalyzer.java      |    2 +-
 .../analysis/uk/TestUkrainianAnalyzer.java      |   72 +
 .../analysis/cn/smart/SmartChineseAnalyzer.java |   12 +-
 .../lucene/analysis/pl/PolishAnalyzer.java      |   21 +-
 .../lucene/analysis/pl/TestPolishAnalyzer.java  |    2 +-
 .../index/TestBackwardsCompatibility.java       |   42 +-
 .../org/apache/lucene/index/index.6.0.0-cfs.zip |  Bin 13744 -> 15807 bytes
 .../apache/lucene/index/index.6.0.0-nocfs.zip   |  Bin 13749 -> 15806 bytes
 .../org/apache/lucene/index/index.6.0.1-cfs.zip |  Bin 13734 -> 15820 bytes
 .../apache/lucene/index/index.6.0.1-nocfs.zip   |  Bin 13735 -> 15823 bytes
 .../org/apache/lucene/index/index.6.1.0-cfs.zip |  Bin 0 -> 15803 bytes
 .../apache/lucene/index/index.6.1.0-nocfs.zip   |  Bin 0 -> 15829 bytes
 .../lucene/index/unsupported.5.5.2-cfs.zip      |  Bin 0 -> 13712 bytes
 .../lucene/index/unsupported.5.5.2-nocfs.zip    |  Bin 0 -> 13720 bytes
 .../benchmark/byTask/tasks/CommitIndexTask.java |    2 +-
 .../benchmark/quality/TestQualityRun.java       |    3 -
 .../lucene/benchmark/quality/trecQRels.txt      |  428 +-
 lucene/build.xml                                |   15 +-
 .../BooleanPerceptronClassifier.java            |   20 +-
 .../CachingNaiveBayesClassifier.java            |   18 +-
 .../KNearestNeighborClassifier.java             |    9 +-
 .../SimpleNaiveBayesClassifier.java             |   24 +-
 .../KNearestNeighborDocumentClassifier.java     |   10 +-
 .../SimpleNaiveBayesDocumentClassifier.java     |   16 +-
 .../utils/ConfusionMatrixGenerator.java         |    8 +-
 .../classification/utils/DatasetSplitter.java   |   20 +-
 .../DocumentClassificationTestBase.java         |   11 +-
 .../KNearestNeighborDocumentClassifierTest.java |   40 +-
 .../SimpleNaiveBayesDocumentClassifierTest.java |   32 +-
 .../autoprefix/AutoPrefixPostingsFormat.java    |  125 -
 .../lucene/codecs/autoprefix/package-info.java  |   22 -
 .../simpletext/SimpleTextCompoundFormat.java    |    5 +-
 .../simpletext/SimpleTextPointsWriter.java      |   16 +-
 .../org.apache.lucene.codecs.PostingsFormat     |    1 -
 .../TestAutoPrefixPostingsFormat.java           |   38 -
 .../codecs/autoprefix/TestAutoPrefixTerms.java  |  941 ---
 lucene/common-build.xml                         |   48 +-
 lucene/core/build.xml                           |   18 +-
 .../org/apache/lucene/analysis/Analyzer.java    |  135 +-
 .../apache/lucene/analysis/CharArrayMap.java    |  669 +++
 .../apache/lucene/analysis/CharArraySet.java    |  196 +
 .../apache/lucene/analysis/CharacterUtils.java  |  251 +
 .../lucene/analysis/FilteringTokenFilter.java   |   76 +
 .../apache/lucene/analysis/LowerCaseFilter.java |   50 +
 .../org/apache/lucene/analysis/StopFilter.java  |  111 +
 .../lucene/analysis/StopwordAnalyzerBase.java   |  138 +
 .../apache/lucene/analysis/WordlistLoader.java  |  244 +
 .../apache/lucene/analysis/package-info.java    |    2 +-
 .../analysis/standard/StandardAnalyzer.java     |  122 +
 .../analysis/standard/StandardFilter.java       |   39 +
 .../analysis/standard/StandardTokenizer.java    |  214 +
 .../standard/StandardTokenizerImpl.java         |  823 +++
 .../standard/StandardTokenizerImpl.jflex        |  207 +
 .../lucene/analysis/standard/package-info.java  |   33 +
 .../apache/lucene/codecs/BlockTermState.java    |   14 +-
 .../codecs/blocktree/AutoPrefixTermsWriter.java |  438 --
 .../codecs/blocktree/BlockTreeTermsReader.java  |   11 +-
 .../codecs/blocktree/BlockTreeTermsWriter.java  |  222 +-
 .../codecs/blocktree/IntersectTermsEnum.java    |    2 -
 .../codecs/lucene50/Lucene50CompoundReader.java |    7 +-
 .../org/apache/lucene/document/BinaryPoint.java |    2 +-
 .../lucene/document/CompressionTools.java       |  150 -
 .../lucene/document/SortedDocValuesField.java   |    4 +-
 .../document/SortedSetDocValuesField.java       |    2 +
 .../java/org/apache/lucene/geo/GeoUtils.java    |   43 +-
 .../src/java/org/apache/lucene/geo/Polygon.java |   13 +-
 .../java/org/apache/lucene/geo/Rectangle.java   |    4 +-
 .../lucene/geo/SimpleGeoJSONPolygonParser.java  |  440 ++
 .../apache/lucene/index/DocumentsWriter.java    |    4 +
 .../org/apache/lucene/index/IndexCommit.java    |    2 +-
 .../org/apache/lucene/index/IndexUpgrader.java  |    2 +-
 .../org/apache/lucene/index/IndexWriter.java    |   74 +-
 .../apache/lucene/index/IndexWriterConfig.java  |   17 +-
 .../apache/lucene/index/MergeReaderWrapper.java |    4 +-
 .../org/apache/lucene/index/SegmentInfos.java   |   35 +-
 .../apache/lucene/index/SortingLeafReader.java  |    4 +-
 .../org/apache/lucene/index/TermContext.java    |   12 -
 .../java/org/apache/lucene/index/TermState.java |    6 -
 .../apache/lucene/search/BlendedTermQuery.java  |    6 +-
 .../org/apache/lucene/search/BooleanQuery.java  |   44 +-
 .../org/apache/lucene/search/BooleanScorer.java |   36 +-
 .../lucene/search/BooleanTopLevelScorers.java   |  182 -
 .../org/apache/lucene/search/BooleanWeight.java |  180 +-
 .../org/apache/lucene/search/BoostQuery.java    |   44 +-
 .../apache/lucene/search/ConjunctionDISI.java   |  124 +-
 .../apache/lucene/search/ConjunctionScorer.java |   10 +-
 .../lucene/search/ConstantScoreQuery.java       |    6 +-
 .../lucene/search/ConstantScoreWeight.java      |   35 +-
 .../lucene/search/DisjunctionMaxQuery.java      |   29 +-
 .../lucene/search/DisjunctionSumScorer.java     |   11 +-
 .../lucene/search/DocValuesRewriteMethod.java   |    4 +-
 .../org/apache/lucene/search/Explanation.java   |   33 +-
 .../apache/lucene/search/FieldValueQuery.java   |    4 +-
 .../org/apache/lucene/search/FilterWeight.java  |   73 +
 .../org/apache/lucene/search/IndexSearcher.java |   27 +-
 .../org/apache/lucene/search/LRUQueryCache.java |   36 +-
 .../lucene/search/LeafFieldComparator.java      |    7 +-
 .../apache/lucene/search/MatchAllDocsQuery.java |    4 +-
 .../apache/lucene/search/MatchNoDocsQuery.java  |   42 +-
 .../lucene/search/MinShouldMatchSumScorer.java  |    6 +-
 .../apache/lucene/search/MultiPhraseQuery.java  |   20 +-
 .../apache/lucene/search/MultiTermQuery.java    |    8 +-
 .../MultiTermQueryConstantScoreWrapper.java     |   11 +-
 .../org/apache/lucene/search/PhraseQuery.java   |   20 +-
 .../apache/lucene/search/PointInSetQuery.java   |    4 +-
 .../apache/lucene/search/PointRangeQuery.java   |    4 +-
 .../java/org/apache/lucene/search/Query.java    |    3 +-
 .../lucene/search/RandomAccessWeight.java       |    4 +-
 .../apache/lucene/search/ScoringRewrite.java    |    6 +-
 .../org/apache/lucene/search/SynonymQuery.java  |   20 +-
 .../org/apache/lucene/search/TermQuery.java     |   86 +-
 .../java/org/apache/lucene/search/Weight.java   |   15 +-
 .../org/apache/lucene/search/package-info.java  |   29 +-
 .../search/similarities/BM25Similarity.java     |   25 +-
 .../lucene/search/similarities/BasicStats.java  |   35 +-
 .../search/similarities/ClassicSimilarity.java  |   12 -
 .../search/similarities/LMSimilarity.java       |    8 +-
 .../search/similarities/MultiSimilarity.java    |   20 +-
 .../similarities/PerFieldSimilarityWrapper.java |   14 +-
 .../lucene/search/similarities/Similarity.java  |   64 +-
 .../search/similarities/SimilarityBase.java     |    8 +-
 .../search/similarities/TFIDFSimilarity.java    |  168 +-
 .../search/spans/FieldMaskingSpanQuery.java     |    4 +-
 .../search/spans/ScoringWrapperSpans.java       |   95 -
 .../lucene/search/spans/SpanBoostQuery.java     |   54 +-
 .../lucene/search/spans/SpanContainQuery.java   |    4 +-
 .../search/spans/SpanContainingQuery.java       |   12 +-
 .../search/spans/SpanMultiTermQueryWrapper.java |    2 +-
 .../lucene/search/spans/SpanNearQuery.java      |   18 +-
 .../lucene/search/spans/SpanNotQuery.java       |   14 +-
 .../apache/lucene/search/spans/SpanOrQuery.java |   12 +-
 .../search/spans/SpanPositionCheckQuery.java    |   10 +-
 .../apache/lucene/search/spans/SpanQuery.java   |    2 +-
 .../apache/lucene/search/spans/SpanScorer.java  |   10 +-
 .../lucene/search/spans/SpanTermQuery.java      |    8 +-
 .../apache/lucene/search/spans/SpanWeight.java  |   20 +-
 .../lucene/search/spans/SpanWithinQuery.java    |   12 +-
 .../lucene/store/ByteArrayIndexInput.java       |    4 +-
 .../java/org/apache/lucene/store/Directory.java |    8 +-
 .../org/apache/lucene/store/FSDirectory.java    |   15 +-
 .../lucene/store/FileSwitchDirectory.java       |   10 +-
 .../apache/lucene/store/FilterDirectory.java    |    9 +-
 .../store/LockValidatingDirectoryWrapper.java   |   10 +-
 .../lucene/store/NRTCachingDirectory.java       |    4 +-
 .../org/apache/lucene/store/RAMDirectory.java   |   22 +-
 .../lucene/store/TrackingDirectoryWrapper.java  |    4 +-
 .../org/apache/lucene/util/BitSetIterator.java  |   10 +
 .../org/apache/lucene/util/QueryBuilder.java    |    1 -
 .../java/org/apache/lucene/util/SloppyMath.java |   20 +-
 .../org/apache/lucene/util/bkd/BKDReader.java   |   83 +-
 .../org/apache/lucene/util/bkd/BKDWriter.java   |  187 +-
 .../apache/lucene/util/bkd/DocIdsWriter.java    |  170 +
 .../org/apache/lucene/util/packed/Direct16.java |    2 +-
 .../org/apache/lucene/util/packed/Direct32.java |    2 +-
 .../org/apache/lucene/util/packed/Direct64.java |    2 +-
 .../org/apache/lucene/util/packed/Direct8.java  |    2 +-
 .../lucene/util/packed/Packed16ThreeBlocks.java |    2 +-
 .../lucene/util/packed/Packed64SingleBlock.java |    2 +-
 .../lucene/util/packed/Packed8ThreeBlocks.java  |    2 +-
 lucene/core/src/java/overview.html              |    2 +-
 .../lucene/analysis/TestCharArrayMap.java       |  244 +
 .../lucene/analysis/TestCharArraySet.java       |  430 ++
 .../lucene/analysis/TestCharacterUtils.java     |  107 +
 .../apache/lucene/analysis/TestStopFilter.java  |  139 +
 .../lucene/analysis/TestWordlistLoader.java     |   79 +
 .../analysis/standard/TestStandardAnalyzer.java |  396 ++
 .../lucene62/TestLucene62SegmentInfoFormat.java |    4 +-
 .../lucene/document/TestBinaryDocument.java     |   29 -
 .../test/org/apache/lucene/geo/TestPolygon.java |  241 +
 .../index/TestAllFilesCheckIndexHeader.java     |    6 -
 .../index/TestConcurrentMergeScheduler.java     |    3 -
 .../test/org/apache/lucene/index/TestCrash.java |    1 -
 .../apache/lucene/index/TestCustomNorms.java    |   12 +-
 .../apache/lucene/index/TestDeletionPolicy.java |    4 +-
 .../lucene/index/TestDirectoryReaderReopen.java |    4 +-
 .../lucene/index/TestIndexFileDeleter.java      |    3 -
 .../apache/lucene/index/TestIndexSorting.java   |    8 +-
 .../apache/lucene/index/TestIndexWriter.java    |   43 +-
 .../lucene/index/TestIndexWriterCommit.java     |   57 +-
 .../lucene/index/TestIndexWriterDelete.java     |    1 -
 .../index/TestIndexWriterExceptions2.java       |    1 -
 .../lucene/index/TestIndexWriterFromReader.java |    9 -
 .../lucene/index/TestIndexWriterMerging.java    |    5 -
 .../lucene/index/TestIndexWriterOnDiskFull.java |    1 -
 .../TestIndexWriterOutOfFileDescriptors.java    |    1 -
 .../index/TestIndexWriterWithThreads.java       |    3 -
 .../lucene/index/TestMaxTermFrequency.java      |    2 -
 .../lucene/index/TestNRTReaderCleanup.java      |    3 -
 .../test/org/apache/lucene/index/TestNorms.java |   15 +-
 .../org/apache/lucene/index/TestOmitTf.java     |    4 -
 .../lucene/index/TestTransactionRollback.java   |    4 +-
 .../apache/lucene/index/TestTransactions.java   |    2 -
 .../lucene/index/TestUniqueTermCount.java       |    2 +-
 .../apache/lucene/search/JustCompileSearch.java |   12 +-
 .../org/apache/lucene/search/TestBoolean2.java  |   65 +-
 .../apache/lucene/search/TestBooleanCoord.java  |  860 ---
 .../search/TestBooleanMinShouldMatch.java       |   58 +-
 .../org/apache/lucene/search/TestBooleanOr.java |    2 +-
 .../apache/lucene/search/TestBooleanQuery.java  |   37 +-
 .../search/TestBooleanQueryVisitSubscorers.java |    5 +-
 .../lucene/search/TestBooleanRewrites.java      |   14 -
 .../apache/lucene/search/TestBooleanScorer.java |   26 +-
 .../apache/lucene/search/TestBoostQuery.java    |    4 +-
 .../lucene/search/TestComplexExplanations.java  |   17 +-
 .../lucene/search/TestConjunctionDISI.java      |   98 +-
 .../apache/lucene/search/TestConjunctions.java  |   13 +-
 .../lucene/search/TestConstantScoreQuery.java   |   13 +-
 .../TestControlledRealTimeReopenThread.java     |   15 +
 .../lucene/search/TestDocValuesScoring.java     |   14 +-
 .../apache/lucene/search/TestFilterWeight.java  |   67 +
 .../apache/lucene/search/TestLRUQueryCache.java |   35 +-
 .../lucene/search/TestMatchNoDocsQuery.java     |   50 +-
 .../lucene/search/TestMinShouldMatch2.java      |   13 +-
 .../lucene/search/TestMultiPhraseQuery.java     |   27 -
 .../search/TestMultiTermConstantScore.java      |   74 -
 .../search/TestMultiTermQueryRewrites.java      |   12 +-
 .../apache/lucene/search/TestNeedsScores.java   |   14 +-
 .../lucene/search/TestPositionIncrement.java    |    4 +-
 .../search/TestPositiveScoresOnlyCollector.java |    2 +-
 .../apache/lucene/search/TestQueryRescorer.java |   11 +-
 .../search/TestScoreCachingWrappingScorer.java  |    2 +-
 .../apache/lucene/search/TestScorerPerf.java    |    5 +-
 .../apache/lucene/search/TestSimilarity.java    |    4 -
 .../lucene/search/TestSimilarityProvider.java   |   20 -
 .../lucene/search/TestSimpleExplanations.java   |   43 +-
 .../apache/lucene/search/TestSortRandom.java    |    4 +-
 .../org/apache/lucene/search/TestTermQuery.java |  154 +
 .../org/apache/lucene/search/TestWildcard.java  |    3 +-
 .../search/similarities/TestSimilarity2.java    |    5 -
 .../search/similarities/TestSimilarityBase.java |    8 +-
 .../search/spans/JustCompileSearchSpans.java    |    2 +-
 .../search/spans/TestFieldMaskingSpanQuery.java |   14 +-
 .../search/spans/TestNearSpansOrdered.java      |   26 +-
 .../lucene/search/spans/TestSpanCollection.java |    6 +-
 .../search/spans/TestSpanContainQuery.java      |    2 +-
 .../apache/lucene/search/spans/TestSpans.java   |   10 +-
 .../store/TestTrackingDirectoryWrapper.java     |    2 +-
 .../apache/lucene/util/TestOfflineSorter.java   |   13 -
 .../apache/lucene/util/TestQueryBuilder.java    |   43 +-
 .../org/apache/lucene/util/bkd/TestBKD.java     |   35 +-
 .../lucene/util/bkd/TestDocIdsWriter.java       |  101 +
 .../org/apache/lucene/util/fst/TestFSTs.java    |    1 -
 .../demo/facet/DistanceFacetsExample.java       |   27 +-
 .../org/apache/lucene/facet/DrillDownQuery.java |    1 -
 .../apache/lucene/facet/DrillSidewaysQuery.java |   14 +-
 .../apache/lucene/facet/range/DoubleRange.java  |    6 +-
 .../apache/lucene/facet/range/LongRange.java    |    6 +-
 .../lucene/facet/taxonomy/TaxonomyReader.java   |    2 +-
 .../lucene/facet/taxonomy/TaxonomyWriter.java   |   16 +-
 .../directory/DirectoryTaxonomyWriter.java      |   40 +-
 .../apache/lucene/facet/TestDrillSideways.java  |    5 +-
 .../facet/range/TestRangeFacetCounts.java       |   14 +-
 .../taxonomy/directory/TestAddTaxonomy.java     |    4 +-
 .../directory/TestDirectoryTaxonomyWriter.java  |   20 +-
 .../lucene/search/grouping/SearchGroup.java     |    8 +-
 .../highlight/WeightedSpanTermExtractor.java    |    4 +-
 .../search/vectorhighlight/FieldQuery.java      |    6 +
 .../search/vectorhighlight/FieldQueryTest.java  |   14 +
 lucene/ivy-versions.properties                  |   37 +-
 .../lucene/search/join/GlobalOrdinalsQuery.java |    8 +-
 .../join/GlobalOrdinalsWithScoreQuery.java      |   25 +-
 .../org/apache/lucene/search/join/JoinUtil.java |    4 +-
 .../join/PointInSetIncludingScoreQuery.java     |   47 +-
 .../search/join/TermsIncludingScoreQuery.java   |   13 +-
 .../search/join/ToChildBlockJoinQuery.java      |   33 +-
 .../search/join/ToParentBlockJoinQuery.java     |   33 +-
 .../lucene/search/join/TestBlockJoin.java       |    4 +-
 .../apache/lucene/search/join/TestJoinUtil.java |   13 +-
 lucene/licenses/commons-compress-1.11.jar.sha1  |    1 +
 lucene/licenses/commons-compress-1.8.1.jar.sha1 |    1 -
 .../apache/lucene/index/memory/MemoryIndex.java |   27 +-
 .../lucene/index/memory/TestMemoryIndex.java    |   22 +
 .../store/HardlinkCopyDirectoryWrapper.java     |    4 +-
 .../search/TestDiversifiedTopDocsCollector.java |    4 +-
 .../store/TestHardLinkCopyDirectoryWrapper.java |    4 +-
 .../apache/lucene/util/fst/TestFSTsMisc.java    |    1 -
 .../apache/lucene/queries/BoostingQuery.java    |   18 +-
 .../apache/lucene/queries/CommonTermsQuery.java |   49 +-
 .../apache/lucene/queries/CustomScoreQuery.java |   82 +-
 .../org/apache/lucene/queries/TermsQuery.java   |    7 +-
 .../lucene/queries/function/BoostedQuery.java   |   18 +-
 .../lucene/queries/function/FunctionQuery.java  |   37 +-
 .../queries/function/FunctionRangeQuery.java    |   13 +-
 .../lucene/queries/mlt/MoreLikeThisQuery.java   |    1 -
 .../queries/payloads/PayloadScoreQuery.java     |   20 +-
 .../queries/payloads/SpanPayloadCheckQuery.java |   10 +-
 .../lucene/queries/BoostingQueryTest.java       |    4 +-
 .../lucene/queries/CommonTermsQueryTest.java    |   10 +-
 .../queries/TestCustomScoreExplanations.java    |   12 +-
 .../lucene/queries/TestCustomScoreQuery.java    |    4 -
 .../function/TestLongNormValueSource.java       |   12 -
 .../queries/payloads/TestPayloadScoreQuery.java |   10 -
 .../queries/payloads/TestPayloadSpans.java      |   30 +-
 .../queries/payloads/TestPayloadTermQuery.java  |   14 +-
 .../analyzing/AnalyzingQueryParser.java         |  202 -
 .../queryparser/analyzing/package-info.java     |   22 -
 .../lucene/queryparser/classic/CharStream.java  |    2 +-
 .../classic/MultiFieldQueryParser.java          |   52 +-
 .../queryparser/classic/ParseException.java     |    2 +-
 .../lucene/queryparser/classic/QueryParser.java |  399 +-
 .../lucene/queryparser/classic/QueryParser.jj   |  265 +-
 .../queryparser/classic/QueryParserBase.java    |  179 +-
 .../classic/QueryParserTokenManager.java        |   27 +-
 .../lucene/queryparser/classic/Token.java       |    2 +-
 .../queryparser/classic/TokenMgrError.java      |    2 +-
 .../complexPhrase/ComplexPhraseQueryParser.java |   24 +-
 .../CommonQueryParserConfiguration.java         |   12 -
 .../flexible/standard/StandardQueryParser.java  |   30 -
 .../StandardBooleanQueryNodeBuilder.java        |  109 -
 .../builders/StandardQueryTreeBuilder.java      |    6 +-
 .../builders/SynonymQueryNodeBuilder.java       |   48 +
 .../config/StandardQueryConfigHandler.java      |    9 -
 .../nodes/StandardBooleanQueryNode.java         |   49 -
 .../standard/nodes/SynonymQueryNode.java        |   30 +
 .../processors/AnalyzerQueryNodeProcessor.java  |    8 +-
 .../processors/FuzzyQueryNodeProcessor.java     |   11 +-
 ...owercaseExpandedTermsQueryNodeProcessor.java |  100 -
 .../processors/RegexpQueryNodeProcessor.java    |   56 +
 .../StandardQueryNodeProcessorPipeline.java     |    4 +-
 .../processors/TermRangeQueryNodeProcessor.java |   11 +-
 .../processors/WildcardQueryNodeProcessor.java  |   58 +-
 .../queryparser/simple/SimpleQueryParser.java   |   16 +-
 .../lucene/queryparser/xml/CoreParser.java      |    2 +
 .../xml/builders/BooleanQueryBuilder.java       |    1 -
 .../xml/builders/TermsQueryBuilder.java         |    1 -
 .../analyzing/TestAnalyzingQueryParser.java     |  268 -
 .../classic/TestMultiFieldQueryParser.java      |    1 -
 .../queryparser/classic/TestQueryParser.java    |  485 +-
 .../ext/TestExtendableQueryParser.java          |    1 +
 .../precedence/TestPrecedenceQueryParser.java   |   61 +-
 .../standard/TestMultiFieldQPHelper.java        |   24 +-
 .../flexible/standard/TestQPHelper.java         |   93 +-
 .../flexible/standard/TestStandardQP.java       |   27 +-
 .../simple/TestSimpleQueryParser.java           |    3 -
 .../queryparser/util/QueryParserTestBase.java   |  149 +-
 .../IndexAndTaxonomyReplicationHandler.java     |    6 +-
 .../replicator/IndexReplicationHandler.java     |    3 +-
 .../apache/lucene/replicator/nrt/CopyJob.java   |    4 +-
 .../lucene/replicator/nrt/CopyOneFile.java      |    4 +-
 .../apache/lucene/replicator/nrt/CopyState.java |    4 +-
 .../lucene/replicator/nrt/FileMetaData.java     |    4 +-
 .../org/apache/lucene/replicator/nrt/Node.java  |    4 +-
 .../nrt/NodeCommunicationException.java         |    4 +-
 .../nrt/PreCopyMergedSegmentWarmer.java         |    4 +-
 .../lucene/replicator/nrt/PrimaryNode.java      |   30 +-
 .../replicator/nrt/ReplicaFileDeleter.java      |    4 +-
 .../lucene/replicator/nrt/ReplicaNode.java      |    8 +-
 .../nrt/SegmentInfosSearcherManager.java        |    4 +-
 .../IndexAndTaxonomyReplicationClientTest.java  |   16 +-
 .../replicator/IndexReplicationClientTest.java  |   15 +-
 .../lucene/replicator/LocalReplicatorTest.java  |    4 +-
 .../replicator/http/HttpReplicatorTest.java     |    2 +-
 .../lucene/replicator/nrt/Connection.java       |    4 +-
 .../org/apache/lucene/replicator/nrt/Jobs.java  |    4 +-
 .../lucene/replicator/nrt/NodeProcess.java      |    4 +-
 .../lucene/replicator/nrt/SimpleCopyJob.java    |    6 +-
 .../replicator/nrt/SimplePrimaryNode.java       |    4 +-
 .../replicator/nrt/SimpleReplicaNode.java       |    4 +-
 .../lucene/replicator/nrt/SimpleServer.java     |    4 +-
 .../lucene/replicator/nrt/SimpleTransLog.java   |    4 +-
 .../replicator/nrt/TestNRTReplication.java      |   15 +-
 .../nrt/TestStressNRTReplication.java           |    4 +-
 .../lucene/replicator/nrt/ThreadPumper.java     |    4 +-
 .../org/apache/lucene/document/LatLonPoint.java |    5 +-
 .../document/LatLonPointDistanceQuery.java      |   39 +-
 .../document/LatLonPointInPolygonQuery.java     |    4 +-
 .../sandbox/queries/FuzzyLikeThisQuery.java     |    1 -
 .../lucene/search/DocValuesNumbersQuery.java    |    4 +-
 .../lucene/search/DocValuesRangeQuery.java      |    4 +-
 .../lucene/search/DocValuesTermsQuery.java      |    4 +-
 .../lucene/search/TermAutomatonQuery.java       |   18 +-
 .../sandbox/queries/FuzzyLikeThisQueryTest.java |    8 +-
 .../sandbox/queries/TestSlowFuzzyQuery2.java    |  184 -
 .../lucene/sandbox/queries/fuzzyTestData.txt    | 3721 ------------
 .../lucene/search/TestTermAutomatonQuery.java   |    4 +-
 .../spatial/composite/CompositeVerifyQuery.java |    6 +-
 .../composite/IntersectsRPTVerifyQuery.java     |    4 +-
 .../spatial/prefix/AbstractPrefixTreeQuery.java |    4 +-
 .../serialized/SerializedDVStrategy.java        |    4 +-
 .../geopoint/document/GeoPointTokenStream.java  |   14 +-
 .../geopoint/search/GeoPointDistanceQuery.java  |   15 +-
 .../search/GeoPointDistanceQueryImpl.java       |   63 +-
 .../search/GeoPointInBBoxQueryImpl.java         |   32 +-
 .../search/GeoPointInPolygonQueryImpl.java      |   14 +-
 .../geopoint/search/GeoPointMultiTermQuery.java |   49 +-
 .../GeoPointTermQueryConstantScoreWrapper.java  |    4 +-
 .../geopoint/search/GeoPointTermsEnum.java      |  212 +-
 .../lucene/spatial/util/TestGeoPointField.java  |    4 +-
 .../spatial3d/PointInGeo3DShapeQuery.java       |    4 +-
 .../lucene/spatial3d/geom/GeoStandardPath.java  |   35 +-
 .../apache/lucene/spatial3d/geom/XYZBounds.java |    2 +-
 .../lucene/spatial3d/geom/GeoPathTest.java      |   37 +-
 .../suggest/analyzing/SuggestStopFilter.java    |    4 +-
 .../analyzing/SuggestStopFilterFactory.java     |   10 +-
 .../suggest/document/CompletionWeight.java      |    8 -
 .../search/suggest/document/ContextQuery.java   |    4 +-
 .../suggest/document/FuzzyCompletionQuery.java  |    2 +-
 .../suggest/document/PrefixCompletionQuery.java |    2 +-
 .../suggest/document/RegexCompletionQuery.java  |    2 +-
 .../suggest/document/SuggestIndexSearcher.java  |    2 +-
 .../analyzing/AnalyzingInfixSuggesterTest.java  |    4 +-
 .../analyzing/BlendedInfixSuggesterTest.java    |    2 +-
 .../analyzing/TestFreeTextSuggester.java        |    6 +-
 .../analyzing/TestSuggestStopFilter.java        |    4 +-
 .../analyzing/TestSuggestStopFilterFactory.java |    2 +-
 .../analysis/BaseTokenStreamTestCase.java       |    5 +-
 .../apache/lucene/analysis/MockAnalyzer.java    |   11 +-
 .../lucene/analysis/MockBytesAnalyzer.java      |    7 +
 .../lucene/analysis/MockLowerCaseFilter.java    |   40 +
 .../lucene/analysis/MockSynonymAnalyzer.java    |   28 +
 .../lucene/analysis/MockSynonymFilter.java      |   97 +
 .../standard/WordBreakTestUnicode_6_3_0.java    | 5537 ++++++++++++++++++
 .../generateJavaUnicodeWordBreakTest.pl         |  232 +
 .../lucene/analysis/standard/package.html       |   26 +
 .../org/apache/lucene/geo/EarthDebugger.java    |    8 +-
 .../java/org/apache/lucene/geo/GeoTestUtil.java |    8 +-
 .../index/BaseCompoundFormatTestCase.java       |    2 +-
 .../index/BaseIndexFileFormatTestCase.java      |    1 -
 .../lucene/index/BaseNormsFormatTestCase.java   |    2 +-
 .../lucene/index/BasePointsFormatTestCase.java  |   29 +
 .../apache/lucene/mockfile/VirusCheckingFS.java |    4 +-
 .../lucene/search/AssertingIndexSearcher.java   |   23 +-
 .../apache/lucene/search/AssertingQuery.java    |    4 +-
 .../apache/lucene/search/AssertingWeight.java   |   29 +-
 .../lucene/search/BaseExplanationTestCase.java  |    6 +-
 .../org/apache/lucene/search/CheckHits.java     |    6 +-
 .../lucene/search/RandomApproximationQuery.java |   35 +-
 .../search/similarities/RandomSimilarity.java   |   32 +-
 .../lucene/search/spans/AssertingSpanQuery.java |    4 +-
 .../search/spans/AssertingSpanWeight.java       |   12 +-
 .../lucene/store/BaseDirectoryTestCase.java     |   56 +-
 .../lucene/store/MockDirectoryWrapper.java      |  136 +-
 .../lucene/analysis/TestMockSynonymFilter.java  |  151 +
 .../lucene/mockfile/TestVirusCheckingFS.java    |    4 +-
 .../search/TestBaseExplanationTestCase.java     |   24 +-
 .../lucene/store/TestMockDirectoryWrapper.java  |   82 +-
 lucene/tools/forbiddenApis/lucene.txt           |    3 +
 .../dependencies/GetMavenDependenciesTask.java  |    2 +-
 solr/CHANGES.txt                                |  347 +-
 solr/NOTICE.txt                                 |    3 +
 solr/bin/solr                                   |  254 +-
 solr/bin/solr.cmd                               |  280 +-
 solr/build.xml                                  |    8 +-
 solr/common-build.xml                           |    9 +-
 solr/contrib/analysis-extras/build.xml          |   10 +
 .../apache/solr/schema/ICUCollationField.java   |    2 +-
 .../solr/schema/TestICUCollationField.java      |   15 +-
 .../SolrStopwordsCarrot2LexicalDataFactory.java |    4 +-
 .../dataimport/TestTikaEntityProcessor.java     |    2 -
 .../solr/handler/dataimport/DataImporter.java   |    7 +-
 solr/contrib/extraction/ivy.xml                 |    3 +
 .../extraction/ExtractingDocumentLoader.java    |    4 +-
 .../ExtractingRequestHandlerTest.java           |    2 -
 solr/contrib/map-reduce/ivy.xml                 |    4 +-
 .../apache/solr/hadoop/SolrRecordWriter.java    |    7 +-
 .../solr/hadoop/TreeMergeOutputFormat.java      |    5 +-
 .../apache/solr/hadoop/MorphlineMapperTest.java |    1 +
 .../morphlines/cell/SolrCellMorphlineTest.java  |    3 +-
 solr/contrib/morphlines-core/ivy.xml            |    5 +-
 .../solr/collection1/conf/elevate.xml           |   24 +-
 .../src/test-files/solr/minimr/conf/elevate.xml |   24 +-
 .../src/test-files/solr/mrunit/conf/elevate.xml |   24 +-
 .../solrcelltest/collection1/conf/elevate.xml   |   24 +-
 solr/core/ivy.xml                               |    8 +-
 .../apache/solr/analysis/TokenizerChain.java    |   28 +-
 .../org/apache/solr/cloud/ElectionContext.java  |   51 +-
 .../java/org/apache/solr/cloud/LockTree.java    |   30 +-
 .../java/org/apache/solr/cloud/Overseer.java    |    9 +-
 .../OverseerAutoReplicaFailoverThread.java      |    2 +-
 .../cloud/OverseerCollectionMessageHandler.java |  168 +-
 .../org/apache/solr/cloud/ZkController.java     |  170 +-
 .../apache/solr/cloud/ZkSolrResourceLoader.java |   16 +-
 .../solr/cloud/overseer/CollectionMutator.java  |   11 +-
 .../apache/solr/cloud/rule/ImplicitSnitch.java  |   25 +-
 .../apache/solr/cloud/rule/ReplicaAssigner.java |   10 +-
 .../java/org/apache/solr/cloud/rule/Rule.java   |   10 +-
 .../apache/solr/cloud/rule/SnitchContext.java   |   33 +-
 .../org/apache/solr/core/CoreContainer.java     |   97 +-
 .../java/org/apache/solr/core/CoreSorter.java   |  185 +
 .../apache/solr/core/HdfsDirectoryFactory.java  |   13 +-
 .../java/org/apache/solr/core/NodeConfig.java   |   38 +-
 .../apache/solr/core/QuerySenderListener.java   |   10 +-
 .../java/org/apache/solr/core/SolrConfig.java   |   10 +-
 .../src/java/org/apache/solr/core/SolrCore.java |   75 +-
 .../apache/solr/core/SolrResourceLoader.java    |   10 +-
 .../org/apache/solr/core/SolrXmlConfig.java     |   12 +
 .../java/org/apache/solr/core/ZkContainer.java  |   44 +-
 .../apache/solr/core/backup/BackupManager.java  |  249 +
 .../apache/solr/core/backup/package-info.java   |   22 +
 .../backup/repository/BackupRepository.java     |  174 +
 .../repository/BackupRepositoryFactory.java     |   89 +
 .../backup/repository/HdfsBackupRepository.java |  159 +
 .../repository/LocalFileSystemRepository.java   |  136 +
 .../core/backup/repository/package-info.java    |   23 +
 .../org/apache/solr/handler/GraphHandler.java   |    8 +-
 .../org/apache/solr/handler/IndexFetcher.java   |   19 +-
 .../solr/handler/MoreLikeThisHandler.java       |    1 -
 .../apache/solr/handler/OldBackupDirectory.java |   55 +-
 .../apache/solr/handler/ReplicationHandler.java |   66 +-
 .../org/apache/solr/handler/RestoreCore.java    |   22 +-
 .../org/apache/solr/handler/SnapShooter.java    |  184 +-
 .../apache/solr/handler/SolrConfigHandler.java  |   29 +-
 .../org/apache/solr/handler/StreamHandler.java  |    1 +
 .../solr/handler/admin/CollectionsHandler.java  |  849 ++-
 .../solr/handler/admin/CoreAdminOperation.java  |   54 +-
 .../component/QueryElevationComponent.java      |    1 -
 .../handler/component/RealTimeGetComponent.java |  286 +-
 .../solr/handler/component/SearchComponent.java |    2 +
 .../solr/handler/component/SearchHandler.java   |    2 +
 .../solr/handler/component/TermsComponent.java  |  122 +-
 .../solr/highlight/DefaultSolrHighlighter.java  |   17 +-
 .../java/org/apache/solr/query/FilterQuery.java |    6 +-
 .../org/apache/solr/query/SolrRangeQuery.java   |   20 +-
 .../apache/solr/request/SolrRequestInfo.java    |    6 +-
 .../solr/response/GraphMLResponseWriter.java    |    6 +-
 .../org/apache/solr/response/ResultContext.java |    2 +-
 .../transform/DocIdAugmenterFactory.java        |   10 +-
 .../solr/response/transform/DocTransformer.java |   26 +-
 .../response/transform/DocTransformers.java     |   12 +
 .../transform/ExplainAugmenterFactory.java      |   22 +-
 .../transform/ValueSourceAugmenter.java         |   13 +-
 .../analysis/ManagedStopFilterFactory.java      |    4 +-
 .../solr/schema/AbstractSpatialFieldType.java   |   18 +-
 .../java/org/apache/solr/schema/LatLonType.java |   12 +-
 .../java/org/apache/solr/schema/PointType.java  |    2 -
 .../org/apache/solr/schema/SchemaManager.java   |   57 +-
 .../org/apache/solr/search/DisMaxQParser.java   |    1 -
 .../apache/solr/search/ExportQParserPlugin.java |    4 +-
 .../solr/search/ExtendedDismaxQParser.java      |   16 +-
 .../src/java/org/apache/solr/search/Filter.java |   10 +-
 .../solr/search/GraphTermsQParserPlugin.java    |    9 +-
 .../apache/solr/search/HashQParserPlugin.java   |    4 +-
 .../apache/solr/search/JoinQParserPlugin.java   |    8 +-
 .../org/apache/solr/search/QueryParsing.java    |    5 +-
 .../java/org/apache/solr/search/QueryUtils.java |    2 -
 .../apache/solr/search/ReRankQParserPlugin.java |   37 +-
 .../apache/solr/search/SimpleQParserPlugin.java |    2 -
 .../solr/search/SolrConstantScoreQuery.java     |    8 +-
 .../org/apache/solr/search/SolrCoreParser.java  |   40 +-
 .../apache/solr/search/SolrIndexSearcher.java   |   20 +-
 .../apache/solr/search/SolrQueryBuilder.java    |   34 +
 .../apache/solr/search/TermsQParserPlugin.java  |    1 -
 .../org/apache/solr/search/WrappedQuery.java    |    4 +-
 .../apache/solr/search/XmlQParserPlugin.java    |   11 +
 .../org/apache/solr/search/join/GraphQuery.java |   22 +-
 .../apache/solr/search/mlt/CloudMLTQParser.java |    2 -
 .../solr/search/mlt/SimpleMLTQParser.java       |    2 -
 .../similarities/SchemaSimilarityFactory.java   |    4 +-
 .../security/AutorizationEditOperation.java     |   29 +-
 .../org/apache/solr/security/Permission.java    |   35 +-
 .../solr/security/PermissionNameProvider.java   |    4 +-
 .../solr/store/blockcache/BlockDirectory.java   |    5 +-
 .../store/blockcache/BlockDirectoryCache.java   |    5 +-
 .../solr/store/blockcache/BufferStore.java      |    2 +-
 .../blockcache/CustomBufferedIndexInput.java    |    2 +-
 .../apache/solr/store/hdfs/HdfsDirectory.java   |   33 +-
 .../apache/solr/store/hdfs/HdfsFileReader.java  |  105 -
 .../solr/update/DeleteByQueryWrapper.java       |   10 +-
 .../solr/update/DirectUpdateHandler2.java       |    2 +-
 .../org/apache/solr/update/HdfsUpdateLog.java   |    6 -
 .../apache/solr/update/IndexFingerprint.java    |    4 +-
 .../java/org/apache/solr/update/PeerSync.java   |  124 +-
 .../apache/solr/update/UpdateShardHandler.java  |   13 +-
 .../ClassificationUpdateProcessor.java          |   38 +-
 .../ClassificationUpdateProcessorFactory.java   |   24 +-
 .../processor/UpdateRequestProcessorChain.java  |   19 +-
 .../apache/solr/util/ConcurrentLFUCache.java    |   15 +-
 .../apache/solr/util/ConcurrentLRUCache.java    |    7 +-
 .../apache/solr/util/RecordingJSONParser.java   |   80 +-
 .../src/java/org/apache/solr/util/SolrCLI.java  |  414 +-
 .../org/apache/solr/util/SolrPluginUtils.java   |    1 -
 .../resources/EditableSolrConfigAttributes.json |    5 +-
 solr/core/src/resources/ImplicitPlugins.json    |    6 +
 .../collection1/conf/schema-psuedo-fields.xml   |   71 +
 .../conf/solrconfig-testxmlparser.xml           |   33 +
 .../solr/collection1/conf/solrconfig-tlog.xml   |    8 +-
 .../solr/collection1/conf/solrconfig.xml        |    5 +-
 .../configsets/cloud-subdirs/conf/schema.xml    |   28 +
 .../cloud-subdirs/conf/solrconfig.xml           |   48 +
 .../conf/stopwords/stopwords-en.txt             |   62 +
 solr/core/src/test-files/solr/solr-50-all.xml   |    4 +
 solr/core/src/test-files/solr/solr-no-core.xml  |   45 -
 solr/core/src/test-files/solr/solr.xml          |    3 +
 .../apache/solr/DisMaxRequestHandlerTest.java   |    4 +-
 .../test/org/apache/solr/MinimalSchemaTest.java |    4 +-
 .../AbstractCloudBackupRestoreTestCase.java     |  272 +
 .../solr/cloud/BasicDistributedZk2Test.java     |   27 +-
 .../solr/cloud/CollectionStateFormat2Test.java  |    2 +-
 .../cloud/CollectionsAPIDistributedZkTest.java  |    2 +-
 .../solr/cloud/CreateCollectionCleanupTest.java |    4 +-
 .../apache/solr/cloud/CustomCollectionTest.java |    2 +-
 .../DeleteLastCustomShardedReplicaTest.java     |    2 +-
 .../apache/solr/cloud/DeleteReplicaTest.java    |    2 +-
 .../cloud/DistribJoinFromCollectionTest.java    |    2 +-
 .../cloud/OverseerModifyCollectionTest.java     |   96 +
 .../apache/solr/cloud/OverseerRolesTest.java    |    2 +-
 .../cloud/SharedFSAutoReplicaFailoverTest.java  |  150 +-
 .../apache/solr/cloud/SolrCLIZkUtilsTest.java   |  632 ++
 .../solr/cloud/TestAuthenticationFramework.java |  159 +-
 .../solr/cloud/TestCloudBackupRestore.java      |  219 -
 .../solr/cloud/TestCloudPseudoReturnFields.java |  839 +++
 .../apache/solr/cloud/TestConfigSetsAPI.java    |    4 +-
 .../solr/cloud/TestConfigSetsAPIZkFailure.java  |    2 +-
 .../solr/cloud/TestHdfsCloudBackupRestore.java  |  147 +
 .../cloud/TestLocalFSCloudBackupRestore.java    |   49 +
 .../org/apache/solr/cloud/TestLockTree.java     |   34 +-
 .../solr/cloud/TestMiniSolrCloudCluster.java    |    2 +-
 .../cloud/TestMiniSolrCloudClusterKerberos.java |    3 +-
 .../cloud/TestOnReconnectListenerSupport.java   |    4 +-
 .../apache/solr/cloud/TestRandomFlRTGCloud.java |  675 +++
 .../cloud/TestSizeLimitedDistributedMap.java    |    4 +-
 .../cloud/TestSolrCloudWithKerberosAlt.java     |    3 +-
 .../solr/cloud/UnloadDistributedZkTest.java     |    2 +-
 .../org/apache/solr/cloud/ZkControllerTest.java |    1 +
 .../solr/cloud/hdfs/HdfsNNFailoverTest.java     |    2 +-
 .../HdfsWriteToMultipleCollectionsTest.java     |    2 +-
 .../apache/solr/cloud/hdfs/StressHdfsTest.java  |    2 +-
 .../solr/cloud/overseer/ZkStateReaderTest.java  |   36 +-
 .../solr/cloud/overseer/ZkStateWriterTest.java  |   12 +-
 .../solr/cloud/rule/ImplicitSnitchTest.java     |   49 +-
 .../apache/solr/cloud/rule/RuleEngineTest.java  |   51 +-
 .../solr/core/BlobRepositoryCloudTest.java      |   35 +-
 .../solr/core/BlobRepositoryMockingTest.java    |   35 +-
 .../org/apache/solr/core/CoreSorterTest.java    |  240 +
 .../solr/core/OpenCloseCoreStressTest.java      |   16 +-
 .../test/org/apache/solr/core/SolrCoreTest.java |    1 +
 .../solr/core/TestBackupRepositoryFactory.java  |  149 +
 .../solr/core/TestConfigSetImmutable.java       |    3 +-
 .../org/apache/solr/core/TestLazyCores.java     |    4 +-
 .../solr/core/TestQuerySenderListener.java      |   18 +-
 .../apache/solr/core/TestSolrConfigHandler.java |    2 +-
 .../test/org/apache/solr/core/TestSolrXml.java  |   15 +-
 .../apache/solr/handler/BackupRestoreUtils.java |   67 +
 .../apache/solr/handler/CheckBackupStatus.java  |   10 +-
 .../DocumentAnalysisRequestHandlerTest.java     |    8 +-
 .../FieldAnalysisRequestHandlerTest.java        |   10 +-
 .../solr/handler/TestHdfsBackupRestoreCore.java |  251 +
 .../handler/TestReplicationHandlerBackup.java   |   39 +-
 .../apache/solr/handler/TestRestoreCore.java    |   52 +-
 .../handler/TestSolrConfigHandlerCloud.java     |    2 +-
 .../handler/admin/CoreAdminHandlerTest.java     |    2 +-
 .../solr/handler/admin/TestCollectionAPIs.java  |    2 +-
 .../DistributedTermsComponentTest.java          |   13 +-
 .../component/ResourceSharingTestComponent.java |   34 +-
 .../handler/component/TermsComponentTest.java   |   58 +-
 .../solr/index/hdfs/CheckHdfsIndexTest.java     |    3 +-
 .../apache/solr/request/TestFacetMethods.java   |    4 +-
 .../response/TestGraphMLResponseWriter.java     |    4 +-
 .../transform/TestSubQueryTransformer.java      |   17 +-
 .../apache/solr/schema/BooleanFieldTest.java    |    4 +-
 .../solr/schema/TestManagedSchemaAPI.java       |   34 +-
 .../apache/solr/search/GoodbyeQueryBuilder.java |   39 +
 .../apache/solr/search/HandyQueryBuilder.java   |   53 +
 .../apache/solr/search/HelloQueryBuilder.java   |   39 +
 .../search/TestGraphTermsQParserPlugin.java     |    4 +-
 .../solr/search/TestPseudoReturnFields.java     |  615 +-
 .../org/apache/solr/search/TestRangeQuery.java  |  146 +-
 .../apache/solr/search/TestRankQueryPlugin.java |    4 +-
 .../solr/search/TestXmlQParserPlugin.java       |   78 +
 .../solr/search/mlt/CloudMLTQParserTest.java    |   10 +-
 .../solr/security/BasicAuthIntegrationTest.java |  227 +-
 .../spelling/TestSuggestSpellingConverter.java  |    2 +-
 .../solr/store/hdfs/HdfsDirectoryTest.java      |    5 +-
 .../uninverting/TestFieldCacheSortRandom.java   |    4 +-
 ...lassificationUpdateProcessorFactoryTest.java |    4 +-
 .../example-DIH/solr/db/conf/elevate.xml        |   24 +-
 .../example-DIH/solr/mail/conf/elevate.xml      |   24 +-
 .../example-DIH/solr/rss/conf/elevate.xml       |   24 +-
 .../example-DIH/solr/solr/conf/elevate.xml      |   24 +-
 solr/example/files/conf/elevate.xml             |   24 +-
 solr/licenses/bcpkix-jdk15on-1.47.jar.sha1      |    1 +
 .../bcpkix-jdk15on-LICENSE-BSD_LIKE.txt         |   15 +
 solr/licenses/bcpkix-jdk15on-NOTICE.txt         |    2 +
 solr/licenses/commons-compress-1.11.jar.sha1    |    1 +
 solr/licenses/commons-compress-1.8.1.jar.sha1   |    1 -
 solr/licenses/fontbox-1.8.8.jar.sha1            |    1 -
 solr/licenses/fontbox-2.0.1.jar.sha1            |    1 +
 solr/licenses/hadoop-annotations-2.6.0.jar.sha1 |    1 -
 solr/licenses/hadoop-annotations-2.7.2.jar.sha1 |    1 +
 solr/licenses/hadoop-auth-2.6.0.jar.sha1        |    1 -
 solr/licenses/hadoop-auth-2.7.2.jar.sha1        |    1 +
 .../licenses/hadoop-common-2.6.0-tests.jar.sha1 |    1 -
 solr/licenses/hadoop-common-2.6.0.jar.sha1      |    1 -
 .../licenses/hadoop-common-2.7.2-tests.jar.sha1 |    1 +
 solr/licenses/hadoop-common-2.7.2.jar.sha1      |    1 +
 solr/licenses/hadoop-hdfs-2.6.0-tests.jar.sha1  |    1 -
 solr/licenses/hadoop-hdfs-2.6.0.jar.sha1        |    1 -
 solr/licenses/hadoop-hdfs-2.7.2-tests.jar.sha1  |    1 +
 solr/licenses/hadoop-hdfs-2.7.2.jar.sha1        |    1 +
 .../hadoop-mapreduce-client-app-2.6.0.jar.sha1  |    1 -
 .../hadoop-mapreduce-client-app-2.7.2.jar.sha1  |    1 +
 ...adoop-mapreduce-client-common-2.6.0.jar.sha1 |    1 -
 ...adoop-mapreduce-client-common-2.7.2.jar.sha1 |    1 +
 .../hadoop-mapreduce-client-core-2.6.0.jar.sha1 |    1 -
 .../hadoop-mapreduce-client-core-2.7.2.jar.sha1 |    1 +
 .../hadoop-mapreduce-client-hs-2.6.0.jar.sha1   |    1 -
 .../hadoop-mapreduce-client-hs-2.7.2.jar.sha1   |    1 +
 ...reduce-client-jobclient-2.6.0-tests.jar.sha1 |    1 -
 ...op-mapreduce-client-jobclient-2.6.0.jar.sha1 |    1 -
 ...reduce-client-jobclient-2.7.2-tests.jar.sha1 |    1 +
 ...op-mapreduce-client-jobclient-2.7.2.jar.sha1 |    1 +
 ...doop-mapreduce-client-shuffle-2.6.0.jar.sha1 |    1 -
 ...doop-mapreduce-client-shuffle-2.7.2.jar.sha1 |    1 +
 solr/licenses/hadoop-minikdc-2.6.0.jar.sha1     |    1 -
 solr/licenses/hadoop-minikdc-2.7.2.jar.sha1     |    1 +
 solr/licenses/hadoop-yarn-api-2.6.0.jar.sha1    |    1 -
 solr/licenses/hadoop-yarn-api-2.7.2.jar.sha1    |    1 +
 solr/licenses/hadoop-yarn-client-2.6.0.jar.sha1 |    1 -
 solr/licenses/hadoop-yarn-client-2.7.2.jar.sha1 |    1 +
 solr/licenses/hadoop-yarn-common-2.6.0.jar.sha1 |    1 -
 solr/licenses/hadoop-yarn-common-2.7.2.jar.sha1 |    1 +
 ...ver-applicationhistoryservice-2.6.0.jar.sha1 |    1 -
 ...ver-applicationhistoryservice-2.7.2.jar.sha1 |    1 +
 .../hadoop-yarn-server-common-2.6.0.jar.sha1    |    1 -
 .../hadoop-yarn-server-common-2.7.2.jar.sha1    |    1 +
 ...adoop-yarn-server-nodemanager-2.6.0.jar.sha1 |    1 -
 ...adoop-yarn-server-nodemanager-2.7.2.jar.sha1 |    1 +
 ...p-yarn-server-resourcemanager-2.6.0.jar.sha1 |    1 -
 ...p-yarn-server-resourcemanager-2.7.2.jar.sha1 |    1 +
 ...adoop-yarn-server-tests-2.6.0-tests.jar.sha1 |    1 -
 ...adoop-yarn-server-tests-2.7.2-tests.jar.sha1 |    1 +
 .../hadoop-yarn-server-web-proxy-2.6.0.jar.sha1 |    1 -
 .../hadoop-yarn-server-web-proxy-2.7.2.jar.sha1 |    1 +
 solr/licenses/htrace-core-3.0.4.jar.sha1        |    1 -
 .../htrace-core-3.2.0-incubating.jar.sha1       |    1 +
 solr/licenses/isoparser-1.0.2.jar.sha1          |    1 -
 solr/licenses/isoparser-1.1.18.jar.sha1         |    1 +
 solr/licenses/jackcess-2.1.3.jar.sha1           |    1 +
 solr/licenses/jackcess-LICENSE-ASL.txt          |  507 ++
 solr/licenses/jackcess-NOTICE.txt               |    2 +
 solr/licenses/jempbox-1.8.12.jar.sha1           |    1 +
 solr/licenses/jempbox-1.8.8.jar.sha1            |    1 -
 solr/licenses/metadata-extractor-2.6.2.jar.sha1 |    1 -
 solr/licenses/metadata-extractor-2.8.1.jar.sha1 |    1 +
 solr/licenses/netty-3.2.4.Final.jar.sha1        |    1 +
 solr/licenses/netty-3.7.0.Final.jar.sha1        |    1 -
 solr/licenses/netty-NOTICE.txt                  |   85 +-
 solr/licenses/netty-all-4.0.36.Final.jar.sha1   |    1 +
 solr/licenses/netty-all-LICENSE-ASL.txt         |  202 +
 solr/licenses/netty-all-NOTICE.txt              |  121 +
 solr/licenses/pdfbox-1.8.8.jar.sha1             |    1 -
 solr/licenses/pdfbox-2.0.1.jar.sha1             |    1 +
 solr/licenses/pdfbox-tools-2.0.1.jar.sha1       |    1 +
 solr/licenses/pdfbox-tools-LICENSE-ASL.txt      |  314 +
 solr/licenses/pdfbox-tools-NOTICE.txt           |   14 +
 solr/licenses/poi-3.11.jar.sha1                 |    1 -
 solr/licenses/poi-3.15-beta1.jar.sha1           |    1 +
 solr/licenses/poi-ooxml-3.11.jar.sha1           |    1 -
 solr/licenses/poi-ooxml-3.15-beta1.jar.sha1     |    1 +
 solr/licenses/poi-ooxml-schemas-3.11.jar.sha1   |    1 -
 .../poi-ooxml-schemas-3.15-beta1.jar.sha1       |    1 +
 solr/licenses/poi-scratchpad-3.11.jar.sha1      |    1 -
 .../licenses/poi-scratchpad-3.15-beta1.jar.sha1 |    1 +
 solr/licenses/rome-1.6.1.jar.sha1               |    1 +
 solr/licenses/tika-core-1.13.jar.sha1           |    1 +
 solr/licenses/tika-core-1.7.jar.sha1            |    1 -
 solr/licenses/tika-java7-1.13.jar.sha1          |    1 +
 solr/licenses/tika-java7-1.7.jar.sha1           |    1 -
 solr/licenses/tika-parsers-1.13.jar.sha1        |    1 +
 solr/licenses/tika-parsers-1.7.jar.sha1         |    1 -
 solr/licenses/tika-xmp-1.13.jar.sha1            |    1 +
 solr/licenses/tika-xmp-1.7.jar.sha1             |    1 -
 solr/licenses/vorbis-java-core-0.6.jar.sha1     |    1 -
 solr/licenses/vorbis-java-core-0.8.jar.sha1     |    1 +
 solr/licenses/vorbis-java-tika-0.6.jar.sha1     |    1 -
 solr/licenses/vorbis-java-tika-0.8.jar.sha1     |    1 +
 .../basic_configs/conf/_rest_managed.json       |    1 -
 .../configsets/basic_configs/conf/elevate.xml   |   42 +
 .../basic_configs/conf/lang/contractions_ca.txt |    8 +
 .../basic_configs/conf/lang/contractions_fr.txt |   15 +
 .../basic_configs/conf/lang/contractions_ga.txt |    5 +
 .../basic_configs/conf/lang/contractions_it.txt |   23 +
 .../basic_configs/conf/lang/hyphenations_ga.txt |    5 +
 .../basic_configs/conf/lang/stemdict_nl.txt     |    6 +
 .../basic_configs/conf/lang/stoptags_ja.txt     |  420 ++
 .../basic_configs/conf/lang/stopwords_ar.txt    |  125 +
 .../basic_configs/conf/lang/stopwords_bg.txt    |  193 +
 .../basic_configs/conf/lang/stopwords_ca.txt    |  220 +
 .../basic_configs/conf/lang/stopwords_cz.txt    |  172 +
 .../basic_configs/conf/lang/stopwords_da.txt    |  110 +
 .../basic_configs/conf/lang/stopwords_de.txt    |  294 +
 .../basic_configs/conf/lang/stopwords_el.txt    |   78 +
 .../basic_configs/conf/lang/stopwords_es.txt    |  356 ++
 .../basic_configs/conf/lang/stopwords_eu.txt    |   99 +
 .../basic_configs/conf/lang/stopwords_fa.txt    |  313 +
 .../basic_configs/conf/lang/stopwords_fi.txt    |   97 +
 .../basic_configs/conf/lang/stopwords_fr.txt    |  186 +
 .../basic_configs/conf/lang/stopwords_ga.txt    |  110 +
 .../basic_configs/conf/lang/stopwords_gl.txt    |  161 +
 .../basic_configs/conf/lang/stopwords_hi.txt    |  235 +
 .../basic_configs/conf/lang/stopwords_hu.txt    |  211 +
 .../basic_configs/conf/lang/stopwords_hy.txt    |   46 +
 .../basic_configs/conf/lang/stopwords_id.txt    |  359 ++
 .../basic_configs/conf/lang/stopwords_it.txt    |  303 +
 .../basic_configs/conf/lang/stopwords_ja.txt    |  127 +
 .../basic_configs/conf/lang/stopwords_lv.txt    |  172 +
 .../basic_configs/conf/lang/stopwords_nl.txt    |  119 +
 .../basic_configs/conf/lang/stopwords_no.txt    |  194 +
 .../basic_configs/conf/lang/stopwords_pt.txt    |  253 +
 .../basic_configs/conf/lang/stopwords_ro.txt    |  233 +
 .../basic_configs/conf/lang/stopwords_ru.txt    |  243 +
 .../basic_configs/conf/lang/stopwords_sv.txt    |  133 +
 .../basic_configs/conf/lang/stopwords_th.txt    |  119 +
 .../basic_configs/conf/lang/stopwords_tr.txt    |  212 +
 .../basic_configs/conf/lang/userdict_ja.txt     |   29 +
 .../basic_configs/conf/managed-schema           |  764 ++-
 .../configsets/basic_configs/conf/params.json   |   20 +
 .../basic_configs/conf/solrconfig.xml           | 1072 +++-
 .../data_driven_schema_configs/conf/elevate.xml |   24 +-
 .../conf/managed-schema                         |    4 +-
 .../conf/elevate.xml                            |   24 +-
 .../solr/client/solrj/impl/CloudSolrClient.java |  133 +-
 .../solr/client/solrj/impl/HttpClientUtil.java  |    8 +-
 .../solrj/io/graph/ShortestPathStream.java      |    6 +-
 .../solr/client/solrj/io/stream/JDBCStream.java |    4 +
 .../client/solrj/io/stream/ParallelStream.java  |    2 +-
 .../solrj/io/stream/ScoreNodesStream.java       |  256 +
 .../client/solrj/io/stream/TopicStream.java     |   45 +-
 .../solrj/request/CollectionAdminRequest.java   |   32 +-
 .../client/solrj/request/UpdateRequest.java     |    6 +
 .../solr/common/ToleratedUpdateError.java       |    2 +-
 .../solr/common/cloud/ClusterProperties.java    |    4 +-
 .../common/cloud/CollectionStatePredicate.java  |    4 +-
 .../common/cloud/CollectionStateWatcher.java    |    6 +-
 .../apache/solr/common/cloud/DocCollection.java |   26 +-
 .../apache/solr/common/cloud/SolrZkClient.java  |  119 +-
 .../solr/common/cloud/ZkConfigManager.java      |  117 +-
 .../solr/common/cloud/ZkMaintenanceUtils.java   |  368 ++
 .../apache/solr/common/cloud/ZkStateReader.java |  146 +-
 .../solr/common/params/CollectionParams.java    |   29 +-
 .../solr/common/params/CoreAdminParams.java     |   10 +
 .../apache/solr/common/params/SolrParams.java   |   24 +-
 .../apache/solr/common/params/TermsParams.java  |   12 +
 .../org/apache/solr/common/util/RetryUtil.java  |    8 +-
 .../solr/configsets/streaming/conf/schema.xml   |    2 +-
 .../solrj/impl/CloudSolrClientBuilderTest.java  |   28 +-
 .../client/solrj/impl/CloudSolrClientTest.java  |   24 +-
 .../ConcurrentUpdateSolrClientBuilderTest.java  |   12 +-
 .../solrj/impl/HttpSolrClientBuilderTest.java   |   22 +-
 .../solrj/impl/LBHttpSolrClientBuilderTest.java |   18 +-
 .../solrj/io/graph/GraphExpressionTest.java     |  137 +-
 .../solr/client/solrj/io/graph/GraphTest.java   |    4 +-
 .../solr/client/solrj/io/sql/JdbcTest.java      |    4 +-
 .../client/solrj/io/stream/JDBCStreamTest.java  |   16 +
 .../solrj/io/stream/StreamExpressionTest.java   |  244 +-
 .../client/solrj/io/stream/StreamingTest.java   |   19 +-
 .../cloud/TestCollectionStateWatchers.java      |    4 +-
 .../solr/common/util/TestJsonRecordReader.java  |   52 +
 .../apache/solr/common/util/TestRetryUtil.java  |   37 +-
 .../java/org/apache/solr/SolrTestCaseJ4.java    |   71 +-
 .../apache/solr/core/MockDirectoryFactory.java  |    5 -
 .../solr/core/MockFSDirectoryFactory.java       |    1 -
 solr/webapp/web/js/angular/controllers/cloud.js |   48 +-
 solr/webapp/web/js/lib/jquery.blockUI.js        |    2 +-
 1076 files changed, 39462 insertions(+), 26376 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/49a09217/solr/core/src/java/org/apache/solr/core/CoreContainer.java
----------------------------------------------------------------------
diff --cc solr/core/src/java/org/apache/solr/core/CoreContainer.java
index e2ad877,1bdf3e3..e72f381
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@@ -16,21 -16,8 +16,10 @@@
   */
  package org.apache.solr.core;
  
- import static com.google.common.base.Preconditions.checkNotNull;
- import static java.util.Collections.EMPTY_MAP;
- import static org.apache.solr.common.params.CommonParams.AUTHC_PATH;
- import static org.apache.solr.common.params.CommonParams.AUTHZ_PATH;
- import static org.apache.solr.common.params.CommonParams.COLLECTIONS_HANDLER_PATH;
- import static org.apache.solr.common.params.CommonParams.CONFIGSETS_HANDLER_PATH;
- import static org.apache.solr.common.params.CommonParams.CORES_HANDLER_PATH;
- import static org.apache.solr.common.params.CommonParams.INFO_HANDLER_PATH;
- import static org.apache.solr.common.params.CommonParams.ZK_PATH;
- import static org.apache.solr.security.AuthenticationPlugin.AUTHENTICATION_PLUGIN_PROP;
- 
  import java.io.IOException;
 +import java.io.InputStream;
  import java.lang.invoke.MethodHandles;
 +import java.nio.ByteBuffer;
  import java.nio.file.Path;
  import java.nio.file.Paths;
  import java.util.ArrayList;
@@@ -329,9 -349,9 +351,9 @@@ public class CoreContainer 
        }
        if (builder.getAuthSchemeRegistryProvider() != null) {
          httpClientBuilder.setAuthSchemeRegistryProvider(new AuthSchemeRegistryProvider() {
-           
+ 
            @Override
 -          public Lookup<AuthSchemeProvider> getAuthSchemeRegistry() {
 +          public org.apache.http.config.Lookup<AuthSchemeProvider> getAuthSchemeRegistry() {
              return builder.getAuthSchemeRegistryProvider().getAuthSchemeRegistry();
            }
          });

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/49a09217/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/49a09217/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/49a09217/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
----------------------------------------------------------------------
diff --cc solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
index 769db01,cb72790..f96f6ed
--- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
@@@ -203,32 -228,7 +214,32 @@@ public class CollectionsHandler extend
      rsp.setHttpCaching(false);
    }
  
 -
 +  void invokeAction(SolrQueryRequest req, SolrQueryResponse rsp, CoreContainer cores, CollectionAction action, CollectionOperation operation) throws Exception {
 +    if (!coreContainer.isZooKeeperAware()) {
 +      throw new SolrException(BAD_REQUEST,
 +          "Invalid request. collections can be accessed only in SolrCloud mode");
 +    }
 +    SolrResponse response = null;
-     Map<String, Object> props = operation.call(req, rsp, this);
++    Map<String, Object> props = operation.execute(req, rsp, this);
 +    String asyncId = req.getParams().get(ASYNC);
 +    if (props != null) {
 +      if (asyncId != null) {
 +        props.put(ASYNC, asyncId);
 +      }
 +      props.put(QUEUE_OPERATION, operation.action.toLower());
 +      ZkNodeProps zkProps = new ZkNodeProps(props);
 +      if (operation.sendToOCPQueue) {
 +        response = handleResponse(operation.action.toLower(), zkProps, rsp, operation.timeOut);
 +      }
 +      else Overseer.getStateUpdateQueue(coreContainer.getZkController().getZkClient()).offer(Utils.toJSON(props));
 +      final String collectionName = zkProps.getStr(NAME);
 +      if (action.equals(CollectionAction.CREATE) && asyncId == null) {
 +        if (rsp.getException() == null) {
 +          waitForActiveCollection(collectionName, zkProps, cores, response);
 +        }
 +      }
 +    }
 +  }
  
  
    static final Set<String> KNOWN_ROLES = ImmutableSet.of("overseer");
@@@ -864,12 -799,7 +810,6 @@@
  
      }
  
-     /**
-      * All actions must implement this method. If a non null map is returned , the action name is added to
-      * the map and sent to overseer for processing. If it returns a null, the call returns immediately
-      */
-     abstract Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception;
--
      public static CollectionOperation get(CollectionAction action) {
        for (CollectionOperation op : values()) {
          if (op.action == action) return op;
@@@ -1056,21 -992,16 +1002,26 @@@
      }
    }
  
-   public static final List<String> MODIFIABLE_COLL_PROPS = ImmutableList.of(
+   interface CollectionOp {
+     Map<String, Object> execute(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception;
 -    
++
+   }
+ 
+   public static final List<String> MODIFIABLE_COLL_PROPS = Arrays.asList(
        RULE,
        SNITCH,
        REPLICATION_FACTOR,
        MAX_SHARDS_PER_NODE,
-       AUTO_ADD_REPLICAS);
- 
+       AUTO_ADD_REPLICAS,
+       COLL_CONF);
 +
 +  @Override
 +  public Collection<Api> getApis() {
 +    return v2Handler.getApis();
 +  }
 +
 +  @Override
 +  public Boolean registerV2() {
 +    return Boolean.TRUE;
 +  }
  }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/49a09217/solr/core/src/java/org/apache/solr/schema/SchemaManager.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/49a09217/solr/core/src/resources/ImplicitPlugins.json
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/49a09217/solr/core/src/test/org/apache/solr/core/SolrCoreTest.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/49a09217/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/49a09217/solr/core/src/test/org/apache/solr/handler/admin/TestCollectionAPIs.java
----------------------------------------------------------------------
diff --cc solr/core/src/test/org/apache/solr/handler/admin/TestCollectionAPIs.java
index 6cfbf92,0000000..2ce467e
mode 100644,000000..100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/TestCollectionAPIs.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/TestCollectionAPIs.java
@@@ -1,213 -1,0 +1,213 @@@
 +package org.apache.solr.handler.admin;
 +
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one or more
 + * contributor license agreements.  See the NOTICE file distributed with
 + * this work for additional information regarding copyright ownership.
 + * The ASF licenses this file to You under the Apache License, Version 2.0
 + * (the "License"); you may not use this file except in compliance with
 + * the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +
 +
 +import java.io.StringReader;
 +import java.lang.invoke.MethodHandles;
 +import java.util.Arrays;
 +import java.util.Collection;
 +import java.util.Collections;
 +import java.util.HashMap;
 +import java.util.List;
 +import java.util.Map;
 +
 +import org.apache.solr.SolrTestCaseJ4;
 +import org.apache.solr.client.solrj.SolrRequest;
 +import org.apache.solr.common.cloud.ZkNodeProps;
 +import org.apache.solr.common.params.CollectionParams;
 +import org.apache.solr.common.params.MapSolrParams;
 +import org.apache.solr.common.params.MultiMapSolrParams;
 +import org.apache.solr.common.params.SolrParams;
 +import org.apache.solr.common.util.Pair;
 +import org.apache.solr.common.util.Utils;
 +import org.apache.solr.core.CoreContainer;
 +import org.apache.solr.request.LocalSolrQueryRequest;
 +import org.apache.solr.request.SolrQueryRequest;
 +import org.apache.solr.response.SolrQueryResponse;
 +import org.apache.solr.servlet.SolrRequestParsers;
 +import org.apache.solr.util.CommandOperation;
 +import org.apache.solr.api.Api;
 +import org.apache.solr.api.ApiBag;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +import static org.apache.solr.client.solrj.SolrRequest.METHOD.DELETE;
 +import static org.apache.solr.client.solrj.SolrRequest.METHOD.POST;
 +import static org.apache.solr.cloud.Overseer.QUEUE_OPERATION;
 +import static org.apache.solr.common.util.Utils.fromJSONString;
 +
 +public class TestCollectionAPIs extends SolrTestCaseJ4 {
 +  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 +
 +
 +  public void testCommands() throws Exception {
 +    MockCollectionsHandler collectionsHandler = new MockCollectionsHandler();
 +    ApiBag apiBag = new ApiBag();
 +    Collection<Api> apis = collectionsHandler.getApis();
 +    for (Api api : apis) apiBag.register(api, Collections.EMPTY_MAP);
 +    //test a simple create collection call
 +    compareOutput(apiBag, "/collections", POST,
 +        "{create:{name:'newcoll', config:'schemaless', numShards:2, replicationFactor:2 }}", null,
 +        "{name:newcoll, fromApi:'true', replicationFactor:'2', collection.configName:schemaless, numShards:'2', stateFormat:'2', operation:create}");
 +
 +    //test a create collection with custom properties
 +    compareOutput(apiBag, "/collections", POST,
 +        "{create:{name:'newcoll', config:'schemaless', numShards:2, replicationFactor:2, properties:{prop1:'prop1val', prop2: prop2val} }}", null,
 +        "{name:newcoll, fromApi:'true', replicationFactor:'2', collection.configName:schemaless, numShards:'2', stateFormat:'2', operation:create, property.prop1:prop1val, property.prop2:prop2val}");
 +
 +
 +    compareOutput(apiBag, "/collections", POST,
 +        "{create-alias:{name: aliasName , collections:[c1,c2] }}", null, "{operation : createalias, name: aliasName, collections:[c1,c2] }");
 +
 +    compareOutput(apiBag, "/collections", POST,
 +        "{delete-alias:aliasName}", null, "{operation : deletealias, name: aliasName}");
 +
 +    compareOutput(apiBag, "/collections/collName", POST,
 +        "{reload:{}}", null,
 +        "{name:collName, operation :reload}");
 +
 +    compareOutput(apiBag, "/collections/collName", DELETE,
 +        null, null,
 +        "{name:collName, operation :delete}");
 +
 +    compareOutput(apiBag, "/collections/collName/shards/shard1", DELETE,
 +        null, null,
 +        "{collection:collName, shard: shard1 , operation :deleteshard }");
 +
 +    compareOutput(apiBag, "/collections/collName/shards/shard1/replica1?deleteDataDir=true&onlyIfDown=true", DELETE,
 +        null, null,
 +        "{collection:collName, shard: shard1, replica :replica1 , deleteDataDir:'true', onlyIfDown: 'true', operation :deletereplica }");
 +
 +    compareOutput(apiBag, "/collections/collName/shards", POST,
 +        "{split:{shard:shard1, ranges: '0-1f4,1f5-3e8,3e9-5dc', coreProperties : {prop1:prop1Val, prop2:prop2Val} }}", null,
 +        "{collection: collName , shard : shard1, ranges :'0-1f4,1f5-3e8,3e9-5dc', operation : splitshard, property.prop1:prop1Val, property.prop2: prop2Val}"
 +    );
 +
 +    compareOutput(apiBag, "/collections/collName/shards", POST,
 +        "{add-replica:{shard: shard1, node: 'localhost_8978' , coreProperties : {prop1:prop1Val, prop2:prop2Val} }}", null,
 +        "{collection: collName , shard : shard1, node :'localhost_8978', operation : addreplica, property.prop1:prop1Val, property.prop2: prop2Val}"
 +    );
 +
 +    compareOutput(apiBag, "/collections/collName/shards", POST,
 +        "{split:{ splitKey:id12345, coreProperties : {prop1:prop1Val, prop2:prop2Val} }}", null,
 +        "{collection: collName , split.key : id12345 , operation : splitshard, property.prop1:prop1Val, property.prop2: prop2Val}"
 +    );
 +
 +    compareOutput(apiBag, "/collections/collName/shards/shard1/replica1", POST,
 +        "{set-property : {name:propA , value: VALA}}", null,
 +        "{collection: collName, shard: shard1, replica : replica1 , property : propA , operation : addreplicaprop, property.value : 'VALA'}"
 +    );
 +
 +    compareOutput(apiBag, "/collections/collName/shards/shard1/replica1", POST,
 +        "{delete-property : propA }", null,
 +        "{collection: collName, shard: shard1, replica : replica1 , property : propA , operation : deletereplicaprop}"
 +    );
 +
 +    compareOutput(apiBag, "/collections/collName", POST,
 +        "{modify : {rule : 'replica:*,cores:<5', autoAddReplicas : false} }", null,
 +        "{collection: collName, operation : modifycollection , autoAddReplicas : 'false', rule : [{replica: '*', cores : '<5' }]}"
 +    );
 +
 +
 +    System.out.println();
 +
 +  }
 +
 +  ZkNodeProps compareOutput(final ApiBag apiBag, final String path, final SolrRequest.METHOD method,
 +                            final String payload, final CoreContainer cc, String expectedOutputMapJson) throws Exception {
 +    Pair<SolrQueryRequest, SolrQueryResponse> ctx = makeCall(apiBag, path, method, payload, cc);
 +    ZkNodeProps output = (ZkNodeProps) ctx.first().getContext().get(ZkNodeProps.class.getName());
 +    Map expected = (Map) fromJSONString(expectedOutputMapJson);
 +    assertMapEqual(expected, output);
 +    return output;
 +
 +  }
 +
 +  public static Pair<SolrQueryRequest, SolrQueryResponse> makeCall(final ApiBag apiBag, String path, final SolrRequest.METHOD method,
 +                                    final String payload, final CoreContainer cc) throws Exception {
 +    SolrParams queryParams = new MultiMapSolrParams(Collections.EMPTY_MAP);
 +    if (path.indexOf('?') > 0) {
 +      String queryStr = path.substring(path.indexOf('?')+1);
 +      path = path.substring(0, path.indexOf('?'));
 +      queryParams = SolrRequestParsers.parseQueryString(queryStr);
 +    }
 +    final HashMap<String, String> parts = new HashMap<>();
 +    Api api = apiBag.lookup(path, method.toString(), parts);
 +    if (api == null) throw new RuntimeException("No handler at path :" + path);
 +    SolrQueryResponse rsp = new SolrQueryResponse();
 +    LocalSolrQueryRequest req = new LocalSolrQueryRequest(null, queryParams){
 +      @Override
 +      public List<CommandOperation> getCommands(boolean validateInput) {
 +        if (payload == null) return Collections.emptyList();
 +        return ApiBag.getCommandOperations(new StringReader(payload), api.getCommandSchema(),true);
 +      }
 +
 +      @Override
 +      public Map<String, String> getPathValues() {
 +        return parts;
 +      }
 +
 +      @Override
 +      public String getHttpMethod() {
 +        return method.toString();
 +      }
 +    };
 +    try {
 +      api.call(req, rsp);
 +    } catch (ApiBag.ExceptionWithErrObject e) {
 +      throw new RuntimeException(e.getMessage() + Utils.toJSONString(e.getErrs()) , e);
 +
 +    }
 +    return new Pair<>(req,rsp);
 +  }
 +
 +  private void assertMapEqual(Map expected, ZkNodeProps actual) {
 +    assertEquals(errorMessage(expected, actual), expected.size(), actual.getProperties().size());
 +    for (Object o : expected.entrySet()) {
 +      Map.Entry e = (Map.Entry) o;
 +      Object actualVal = actual.get((String) e.getKey());
 +      if (actualVal instanceof String[]) {
 +        actualVal = Arrays.asList((String[]) actualVal);
 +      }
 +      assertEquals(errorMessage(expected, actual), e.getValue(), actualVal);
 +    }
 +  }
 +
 +  private String errorMessage(Map expected, ZkNodeProps actual) {
 +    return "expected: " + Utils.toJSONString(expected) + "\nactual: " + Utils.toJSONString(actual);
 +
 +  }
 +
 +  static class MockCollectionsHandler extends CollectionsHandler {
 +    LocalSolrQueryRequest req;
 +
 +    MockCollectionsHandler() { }
 +
 +    @Override
 +    void invokeAction(SolrQueryRequest req, SolrQueryResponse rsp, CoreContainer cores, CollectionParams.CollectionAction action,
 +                      CollectionOperation operation) throws Exception {
-       Map<String, Object> result = operation.call(req, rsp, this);
++      Map<String, Object> result = operation.execute(req, rsp, this);
 +      if (result != null) {
 +        result.put(QUEUE_OPERATION, operation.action.toLower());
 +        req.getContext().put(ZkNodeProps.class.getName(),new ZkNodeProps(result) );
 +      }
 +    }
 +  }
 +
 +}


[26/51] [abbrv] lucene-solr:apiv2: SOLR-7280: In cloud-mode sort the cores smartly before loading & limit threads to improve cluster stability

Posted by sa...@apache.org.
SOLR-7280: In cloud-mode sort the cores smartly before loading & limit threads to improve cluster stability


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/6c1b75b0
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/6c1b75b0
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/6c1b75b0

Branch: refs/heads/apiv2
Commit: 6c1b75b06bf2fe53be776923097e54b8c560826d
Parents: 4f45226
Author: Noble Paul <no...@apache.org>
Authored: Sat Jul 16 19:12:59 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Sat Jul 16 19:12:59 2016 +0530

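A minimal, self-contained sketch of the idea before the diff: in cloud mode, sort the
discovered cores with a comparator and then load them on a bounded thread pool, instead of
an effectively unbounded one. CoreDesc, its field, and the comparator body are hypothetical
stand-ins for illustration, not Solr's CoreDescriptor/CoreSorter API.

    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.List;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    class CoreLoadSketch {
      // Hypothetical stand-in for a core descriptor; only what the ordering needs.
      static class CoreDesc {
        final String name;
        final int replicasWaitingElsewhere; // replicas of this shard hosted on other live nodes
        CoreDesc(String name, int waiting) { this.name = name; this.replicasWaitingElsewhere = waiting; }
      }

      static void loadAll(List<CoreDesc> discovered, boolean cloudMode) {
        List<CoreDesc> cores = new ArrayList<>(discovered); // sort a copy, as the patch does
        if (cloudMode) {
          // load first the cores that other live nodes are most likely waiting on
          cores.sort(Comparator.comparingInt((CoreDesc c) -> c.replicasWaitingElsewhere).reversed());
        }
        int threads = cloudMode ? 24 : 3; // mirrors the new defaults introduced below
        ExecutorService pool = Executors.newFixedThreadPool(threads);
        for (CoreDesc cd : cores) {
          pool.submit(() -> System.out.println("loading " + cd.name));
        }
        pool.shutdown();
      }

      public static void main(String[] args) {
        List<CoreDesc> cds = new ArrayList<>();
        cds.add(new CoreDesc("coll_shard1_replica1", 0));
        cds.add(new CoreDesc("coll_shard2_replica1", 2)); // two nodes waiting -> loaded first
        loadAll(cds, true);
      }
    }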
----------------------------------------------------------------------
 solr/CHANGES.txt                                |   3 +
 .../org/apache/solr/cloud/ZkController.java     |   2 +-
 .../org/apache/solr/core/CoreContainer.java     |  23 +-
 .../java/org/apache/solr/core/CoreSorter.java   | 186 ++++++++++++++
 .../java/org/apache/solr/core/NodeConfig.java   |  16 +-
 .../org/apache/solr/core/CoreSorterTest.java    | 246 +++++++++++++++++++
 .../test/org/apache/solr/core/TestSolrXml.java  |   2 +-
 7 files changed, 463 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6c1b75b0/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index af249be..d876a25 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -147,6 +147,9 @@ Bug Fixes
 * SOLR-9287: Including 'score' in the 'fl' param when doing an RTG no longer causes an NPE
   (hossman, Ishan Chattopadhyaya)
 
+* SOLR-7280: In cloud-mode sort the cores smartly before loading & limit threads to improve cluster stability
+  (noble, Erick Erickson, shalin)
+
 Optimizations
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6c1b75b0/solr/core/src/java/org/apache/solr/cloud/ZkController.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkController.java b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
index 3e4cbe5..f613141 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ZkController.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
@@ -115,7 +115,7 @@ import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
  * <p>
  * TODO: exceptions during close on attempts to update cloud state
  */
-public final class ZkController {
+public class ZkController {
 
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   static final int WAIT_DOWN_STATES_TIMEOUT_SECONDS = 60;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6c1b75b0/solr/core/src/java/org/apache/solr/core/CoreContainer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index cd05bbd..21f495c 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -25,6 +25,8 @@ import static org.apache.solr.common.params.CommonParams.CONFIGSETS_HANDLER_PATH
 import static org.apache.solr.common.params.CommonParams.CORES_HANDLER_PATH;
 import static org.apache.solr.common.params.CommonParams.INFO_HANDLER_PATH;
 import static org.apache.solr.common.params.CommonParams.ZK_PATH;
+import static org.apache.solr.core.NodeConfig.NodeConfigBuilder.DEFAULT_CORE_LOAD_THREADS;
+import static org.apache.solr.core.NodeConfig.NodeConfigBuilder.DEFAULT_CORE_LOAD_THREADS_IN_CLOUD;
 import static org.apache.solr.security.AuthenticationPlugin.AUTHENTICATION_PLUGIN_PROP;
 
 import java.io.IOException;
@@ -33,6 +35,8 @@ import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
@@ -349,7 +353,7 @@ public class CoreContainer {
       }
       if (builder.getAuthSchemeRegistryProvider() != null) {
         httpClientBuilder.setAuthSchemeRegistryProvider(new AuthSchemeRegistryProvider() {
-          
+
           @Override
           public Lookup<AuthSchemeProvider> getAuthSchemeRegistry() {
             return builder.getAuthSchemeRegistryProvider().getAuthSchemeRegistry();
@@ -485,17 +489,20 @@ public class CoreContainer {
     containerProperties.putAll(cfg.getSolrProperties());
 
     // setup executor to load cores in parallel
-    // do not limit the size of the executor in zk mode since cores may try and wait for each other.
     ExecutorService coreLoadExecutor = ExecutorUtil.newMDCAwareFixedThreadPool(
-        ( zkSys.getZkController() == null ? cfg.getCoreLoadThreadCount() : Integer.MAX_VALUE ),
+        cfg.getCoreLoadThreadCount(isZooKeeperAware() ? DEFAULT_CORE_LOAD_THREADS_IN_CLOUD : DEFAULT_CORE_LOAD_THREADS),
         new DefaultSolrThreadFactory("coreLoadExecutor") );
-    final List<Future<SolrCore>> futures = new ArrayList<Future<SolrCore>>();
+    final List<Future<SolrCore>> futures = new ArrayList<>();
     try {
-
       List<CoreDescriptor> cds = coresLocator.discover(this);
+      if (isZooKeeperAware()) {
+        //sort the cores if running in SolrCloud. In standalone mode the order does not matter
+        CoreSorter coreComparator = new CoreSorter().init(this);
+        cds = new ArrayList<>(cds);//make a copy
+        Collections.sort(cds, coreComparator::compare);
+      }
       checkForDuplicateCoreNames(cds);
 
-
       for (final CoreDescriptor cd : cds) {
         if (cd.isTransient() || !cd.isLoadOnStartup()) {
           solrCores.putDynamicDescriptor(cd.getName(), cd);
@@ -1258,6 +1265,10 @@ public class CoreContainer {
     return authenticationPlugin == null ? null : authenticationPlugin.plugin;
   }
 
+  public NodeConfig getNodeConfig() {
+    return cfg;
+  }
+
 }
 
 class CloserThread extends Thread {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6c1b75b0/solr/core/src/java/org/apache/solr/core/CoreSorter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CoreSorter.java b/solr/core/src/java/org/apache/solr/core/CoreSorter.java
new file mode 100644
index 0000000..8074009
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/core/CoreSorter.java
@@ -0,0 +1,186 @@
+package org.apache.solr.core;
+
+import java.util.Collection;
+import java.util.Comparator;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.solr.cloud.CloudDescriptor;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.DocCollection;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
+
+import static java.util.Collections.emptyList;
+import static java.util.stream.Collectors.toList;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * This is a utility class that sorts cores in such a way as to minimize other cores
+ * waiting for replicas in the current node. This helps avoid leaderVote timeouts
+ * in other nodes of the cluster
+ *
+ */
+public class CoreSorter {
+  Map<String, CountsForEachShard> shardsVsReplicaCounts = new LinkedHashMap<>();
+  CoreContainer cc;
+  private static final CountsForEachShard zero = new CountsForEachShard(0, 0, 0);
+
+  public final static Comparator<CountsForEachShard> countsComparator = (c1, c2) -> {
+    if (c1 == null) c1 = zero; //just to avoid NPE
+    if (c2 == null) c2 = zero;
+    if (c1.totalReplicasInDownNodes < c2.totalReplicasInDownNodes) {
+      //Prioritize shards with the fewest replicas on down nodes.
+      //It's better to bring up a node that is a member of a shard
+      //with 0 down nodes than 1 down node because it will make the shard
+      // complete earlier and avoid waiting by the other live nodes
+      if (c1.totalReplicasInLiveNodes > 0) {
+        //some other live node is waiting on this shard, so prioritize it
+        return -1;
+      }
+    }
+    if (c2.totalReplicasInDownNodes < c1.totalReplicasInDownNodes) {
+      //same as above, just to take care of the case where c2 has to be prioritized
+      if (c2.totalReplicasInLiveNodes > 0) {
+        //some other live node is waiting on c2, so it gets priority
+        return 1;
+      }
+    }
+
+    //Prioritize replicas that the most other nodes are waiting for.
+    // For example, if 1 other replica is waiting for this replica, then
+    // prioritize that over a replica where zero other nodes are waiting
+    if (c1.totalReplicasInLiveNodes > c2.totalReplicasInLiveNodes) return -1;
+    if (c2.totalReplicasInLiveNodes > c1.totalReplicasInLiveNodes) return 1;
+
+    //If all else is the same, prioritize the shards for which I have fewer replicas, because
+    //that will complete the quorum for the shard faster. If I have only one replica for a shard
+    // I can finish it faster than a shard with 2 replicas on this node
+    if (c1.myReplicas < c2.myReplicas) return -1;
+    if (c2.myReplicas < c1.myReplicas) return 1;
+    //if everything is same return 0
+    return 0;
+  };
+
+
+  public CoreSorter init(CoreContainer cc) {
+    this.cc = cc;
+    if (cc == null || !cc.isZooKeeperAware()) {
+      return this;
+    }
+    String myNodeName = getNodeName();
+    ClusterState state = cc.getZkController().getClusterState();
+    for (CloudDescriptor cloudDescriptor : getCloudDescriptors()) {
+      String coll = cloudDescriptor.getCollectionName();
+      String sliceName = getShardName(cloudDescriptor);
+      if (shardsVsReplicaCounts.containsKey(sliceName)) continue;
+      CountsForEachShard c = new CountsForEachShard(0, 0, 0);
+      for (Replica replica : getReplicas(state, coll, cloudDescriptor.getShardId())) {
+        if (replica.getNodeName().equals(myNodeName)) {
+          c.myReplicas++;
+        } else {
+          Set<String> liveNodes = state.getLiveNodes();
+          if (liveNodes.contains(replica.getNodeName())) {
+            c.totalReplicasInLiveNodes++;
+          } else {
+            c.totalReplicasInDownNodes++;
+          }
+        }
+      }
+      shardsVsReplicaCounts.put(sliceName, c);
+    }
+
+    return this;
+
+  }
+
+
+  public int compare(CoreDescriptor cd1, CoreDescriptor cd2) {
+    String s1 = getShardName(cd1.getCloudDescriptor());
+    String s2 = getShardName(cd2.getCloudDescriptor());
+    if (s1 == null || s2 == null) return cd1.getName().compareTo(cd2.getName());
+    CountsForEachShard c1 = shardsVsReplicaCounts.get(s1);
+    CountsForEachShard c2 = shardsVsReplicaCounts.get(s2);
+    int result = countsComparator.compare(c1, c2);
+    return result == 0 ? s1.compareTo(s2) : result;
+  }
+
+
+  static class CountsForEachShard {
+    public int totalReplicasInDownNodes = 0, myReplicas = 0, totalReplicasInLiveNodes = 0;
+
+    public CountsForEachShard(int totalReplicasInDownNodes, int totalReplicasInLiveNodes, int myReplicas) {
+      this.totalReplicasInDownNodes = totalReplicasInDownNodes;
+      this.myReplicas = myReplicas;
+      this.totalReplicasInLiveNodes = totalReplicasInLiveNodes;
+    }
+
+    public boolean equals(Object obj) {
+      if (obj instanceof CountsForEachShard) {
+        CountsForEachShard that = (CountsForEachShard) obj;
+        return that.totalReplicasInDownNodes == totalReplicasInDownNodes && that.myReplicas == myReplicas;
+
+      }
+      return false;
+    }
+
+    @Override
+    public String toString() {
+      return "down : " + totalReplicasInDownNodes + " , up :  " + totalReplicasInLiveNodes + " my : " + myReplicas;
+    }
+
+
+  }
+
+  static String getShardName(CloudDescriptor cd) {
+    return cd == null ?
+        null :
+        cd.getCollectionName()
+            + "_"
+            + cd.getShardId();
+  }
+
+
+  String getNodeName() {
+    return cc.getNodeConfig().getNodeName();
+  }
+
+  /**Return all replicas for a given collection+slice combo
+   */
+  Collection<Replica> getReplicas(ClusterState cs, String coll, String slice) {
+    DocCollection c = cs.getCollectionOrNull(coll);
+    if (c == null) return emptyList();
+    Slice s = c.getSlice(slice);
+    if (s == null) return emptyList();
+    return s.getReplicas();
+  }
+
+
+  /**return cloud descriptors for all cores in this node
+   */
+  Collection<CloudDescriptor> getCloudDescriptors() {
+    return cc.getCores()
+        .stream()
+        .map((core) -> core.getCoreDescriptor().getCloudDescriptor())
+        .collect(toList());
+  }
+
+
+}

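For reference, a compact, self-contained restatement of the ordering the comparator above
encodes, on plain {downNodeReplicas, liveNodeReplicas, myReplicas} triples. The triples are
invented for illustration and the class is not part of this commit.

    import java.util.Arrays;
    import java.util.Comparator;
    import java.util.List;

    class CoreOrderSketch {
      // lower "down" count wins while somebody live is waiting; then higher "live"; then fewer local replicas
      static final Comparator<int[]> ORDER = (a, b) -> {
        if (a[0] < b[0] && a[1] > 0) return -1;
        if (b[0] < a[0] && b[1] > 0) return 1;
        if (a[1] != b[1]) return Integer.compare(b[1], a[1]);
        return Integer.compare(a[2], b[2]);
      };

      public static void main(String[] args) {
        List<int[]> shards = Arrays.asList(
            new int[]{1, 3, 1},  // one replica on a down node, three live replicas waiting
            new int[]{0, 3, 2},  // nothing down -> should be loaded before the entry above
            new int[]{1, 0, 1}); // nobody live is waiting -> lowest priority
        shards.sort(ORDER);
        for (int[] s : shards) System.out.println(Arrays.toString(s));
        // expected order: [0, 3, 2], [1, 3, 1], [1, 0, 1]
      }
    }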
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6c1b75b0/solr/core/src/java/org/apache/solr/core/NodeConfig.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/NodeConfig.java b/solr/core/src/java/org/apache/solr/core/NodeConfig.java
index e72fbc9..3db453b 100644
--- a/solr/core/src/java/org/apache/solr/core/NodeConfig.java
+++ b/solr/core/src/java/org/apache/solr/core/NodeConfig.java
@@ -50,7 +50,7 @@ public class NodeConfig {
 
   private final CloudConfig cloudConfig;
 
-  private final int coreLoadThreads;
+  private final Integer coreLoadThreads;
 
   private final int transientCacheSize;
 
@@ -64,7 +64,7 @@ public class NodeConfig {
                      PluginInfo shardHandlerFactoryConfig, UpdateShardHandlerConfig updateShardHandlerConfig,
                      String coreAdminHandlerClass, String collectionsAdminHandlerClass,
                      String infoHandlerClass, String configSetsHandlerClass,
-                     LogWatcherConfig logWatcherConfig, CloudConfig cloudConfig, int coreLoadThreads,
+                     LogWatcherConfig logWatcherConfig, CloudConfig cloudConfig, Integer coreLoadThreads,
                      int transientCacheSize, boolean useSchemaCache, String managementPath, SolrResourceLoader loader,
                      Properties solrProperties, PluginInfo[] backupRepositoryPlugins) {
     this.nodeName = nodeName;
@@ -87,7 +87,7 @@ public class NodeConfig {
     this.solrProperties = solrProperties;
     this.backupRepositoryPlugins = backupRepositoryPlugins;
 
-    if (this.cloudConfig != null && this.coreLoadThreads < 2) {
+    if (this.cloudConfig != null && this.getCoreLoadThreadCount(NodeConfigBuilder.DEFAULT_CORE_LOAD_THREADS) < 2) {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
           "SolrCloud requires a value of at least 2 for coreLoadThreads (configured value = " + this.coreLoadThreads + ")");
     }
@@ -109,8 +109,8 @@ public class NodeConfig {
     return updateShardHandlerConfig;
   }
 
-  public int getCoreLoadThreadCount() {
-    return coreLoadThreads;
+  public int getCoreLoadThreadCount(int def) {
+    return coreLoadThreads == null ? def : coreLoadThreads;
   }
 
   public String getSharedLibDirectory() {
@@ -185,7 +185,7 @@ public class NodeConfig {
     private String configSetsHandlerClass = DEFAULT_CONFIGSETSHANDLERCLASS;
     private LogWatcherConfig logWatcherConfig = new LogWatcherConfig(true, null, null, 50);
     private CloudConfig cloudConfig;
-    private int coreLoadThreads = DEFAULT_CORE_LOAD_THREADS;
+    private Integer coreLoadThreads;
     private int transientCacheSize = DEFAULT_TRANSIENT_CACHE_SIZE;
     private boolean useSchemaCache = false;
     private String managementPath;
@@ -195,7 +195,9 @@ public class NodeConfig {
     private final SolrResourceLoader loader;
     private final String nodeName;
 
-    private static final int DEFAULT_CORE_LOAD_THREADS = 3;
+    public static final int DEFAULT_CORE_LOAD_THREADS = 3;
+    //Number of core load threads in cloud mode defaults to 24
+    public static final int DEFAULT_CORE_LOAD_THREADS_IN_CLOUD = 24;
 
     private static final int DEFAULT_TRANSIENT_CACHE_SIZE = Integer.MAX_VALUE;
 

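The getCoreLoadThreadCount(int def) change above boils down to a small "unset vs. explicit"
pattern: keep the configured value as a nullable Integer and let the caller pass the default
that is right for its context (cloud vs. standalone). A standalone sketch, with hypothetical
names:

    class ThreadCountSketch {
      private final Integer configured; // null means "not set in solr.xml"

      ThreadCountSketch(Integer configured) { this.configured = configured; }

      int resolve(int defaultForThisMode) {
        return configured == null ? defaultForThisMode : configured;
      }

      public static void main(String[] args) {
        System.out.println(new ThreadCountSketch(null).resolve(24)); // cloud default applies
        System.out.println(new ThreadCountSketch(11).resolve(24));   // explicit value wins
      }
    }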
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6c1b75b0/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java b/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java
new file mode 100644
index 0000000..5b550bf
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java
@@ -0,0 +1,246 @@
+package org.apache.solr.core;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+
+import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.cloud.CloudDescriptor;
+import org.apache.solr.cloud.ZkController;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.core.CoreSorter.CountsForEachShard;
+import org.apache.solr.util.MockCoreContainer;
+
+import static java.util.stream.Collectors.toList;
+import static org.apache.solr.core.CoreSorter.getShardName;
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.reset;
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+public class CoreSorterTest extends SolrTestCaseJ4 {
+  Map<String, Boolean> nodes = new LinkedHashMap<>();
+  Set<String> liveNodes = new HashSet<>();
+
+  public void testComparator() {
+    List<CountsForEachShard> l = new ArrayList<>();
+    //                           DN LIV  MY
+    l.add(new CountsForEachShard(1, 3, 1));
+    l.add(new CountsForEachShard(0, 3, 2));
+    l.add(new CountsForEachShard(0, 3, 3));
+    l.add(new CountsForEachShard(0, 3, 4));
+    l.add(new CountsForEachShard(1, 0, 2));
+    l.add(new CountsForEachShard(1, 0, 1));
+    l.add(new CountsForEachShard(2, 5, 1));
+    l.add(new CountsForEachShard(2, 4, 2));
+    l.add(new CountsForEachShard(2, 3, 3));
+
+    List<CountsForEachShard> expected = Arrays.asList(
+        new CountsForEachShard(0, 3, 2),
+        new CountsForEachShard(0, 3, 3),
+        new CountsForEachShard(0, 3, 4),
+        new CountsForEachShard(1, 3, 1),
+        new CountsForEachShard(2, 5, 1),
+        new CountsForEachShard(2, 4, 2),
+        new CountsForEachShard(2, 3, 3),
+        new CountsForEachShard(1, 0, 1),
+        new CountsForEachShard(1, 0, 2)
+
+    );
+
+    for (int i = 0; i < 10; i++) {
+      List<CountsForEachShard> copy = new ArrayList<>(l);
+      Collections.shuffle(copy);
+      Collections.sort(copy, CoreSorter.countsComparator);
+      for (int j = 0; j < copy.size(); j++) {
+        assertEquals(expected.get(j), copy.get(j));
+      }
+    }
+  }
+
+  public void testSort() throws Exception {
+    CoreContainer mockCC = getMockContainer();
+    MockCoreSorter coreSorter = (MockCoreSorter) new MockCoreSorter().init(mockCC);
+    List<CoreDescriptor> copy = new ArrayList<>(coreSorter.getLocalCores());
+    Collections.sort(copy, coreSorter::compare);
+    List<CountsForEachShard> l = copy.stream()
+        .map(CoreDescriptor::getCloudDescriptor)
+        .map(it -> coreSorter.shardsVsReplicaCounts.get(getShardName(it)))
+        .collect(toList());
+    for (int i = 1; i < l.size(); i++) {
+      CountsForEachShard curr = l.get(i);
+      CountsForEachShard prev = l.get(i-1);
+      assertTrue(CoreSorter.countsComparator.compare(prev, curr) < 1);
+    }
+
+    for (CountsForEachShard c : l) {
+      System.out.println(c);
+    }
+  }
+
+  private CoreContainer getMockContainer() {
+    CoreContainer mockCC = createMock(CoreContainer.class);
+    ZkController mockZKC = createMock(ZkController.class);
+    ClusterState mockClusterState = createMock(ClusterState.class);
+    reset(mockCC, mockZKC, mockClusterState);
+    mockCC.isZooKeeperAware();
+    expectLastCall().andAnswer(() -> Boolean.TRUE).anyTimes();
+    mockCC.getZkController();
+    expectLastCall().andAnswer(() -> mockZKC).anyTimes();
+    mockClusterState.getLiveNodes();
+    expectLastCall().andAnswer(() -> liveNodes).anyTimes();
+    mockZKC.getClusterState();
+    expectLastCall().andAnswer(() -> mockClusterState).anyTimes();
+    replay(mockCC, mockZKC, mockClusterState);
+    return mockCC;
+  }
+
+  static class ReplicaInfo {
+    final int coll, slice, replica;
+    final String replicaName;
+    CloudDescriptor cd;
+
+    ReplicaInfo(int coll, int slice, int replica) {
+      this.coll = coll;
+      this.slice = slice;
+      this.replica = replica;
+      replicaName = "coll_" + coll + "_" + slice + "_" + replica;
+      Properties p = new Properties();
+      p.setProperty(CoreDescriptor.CORE_SHARD, "shard_" + slice);
+      p.setProperty(CoreDescriptor.CORE_COLLECTION, "coll_" + slice);
+      p.setProperty(CoreDescriptor.CORE_NODE_NAME, replicaName);
+      cd = new CloudDescriptor(replicaName, p, null);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      if (obj instanceof ReplicaInfo) {
+        ReplicaInfo replicaInfo = (ReplicaInfo) obj;
+        return replicaInfo.replicaName.equals(replicaName);
+      }
+      return false;
+    }
+
+
+    @Override
+    public int hashCode() {
+      return replicaName.hashCode();
+    }
+
+    CloudDescriptor getCloudDescriptor() {
+      return cd;
+
+    }
+
+    public Replica getReplica(String node) {
+      return new Replica(replicaName, Utils.makeMap("core", replicaName, "node_name", node));
+    }
+
+    public boolean equals(String coll, String slice) {
+      return cd.getCollectionName().equals(coll) && slice.equals(cd.getShardId());
+    }
+  }
+
+
+  class MockCoreSorter extends CoreSorter {
+    int numColls = 1 + random().nextInt(3);
+    int numReplicas = 2 + random().nextInt(2);
+    int numShards = 50 + random().nextInt(10);
+    String myNodeName;
+    Collection<CloudDescriptor> myCores = new ArrayList<>();
+    List<CoreDescriptor> localCores = new ArrayList<>();
+
+    Map<ReplicaInfo, String> replicaPositions = new LinkedHashMap<>();//replicaname vs. nodename
+
+    public MockCoreSorter() {
+      int totalNodes = 50 + random().nextInt(10);
+      int myNode = random().nextInt(totalNodes);
+      List<String> nodeNames = new ArrayList<>();
+      for (int i = 0; i < totalNodes; i++) {
+        String s = "192.168.1." + i + ":8983_solr";
+        if (i == myNode) myNodeName = s;
+        boolean on = random().nextInt(100) < 70; //70% chance that the node is up
+        nodes.put(s, on);
+        nodeNames.add(s);
+        if(on) liveNodes.add(s);
+      }
+
+      for (int i = 0; i < numColls; i++) {
+        for (int j = 0; j < numShards; j++) {
+          for (int k = 0; k < numReplicas; k++) {
+            ReplicaInfo ri = new ReplicaInfo(i, j, k);
+            replicaPositions.put(ri, nodeNames.get(random().nextInt(totalNodes)));
+          }
+        }
+      }
+
+      for (Map.Entry<ReplicaInfo, String> e : replicaPositions.entrySet()) {
+        if (e.getValue().equals(myNodeName)) {
+          myCores.add(e.getKey().getCloudDescriptor());
+          localCores.add(new MockCoreContainer.MockCoreDescriptor() {
+            @Override
+            public CloudDescriptor getCloudDescriptor() {
+              return e.getKey().getCloudDescriptor();
+            }
+          });
+        }
+      }
+    }
+
+    @Override
+    String getNodeName() {
+      return myNodeName;
+    }
+
+    @Override
+    Collection<CloudDescriptor> getCloudDescriptors() {
+      return myCores;
+    }
+
+    public List<CoreDescriptor> getLocalCores() {
+      return localCores;
+    }
+
+    @Override
+    Collection<Replica> getReplicas(ClusterState cs, String coll, String slice) {
+      List<Replica> r = new ArrayList<>();
+      for (Map.Entry<ReplicaInfo, String> e : replicaPositions.entrySet()) {
+        if (e.getKey().equals(coll, slice)) {
+          r.add(e.getKey().getReplica(e.getValue()));
+        }
+      }
+      return r;
+    }
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6c1b75b0/solr/core/src/test/org/apache/solr/core/TestSolrXml.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrXml.java b/solr/core/src/test/org/apache/solr/core/TestSolrXml.java
index 4343efe..e005c9f 100644
--- a/solr/core/src/test/org/apache/solr/core/TestSolrXml.java
+++ b/solr/core/src/test/org/apache/solr/core/TestSolrXml.java
@@ -73,7 +73,7 @@ public class TestSolrXml extends SolrTestCaseJ4 {
     assertEquals("collection handler class", "testCollectionsHandler", cfg.getCollectionsHandlerClass());
     assertEquals("info handler class", "testInfoHandler", cfg.getInfoHandlerClass());
     assertEquals("config set handler class", "testConfigSetsHandler", cfg.getConfigSetsHandlerClass());
-    assertEquals("core load threads", 11, cfg.getCoreLoadThreadCount());
+    assertEquals("core load threads", 11, cfg.getCoreLoadThreadCount(0));
     assertThat("core root dir", cfg.getCoreRootDirectory().toString(), containsString("testCoreRootDirectory"));
     assertEquals("distrib conn timeout", 22, cfg.getUpdateShardHandlerConfig().getDistributedConnectionTimeout());
     assertEquals("distrib socket timeout", 33, cfg.getUpdateShardHandlerConfig().getDistributedSocketTimeout());


[45/51] [abbrv] lucene-solr:apiv2: SOLR-9309: Fix SolrCloud RTG response structure when multi ids requested but only 1 found

Posted by sa...@apache.org.
SOLR-9309: Fix SolrCloud RTG response structure when multi ids requested but only 1 found
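
For context, the "/get" (real-time get) handler uses two response shapes: a single "doc" entry when the client passes one "id" param, and a standard SolrDocumentList under "response" when the "ids" param (or several "id" params) is used. Before this fix, a distributed SolrCloud request could fall back to the single-doc shape when several ids were requested but only one was found. Below is a minimal SolrJ sketch of the two shapes; the ZooKeeper address, collection name and ids are illustrative assumptions, not part of this commit:

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.CloudSolrClient;
    import org.apache.solr.client.solrj.response.QueryResponse;
    import org.apache.solr.common.SolrDocument;
    import org.apache.solr.common.SolrDocumentList;

    public class RtgResponseShapes {
      public static void main(String[] args) throws Exception {
        try (CloudSolrClient client = new CloudSolrClient.Builder()
            .withZkHost("localhost:9983").build()) {       // assumed ZK address
          client.setDefaultCollection("techproducts");      // assumed collection name

          // One 'id' param: the response carries a lone "doc" entry (null if the doc is missing).
          SolrQuery single = new SolrQuery();
          single.setRequestHandler("/get");
          single.set("id", "SP2514N");                      // assumed id
          QueryResponse rsp1 = client.query(single);
          SolrDocument doc = (SolrDocument) rsp1.getResponse().get("doc");

          // 'ids' param: always a SolrDocumentList under "response", even when only one of
          // the requested ids exists, which is the case SOLR-9309 makes consistent in SolrCloud.
          SolrQuery multi = new SolrQuery();
          multi.setRequestHandler("/get");
          multi.set("ids", "SP2514N,DOES_NOT_EXIST");       // assumed ids
          QueryResponse rsp2 = client.query(multi);
          SolrDocumentList docs = rsp2.getResults();        // numFound == docs actually found

          System.out.println("doc: " + doc + ", list size: " + docs.size());
        }
      }
    }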


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/9aa639d4
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/9aa639d4
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/9aa639d4

Branch: refs/heads/apiv2
Commit: 9aa639d45e31059bb2910dade6d7728ea075cd57
Parents: 08019f4
Author: Chris Hostetter <ho...@apache.org>
Authored: Tue Jul 19 11:11:49 2016 -0700
Committer: Chris Hostetter <ho...@apache.org>
Committed: Tue Jul 19 11:11:49 2016 -0700

----------------------------------------------------------------------
 solr/CHANGES.txt                                |   2 +
 .../handler/component/RealTimeGetComponent.java | 144 ++++++++++++-------
 .../apache/solr/cloud/TestRandomFlRTGCloud.java |   7 +-
 3 files changed, 93 insertions(+), 60 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aa639d4/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 0ccccee..55fae47 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -159,6 +159,8 @@ Bug Fixes
 
 * SOLR-9288: Fix [docid] transformer to return -1 when used in RTG with uncommitted doc (hossman)
 
+* SOLR-9309: Fix SolrCloud RTG response structure when multi ids requested but only 1 found (hossman)
+
 Optimizations
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aa639d4/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
index 9865a11..9018a86 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
@@ -143,10 +143,9 @@ public class RealTimeGetComponent extends SearchComponent
       return;
     }
 
-    String id[] = params.getParams("id");
-    String ids[] = params.getParams("ids");
-
-    if (id == null && ids == null) {
+    final IdsRequsted reqIds = IdsRequsted.parseParams(req);
+    
+    if (reqIds.allIds.isEmpty()) {
       return;
     }
 
@@ -171,20 +170,6 @@ public class RealTimeGetComponent extends SearchComponent
       throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
     }
 
-
-    String[] allIds = id==null ? new String[0] : id;
-
-    if (ids != null) {
-      List<String> lst = new ArrayList<>();
-      for (String s : allIds) {
-        lst.add(s);
-      }
-      for (String idList : ids) {
-        lst.addAll( StrUtils.splitSmart(idList, ",", true) );
-      }
-      allIds = lst.toArray(new String[lst.size()]);
-    }
-
     SolrCore core = req.getCore();
     SchemaField idField = core.getLatestSchema().getUniqueKeyField();
     FieldType fieldType = idField.getType();
@@ -209,7 +194,7 @@ public class RealTimeGetComponent extends SearchComponent
      SolrIndexSearcher searcher = null;
 
      BytesRefBuilder idBytes = new BytesRefBuilder();
-     for (String idStr : allIds) {
+     for (String idStr : reqIds.allIds) {
        fieldType.readableToIndexed(idStr, idBytes);
        if (ulog != null) {
          Object o = ulog.lookup(idBytes.get());
@@ -297,18 +282,7 @@ public class RealTimeGetComponent extends SearchComponent
      }
    }
 
-
-   // if the client specified a single id=foo, then use "doc":{
-   // otherwise use a standard doclist
-
-   if (ids ==  null && allIds.length <= 1) {
-     // if the doc was not found, then use a value of null.
-     rsp.add("doc", docList.size() > 0 ? docList.get(0) : null);
-   } else {
-     docList.setNumFound(docList.size());
-     rsp.addResponse(docList);
-   }
-
+   addDocListToResponse(rb, docList);
   }
 
 
@@ -461,25 +435,13 @@ public class RealTimeGetComponent extends SearchComponent
   }
 
   public int createSubRequests(ResponseBuilder rb) throws IOException {
-    SolrParams params = rb.req.getParams();
-    String id1[] = params.getParams("id");
-    String ids[] = params.getParams("ids");
-
-    if (id1 == null && ids == null) {
+    
+    final IdsRequsted reqIds = IdsRequsted.parseParams(rb.req);
+    if (reqIds.allIds.isEmpty()) {
       return ResponseBuilder.STAGE_DONE;
     }
-
-    List<String> allIds = new ArrayList<>();
-    if (id1 != null) {
-      for (String s : id1) {
-        allIds.add(s);
-      }
-    }
-    if (ids != null) {
-      for (String s : ids) {
-        allIds.addAll( StrUtils.splitSmart(s, ",", true) );
-      }
-    }
+    
+    SolrParams params = rb.req.getParams();
 
     // TODO: handle collection=...?
 
@@ -495,7 +457,7 @@ public class RealTimeGetComponent extends SearchComponent
 
 
       Map<String, List<String>> sliceToId = new HashMap<>();
-      for (String id : allIds) {
+      for (String id : reqIds.allIds) {
         Slice slice = coll.getRouter().getTargetSlice(id, null, null, params, coll);
 
         List<String> idsForShard = sliceToId.get(slice.getName());
@@ -524,7 +486,7 @@ public class RealTimeGetComponent extends SearchComponent
         rb.addRequest(this, sreq);
       }      
     } else {
-      String shardIdList = StrUtils.join(allIds, ',');
+      String shardIdList = StrUtils.join(reqIds.allIds, ',');
       ShardRequest sreq = new ShardRequest();
 
       sreq.purpose = 1;
@@ -586,17 +548,31 @@ public class RealTimeGetComponent extends SearchComponent
         docList.addAll(subList);
       }
     }
+    
+    addDocListToResponse(rb, docList);
+  }
 
-    if (docList.size() <= 1 && rb.req.getParams().getParams("ids")==null) {
+  /**
+   * Encapsulates logic for how a {@link SolrDocumentList} should be added to the response
+   * based on the request params used
+   */
+  private void addDocListToResponse(final ResponseBuilder rb, final SolrDocumentList docList) {
+    assert null != docList;
+    
+    final SolrQueryResponse rsp = rb.rsp;
+    final IdsRequsted reqIds = IdsRequsted.parseParams(rb.req);
+    
+    if (reqIds.useSingleDocResponse) {
+      assert docList.size() <= 1;
       // if the doc was not found, then use a value of null.
-      rb.rsp.add("doc", docList.size() > 0 ? docList.get(0) : null);
+      rsp.add("doc", docList.size() > 0 ? docList.get(0) : null);
     } else {
       docList.setNumFound(docList.size());
-      rb.rsp.addResponse(docList);
+      rsp.addResponse(docList);
     }
   }
 
-
+                                                                                               
 
   ////////////////////////////////////////////
   ///  SolrInfoMBean
@@ -768,6 +744,66 @@ public class RealTimeGetComponent extends SearchComponent
     return new ArrayList<>(versionsToRet);
   }
 
+  /** 
+   * Simple struct for tracking what ids were requested and what response format is expected 
+   * according to the request params
+   */
+  private final static class IdsRequsted {
+    /** A List (which may be empty but will never be null) of the uniqueKeys requested. */
+    public final List<String> allIds;
+    /** 
+     * true if the params provided by the user indicate that a single doc response structure 
+     * should be used.  
+     * Value is meaningless if <code>ids</code> is empty.
+     */
+    public final boolean useSingleDocResponse;
+    private IdsRequsted(List<String> allIds, boolean useSingleDocResponse) {
+      assert null != allIds;
+      this.allIds = allIds;
+      this.useSingleDocResponse = useSingleDocResponse;
+    }
+    
+    /**
+     * Parses the <code>id</code> and <code>ids</code> params attached to the specified request object,
+     * and returns an <code>IdsRequsted</code> struct to use for this request.
+     * The <code>IdsRequsted</code> is cached in the {@link SolrQueryRequest#getContext} so subsequent 
+     * method calls on the same request will not re-parse the params.
+     */
+    public static IdsRequsted parseParams(SolrQueryRequest req) {
+      final String contextKey = IdsRequsted.class.toString() + "_PARSED_ID_PARAMS";
+      if (req.getContext().containsKey(contextKey)) {
+        return (IdsRequsted)req.getContext().get(contextKey);
+      }
+      final SolrParams params = req.getParams();
+      final String id[] = params.getParams("id");
+      final String ids[] = params.getParams("ids");
+      
+      if (id == null && ids == null) {
+        IdsRequsted result = new IdsRequsted(Collections.<String>emptyList(), true);
+        req.getContext().put(contextKey, result);
+        return result;
+      }
+      final List<String> allIds = new ArrayList<>((null == id ? 0 : id.length)
+                                                  + (null == ids ? 0 : (2 * ids.length)));
+      if (null != id) {
+        for (String singleId : id) {
+          allIds.add(singleId);
+        }
+      }
+      if (null != ids) {
+        for (String idList : ids) {
+          allIds.addAll( StrUtils.splitSmart(idList, ",", true) );
+        }
+      }
+      // if the client specified a single id=foo, then use "doc":{
+      // otherwise use a standard doclist
+      IdsRequsted result = new IdsRequsted(allIds, (ids == null && allIds.size() <= 1));
+      req.getContext().put(contextKey, result);
+      return result;
+    }
+  }
+
+  
   /**
    * A lite weight ResultContext for use with RTG requests that can point at Realtime Searchers
    */
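
The parseParams helper above caches its parsed result in SolrQueryRequest#getContext, so process(), createSubRequests() and addDocListToResponse() can each call it without re-parsing the id/ids params. A minimal standalone sketch of that parse-once-per-request idiom follows; the class name, context key and the single "ids" param handled here are illustrative, not the committed code:

    import java.util.Collections;
    import java.util.List;

    import org.apache.solr.common.params.SolrParams;
    import org.apache.solr.common.util.StrUtils;
    import org.apache.solr.request.SolrQueryRequest;

    final class ParsedIds {
      private static final String KEY = ParsedIds.class.getName() + "_PARSED";

      final List<String> ids;
      private ParsedIds(List<String> ids) { this.ids = ids; }

      static ParsedIds get(SolrQueryRequest req) {
        // The request context lives for the duration of a single request,
        // so it doubles as a cheap per-request cache.
        ParsedIds cached = (ParsedIds) req.getContext().get(KEY);
        if (cached != null) return cached;

        SolrParams params = req.getParams();
        String raw = params.get("ids");
        ParsedIds result = new ParsedIds(raw == null
            ? Collections.<String>emptyList()
            : StrUtils.splitSmart(raw, ",", true));
        req.getContext().put(KEY, result);
        return result;
      }
    }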

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9aa639d4/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java
index 682d6a0..8fc61c7 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java
@@ -387,16 +387,11 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
    * trivial helper method to deal with diff response structure between using a single 'id' param vs
    * 2 or more 'id' params (or 1 or more 'ids' params).
    *
-   * NOTE: <code>expectList</code> is currently ignored due to SOLR-9309 -- instead best efforst are made to
-   * return a synthetic list based on whatever can be found in the response.
-   *
    * @return List from response, or a synthetic one created from single response doc if 
    * <code>expectList</code> was false; May be empty; May be null if response included null list.
    */
   private static SolrDocumentList getDocsFromRTGResponse(final boolean expectList, final QueryResponse rsp) {
-    // TODO: blocked by SOLR-9309 (once this can be fixed, update jdocs)
-    if (null != rsp.getResults()) { // TODO: replace this..
-    // if (expectList) {            // TODO: ...with this tighter check.
+    if (expectList) {
       return rsp.getResults();
     }
     


[32/51] [abbrv] lucene-solr:apiv2: SOLR-9240: Update CHANGES.txt

Posted by sa...@apache.org.
SOLR-9240: Update CHANGES.txt


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/10f3700e
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/10f3700e
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/10f3700e

Branch: refs/heads/apiv2
Commit: 10f3700e725edb9793c76ad367edfb70f97b34a8
Parents: 59218b9
Author: jbernste <jb...@apache.org>
Authored: Sun Jul 17 21:49:24 2016 -0400
Committer: jbernste <jb...@apache.org>
Committed: Sun Jul 17 21:49:24 2016 -0400

----------------------------------------------------------------------
 solr/CHANGES.txt | 2 ++
 1 file changed, 2 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/10f3700e/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index d876a25..54317ed 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -95,6 +95,8 @@ New Features
   Spatial4j package "com.spatial4j.core" it is rewritten to "org.locationtech.spatial4j" with a warning.
   (David Smiley)
 
+* SOLR-9240: Support parallel ETL with the topic expression (Joel Bernstein)
+
 Bug Fixes
 ----------------------
 


[04/51] [abbrv] lucene-solr:apiv2: LUCENE-7368: Remove queryNorm.

Posted by sa...@apache.org.
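
With queryNorm gone, the query boost is now threaded straight through createWeight(searcher, needsScores, boost) and into Similarity.computeWeight(boost, ...), instead of being folded in later via SimWeight.normalize(). Below is a minimal delegating Similarity written against the new signatures; the class name and the choice of ClassicSimilarity as delegate are illustrative, not part of this commit:

    import java.io.IOException;

    import org.apache.lucene.index.FieldInvertState;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.CollectionStatistics;
    import org.apache.lucene.search.TermStatistics;
    import org.apache.lucene.search.similarities.ClassicSimilarity;
    import org.apache.lucene.search.similarities.Similarity;

    public class BoostAwareSimilarity extends Similarity {
      private final Similarity delegate = new ClassicSimilarity();

      @Override
      public long computeNorm(FieldInvertState state) {
        return delegate.computeNorm(state);
      }

      // The boost arrives up front; there is no queryNorm() to override and no
      // SimWeight.getValueForNormalization()/normalize() pair to implement any more.
      @Override
      public SimWeight computeWeight(float boost, CollectionStatistics collectionStats,
                                     TermStatistics... termStats) {
        return delegate.computeWeight(boost, collectionStats, termStats);
      }

      @Override
      public SimScorer simScorer(SimWeight weight, LeafReaderContext context) throws IOException {
        return delegate.simScorer(weight, context);
      }
    }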
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/spans/SpanContainingQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/spans/SpanContainingQuery.java b/lucene/core/src/java/org/apache/lucene/search/spans/SpanContainingQuery.java
index bfa3f85..0ce7093 100644
--- a/lucene/core/src/java/org/apache/lucene/search/spans/SpanContainingQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/spans/SpanContainingQuery.java
@@ -43,18 +43,18 @@ public final class SpanContainingQuery extends SpanContainQuery {
   }
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    SpanWeight bigWeight = big.createWeight(searcher, false);
-    SpanWeight littleWeight = little.createWeight(searcher, false);
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    SpanWeight bigWeight = big.createWeight(searcher, false, boost);
+    SpanWeight littleWeight = little.createWeight(searcher, false, boost);
     return new SpanContainingWeight(searcher, needsScores ? getTermContexts(bigWeight, littleWeight) : null,
-                                      bigWeight, littleWeight);
+                                      bigWeight, littleWeight, boost);
   }
 
   public class SpanContainingWeight extends SpanContainWeight {
 
     public SpanContainingWeight(IndexSearcher searcher, Map<Term, TermContext> terms,
-                                SpanWeight bigWeight, SpanWeight littleWeight) throws IOException {
-      super(searcher, terms, bigWeight, littleWeight);
+                                SpanWeight bigWeight, SpanWeight littleWeight, float boost) throws IOException {
+      super(searcher, terms, bigWeight, littleWeight, boost);
     }
 
     /**

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/spans/SpanMultiTermQueryWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/spans/SpanMultiTermQueryWrapper.java b/lucene/core/src/java/org/apache/lucene/search/spans/SpanMultiTermQueryWrapper.java
index f4c6f24..9c844d1 100644
--- a/lucene/core/src/java/org/apache/lucene/search/spans/SpanMultiTermQueryWrapper.java
+++ b/lucene/core/src/java/org/apache/lucene/search/spans/SpanMultiTermQueryWrapper.java
@@ -95,7 +95,7 @@ public class SpanMultiTermQueryWrapper<Q extends MultiTermQuery> extends SpanQue
   }
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     throw new IllegalArgumentException("Rewrite first!");
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/spans/SpanNearQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/spans/SpanNearQuery.java b/lucene/core/src/java/org/apache/lucene/search/spans/SpanNearQuery.java
index 217d75f..7958f47 100644
--- a/lucene/core/src/java/org/apache/lucene/search/spans/SpanNearQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/spans/SpanNearQuery.java
@@ -176,20 +176,20 @@ public class SpanNearQuery extends SpanQuery implements Cloneable {
   }
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     List<SpanWeight> subWeights = new ArrayList<>();
     for (SpanQuery q : clauses) {
-      subWeights.add(q.createWeight(searcher, false));
+      subWeights.add(q.createWeight(searcher, false, boost));
     }
-    return new SpanNearWeight(subWeights, searcher, needsScores ? getTermContexts(subWeights) : null);
+    return new SpanNearWeight(subWeights, searcher, needsScores ? getTermContexts(subWeights) : null, boost);
   }
 
   public class SpanNearWeight extends SpanWeight {
 
     final List<SpanWeight> subWeights;
 
-    public SpanNearWeight(List<SpanWeight> subWeights, IndexSearcher searcher, Map<Term, TermContext> terms) throws IOException {
-      super(SpanNearQuery.this, searcher, terms);
+    public SpanNearWeight(List<SpanWeight> subWeights, IndexSearcher searcher, Map<Term, TermContext> terms, float boost) throws IOException {
+      super(SpanNearQuery.this, searcher, terms, boost);
       this.subWeights = subWeights;
     }
 
@@ -295,14 +295,14 @@ public class SpanNearQuery extends SpanQuery implements Cloneable {
     }
 
     @Override
-    public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-      return new SpanGapWeight(searcher);
+    public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+      return new SpanGapWeight(searcher, boost);
     }
 
     private class SpanGapWeight extends SpanWeight {
 
-      SpanGapWeight(IndexSearcher searcher) throws IOException {
-        super(SpanGapQuery.this, searcher, null);
+      SpanGapWeight(IndexSearcher searcher, float boost) throws IOException {
+        super(SpanGapQuery.this, searcher, null, boost);
       }
 
       @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/spans/SpanNotQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/spans/SpanNotQuery.java b/lucene/core/src/java/org/apache/lucene/search/spans/SpanNotQuery.java
index 9b07abf..0984bd9 100644
--- a/lucene/core/src/java/org/apache/lucene/search/spans/SpanNotQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/spans/SpanNotQuery.java
@@ -93,11 +93,11 @@ public final class SpanNotQuery extends SpanQuery {
 
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    SpanWeight includeWeight = include.createWeight(searcher, false);
-    SpanWeight excludeWeight = exclude.createWeight(searcher, false);
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    SpanWeight includeWeight = include.createWeight(searcher, false, boost);
+    SpanWeight excludeWeight = exclude.createWeight(searcher, false, boost);
     return new SpanNotWeight(searcher, needsScores ? getTermContexts(includeWeight, excludeWeight) : null,
-                                  includeWeight, excludeWeight);
+                                  includeWeight, excludeWeight, boost);
   }
 
   public class SpanNotWeight extends SpanWeight {
@@ -106,8 +106,8 @@ public final class SpanNotQuery extends SpanQuery {
     final SpanWeight excludeWeight;
 
     public SpanNotWeight(IndexSearcher searcher, Map<Term, TermContext> terms,
-                         SpanWeight includeWeight, SpanWeight excludeWeight) throws IOException {
-      super(SpanNotQuery.this, searcher, terms);
+                         SpanWeight includeWeight, SpanWeight excludeWeight, float boost) throws IOException {
+      super(SpanNotQuery.this, searcher, terms, boost);
       this.includeWeight = includeWeight;
       this.excludeWeight = excludeWeight;
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/spans/SpanOrQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/spans/SpanOrQuery.java b/lucene/core/src/java/org/apache/lucene/search/spans/SpanOrQuery.java
index 37e5863..e273dd9 100644
--- a/lucene/core/src/java/org/apache/lucene/search/spans/SpanOrQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/spans/SpanOrQuery.java
@@ -114,20 +114,20 @@ public final class SpanOrQuery extends SpanQuery {
   }
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     List<SpanWeight> subWeights = new ArrayList<>(clauses.size());
     for (SpanQuery q : clauses) {
-      subWeights.add(q.createWeight(searcher, false));
+      subWeights.add(q.createWeight(searcher, false, boost));
     }
-    return new SpanOrWeight(searcher, needsScores ? getTermContexts(subWeights) : null, subWeights);
+    return new SpanOrWeight(searcher, needsScores ? getTermContexts(subWeights) : null, subWeights, boost);
   }
 
   public class SpanOrWeight extends SpanWeight {
 
     final List<SpanWeight> subWeights;
 
-    public SpanOrWeight(IndexSearcher searcher, Map<Term, TermContext> terms, List<SpanWeight> subWeights) throws IOException {
-      super(SpanOrQuery.this, searcher, terms);
+    public SpanOrWeight(IndexSearcher searcher, Map<Term, TermContext> terms, List<SpanWeight> subWeights, float boost) throws IOException {
+      super(SpanOrQuery.this, searcher, terms, boost);
       this.subWeights = subWeights;
     }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/spans/SpanPositionCheckQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/spans/SpanPositionCheckQuery.java b/lucene/core/src/java/org/apache/lucene/search/spans/SpanPositionCheckQuery.java
index 21c3a03..9613ceb 100644
--- a/lucene/core/src/java/org/apache/lucene/search/spans/SpanPositionCheckQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/spans/SpanPositionCheckQuery.java
@@ -67,17 +67,17 @@ public abstract class SpanPositionCheckQuery extends SpanQuery implements Clonea
   protected abstract AcceptStatus acceptPosition(Spans spans) throws IOException;
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    SpanWeight matchWeight = match.createWeight(searcher, false);
-    return new SpanPositionCheckWeight(matchWeight, searcher, needsScores ? getTermContexts(matchWeight) : null);
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    SpanWeight matchWeight = match.createWeight(searcher, false, boost);
+    return new SpanPositionCheckWeight(matchWeight, searcher, needsScores ? getTermContexts(matchWeight) : null, boost);
   }
 
   public class SpanPositionCheckWeight extends SpanWeight {
 
     final SpanWeight matchWeight;
 
-    public SpanPositionCheckWeight(SpanWeight matchWeight, IndexSearcher searcher, Map<Term, TermContext> terms) throws IOException {
-      super(SpanPositionCheckQuery.this, searcher, terms);
+    public SpanPositionCheckWeight(SpanWeight matchWeight, IndexSearcher searcher, Map<Term, TermContext> terms, float boost) throws IOException {
+      super(SpanPositionCheckQuery.this, searcher, terms, boost);
       this.matchWeight = matchWeight;
     }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/spans/SpanQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/spans/SpanQuery.java b/lucene/core/src/java/org/apache/lucene/search/spans/SpanQuery.java
index 12a1129..965f80e 100644
--- a/lucene/core/src/java/org/apache/lucene/search/spans/SpanQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/spans/SpanQuery.java
@@ -36,7 +36,7 @@ public abstract class SpanQuery extends Query {
   public abstract String getField();
 
   @Override
-  public abstract SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException;
+  public abstract SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException;
 
   /**
    * Build a map of terms to termcontexts, for use in constructing SpanWeights

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/spans/SpanTermQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/spans/SpanTermQuery.java b/lucene/core/src/java/org/apache/lucene/search/spans/SpanTermQuery.java
index 43e0dcc..2746a0c 100644
--- a/lucene/core/src/java/org/apache/lucene/search/spans/SpanTermQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/spans/SpanTermQuery.java
@@ -64,7 +64,7 @@ public class SpanTermQuery extends SpanQuery {
   public String getField() { return term.field(); }
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     final TermContext context;
     final IndexReaderContext topContext = searcher.getTopReaderContext();
     if (termContext == null || termContext.topReaderContext != topContext) {
@@ -73,15 +73,15 @@ public class SpanTermQuery extends SpanQuery {
     else {
       context = termContext;
     }
-    return new SpanTermWeight(context, searcher, needsScores ? Collections.singletonMap(term, context) : null);
+    return new SpanTermWeight(context, searcher, needsScores ? Collections.singletonMap(term, context) : null, boost);
   }
 
   public class SpanTermWeight extends SpanWeight {
 
     final TermContext termContext;
 
-    public SpanTermWeight(TermContext termContext, IndexSearcher searcher, Map<Term, TermContext> terms) throws IOException {
-      super(SpanTermQuery.this, searcher, terms);
+    public SpanTermWeight(TermContext termContext, IndexSearcher searcher, Map<Term, TermContext> terms, float boost) throws IOException {
+      super(SpanTermQuery.this, searcher, terms, boost);
       this.termContext = termContext;
       assert termContext != null : "TermContext must not be null";
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/spans/SpanWeight.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/spans/SpanWeight.java b/lucene/core/src/java/org/apache/lucene/search/spans/SpanWeight.java
index 8209bd0..c0b231e 100644
--- a/lucene/core/src/java/org/apache/lucene/search/spans/SpanWeight.java
+++ b/lucene/core/src/java/org/apache/lucene/search/spans/SpanWeight.java
@@ -82,14 +82,14 @@ public abstract class SpanWeight extends Weight {
    *                     be null if scores are not required
    * @throws IOException on error
    */
-  public SpanWeight(SpanQuery query, IndexSearcher searcher, Map<Term, TermContext> termContexts) throws IOException {
+  public SpanWeight(SpanQuery query, IndexSearcher searcher, Map<Term, TermContext> termContexts, float boost) throws IOException {
     super(query);
     this.field = query.getField();
     this.similarity = searcher.getSimilarity(termContexts != null);
-    this.simWeight = buildSimWeight(query, searcher, termContexts);
+    this.simWeight = buildSimWeight(query, searcher, termContexts, boost);
   }
 
-  private Similarity.SimWeight buildSimWeight(SpanQuery query, IndexSearcher searcher, Map<Term, TermContext> termContexts) throws IOException {
+  private Similarity.SimWeight buildSimWeight(SpanQuery query, IndexSearcher searcher, Map<Term, TermContext> termContexts, float boost) throws IOException {
     if (termContexts == null || termContexts.size() == 0 || query.getField() == null)
       return null;
     TermStatistics[] termStats = new TermStatistics[termContexts.size()];
@@ -99,7 +99,7 @@ public abstract class SpanWeight extends Weight {
       i++;
     }
     CollectionStatistics collectionStats = searcher.collectionStatistics(query.getField());
-    return searcher.getSimilarity(true).computeWeight(collectionStats, termStats);
+    return searcher.getSimilarity(true).computeWeight(boost, collectionStats, termStats);
   }
 
   /**
@@ -117,18 +117,6 @@ public abstract class SpanWeight extends Weight {
   public abstract Spans getSpans(LeafReaderContext ctx, Postings requiredPostings) throws IOException;
 
   @Override
-  public float getValueForNormalization() throws IOException {
-    return simWeight == null ? 1.0f : simWeight.getValueForNormalization();
-  }
-
-  @Override
-  public void normalize(float queryNorm, float boost) {
-    if (simWeight != null) {
-      simWeight.normalize(queryNorm, boost);
-    }
-  }
-
-  @Override
   public SpanScorer scorer(LeafReaderContext context) throws IOException {
     final Spans spans = getSpans(context, Postings.POSITIONS);
     if (spans == null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/spans/SpanWithinQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/spans/SpanWithinQuery.java b/lucene/core/src/java/org/apache/lucene/search/spans/SpanWithinQuery.java
index 6fbfb67..1bb0f50 100644
--- a/lucene/core/src/java/org/apache/lucene/search/spans/SpanWithinQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/spans/SpanWithinQuery.java
@@ -44,18 +44,18 @@ public final class SpanWithinQuery extends SpanContainQuery {
   }
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    SpanWeight bigWeight = big.createWeight(searcher, false);
-    SpanWeight littleWeight = little.createWeight(searcher, false);
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    SpanWeight bigWeight = big.createWeight(searcher, false, boost);
+    SpanWeight littleWeight = little.createWeight(searcher, false, boost);
     return new SpanWithinWeight(searcher, needsScores ? getTermContexts(bigWeight, littleWeight) : null,
-                                      bigWeight, littleWeight);
+                                      bigWeight, littleWeight, boost);
   }
 
   public class SpanWithinWeight extends SpanContainWeight {
 
     public SpanWithinWeight(IndexSearcher searcher, Map<Term, TermContext> terms,
-                            SpanWeight bigWeight, SpanWeight littleWeight) throws IOException {
-      super(searcher, terms, bigWeight, littleWeight);
+                            SpanWeight bigWeight, SpanWeight littleWeight, float boost) throws IOException {
+      super(searcher, terms, bigWeight, littleWeight, boost);
     }
 
     /**

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java b/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java
index a8d457e..ba3414b 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java
@@ -83,11 +83,6 @@ public class TestCustomNorms extends LuceneTestCase {
     Similarity delegate = new ClassicSimilarity();
 
     @Override
-    public float queryNorm(float sumOfSquaredWeights) {
-      return delegate.queryNorm(sumOfSquaredWeights);
-    }
-
-    @Override
     public Similarity get(String field) {
       if (floatTestField.equals(field)) {
         return new FloatEncodingBoostSimilarity();
@@ -105,7 +100,7 @@ public class TestCustomNorms extends LuceneTestCase {
     }
     
     @Override
-    public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
+    public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
       throw new UnsupportedOperationException();
     }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
index b91bb9b..6528765 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
@@ -992,8 +992,8 @@ public class TestIndexSorting extends LuceneTestCase {
     }
     
     @Override
-    public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
-      return in.computeWeight(collectionStats, termStats);
+    public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
+      return in.computeWeight(boost, collectionStats, termStats);
     }
     
     @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/index/TestMaxTermFrequency.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestMaxTermFrequency.java b/lucene/core/src/test/org/apache/lucene/index/TestMaxTermFrequency.java
index 871ae30..29a7cd1 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestMaxTermFrequency.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestMaxTermFrequency.java
@@ -115,7 +115,6 @@ public class TestMaxTermFrequency extends LuceneTestCase {
       return norm;
     }
 
-    @Override public float queryNorm(float sumOfSquaredWeights) { return 0; }
     @Override public float tf(float freq) { return 0; }
     @Override public float idf(long docFreq, long docCount) { return 0; }
     @Override public float sloppyFreq(int distance) { return 0; }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/index/TestNorms.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestNorms.java b/lucene/core/src/test/org/apache/lucene/index/TestNorms.java
index 642540e..ae35963 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestNorms.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestNorms.java
@@ -64,7 +64,6 @@ public class TestNorms extends LuceneTestCase {
       return state.getLength();
     }
 
-    @Override public float queryNorm(float sumOfSquaredWeights) { return 0; }
     @Override public float tf(float freq) { return 0; }
     @Override public float idf(long docFreq, long docCount) { return 0; }
     @Override public float sloppyFreq(int distance) { return 0; }
@@ -156,12 +155,6 @@ public class TestNorms extends LuceneTestCase {
     Similarity delegate = new ClassicSimilarity();
 
     @Override
-    public float queryNorm(float sumOfSquaredWeights) {
-
-      return delegate.queryNorm(sumOfSquaredWeights);
-    }
-
-    @Override
     public Similarity get(String field) {
       if (byteTestField.equals(field)) {
         return new ByteEncodingBoostSimilarity();
@@ -181,7 +174,7 @@ public class TestNorms extends LuceneTestCase {
     }
 
     @Override
-    public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
+    public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
       throw new UnsupportedOperationException();
     }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java b/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java
index 3d3384e..3a13d9b 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java
@@ -46,8 +46,6 @@ public class TestOmitTf extends LuceneTestCase {
   public static class SimpleSimilarity extends TFIDFSimilarity {
     @Override public float decodeNormValue(long norm) { return norm; }
     @Override public long encodeNormValue(float f) { return (long) f; }
-    @Override
-    public float queryNorm(float sumOfSquaredWeights) { return 1.0f; }
     @Override public float lengthNorm(FieldInvertState state) { return state.getBoost(); }
     @Override public float tf(float freq) { return freq; }
     @Override public float sloppyFreq(int distance) { return 2.0f; }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/index/TestUniqueTermCount.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestUniqueTermCount.java b/lucene/core/src/test/org/apache/lucene/index/TestUniqueTermCount.java
index 575a9bb..b1661ca 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestUniqueTermCount.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestUniqueTermCount.java
@@ -105,7 +105,7 @@ public class TestUniqueTermCount extends LuceneTestCase {
     }
 
     @Override
-    public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
+    public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
       throw new UnsupportedOperationException();
     }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/JustCompileSearch.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/JustCompileSearch.java b/lucene/core/src/test/org/apache/lucene/search/JustCompileSearch.java
index b46a46e..151d475 100644
--- a/lucene/core/src/test/org/apache/lucene/search/JustCompileSearch.java
+++ b/lucene/core/src/test/org/apache/lucene/search/JustCompileSearch.java
@@ -194,7 +194,7 @@ final class JustCompileSearch {
   static final class JustCompileSimilarity extends Similarity {
 
     @Override
-    public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
+    public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
       throw new UnsupportedOperationException(UNSUPPORTED_MSG);
     }
 
@@ -258,16 +258,6 @@ final class JustCompileSearch {
     }
 
     @Override
-    public void normalize(float norm, float topLevelBoost) {
-      throw new UnsupportedOperationException(UNSUPPORTED_MSG);
-    }
-
-    @Override
-    public float getValueForNormalization() {
-      throw new UnsupportedOperationException(UNSUPPORTED_MSG);
-    }
-
-    @Override
     public Scorer scorer(LeafReaderContext context) {
       throw new UnsupportedOperationException(UNSUPPORTED_MSG);
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/TestBooleanScorer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBooleanScorer.java b/lucene/core/src/test/org/apache/lucene/search/TestBooleanScorer.java
index 42865b7..065def1 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestBooleanScorer.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestBooleanScorer.java
@@ -77,7 +77,7 @@ public class TestBooleanScorer extends LuceneTestCase {
     }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
       return new Weight(CrazyMustUseBulkScorerQuery.this) {
         @Override
         public void extractTerms(Set<Term> terms) {
@@ -90,15 +90,6 @@ public class TestBooleanScorer extends LuceneTestCase {
         }
 
         @Override
-        public float getValueForNormalization() {
-          return 1.0f;
-        }
-
-        @Override
-        public void normalize(float norm, float topLevelBoost) {
-        }
-
-        @Override
         public Scorer scorer(LeafReaderContext context) {
           throw new UnsupportedOperationException();
         }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/TestComplexExplanations.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestComplexExplanations.java b/lucene/core/src/test/org/apache/lucene/search/TestComplexExplanations.java
index 1a4591d..fce2cd4 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestComplexExplanations.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestComplexExplanations.java
@@ -33,14 +33,11 @@ import org.apache.lucene.search.spans.*;
  */
 public class TestComplexExplanations extends BaseExplanationTestCase {
 
-  /**
-   * Override the Similarity used in our searcher with one that plays
-   * nice with boosts of 0.0
-   */
   @Override
   public void setUp() throws Exception {
     super.setUp();
-    searcher.setSimilarity(createQnorm1Similarity());
+    // TODO: switch to BM25?
+    searcher.setSimilarity(new ClassicSimilarity());
   }
   
   @Override
@@ -49,16 +46,6 @@ public class TestComplexExplanations extends BaseExplanationTestCase {
     super.tearDown();
   }
 
-  // must be static for weight serialization tests 
-  private static ClassicSimilarity createQnorm1Similarity() {
-    return new ClassicSimilarity() {
-        @Override
-        public float queryNorm(float sumOfSquaredWeights) {
-          return 1.0f; // / (float) Math.sqrt(1.0f + sumOfSquaredWeights);
-        }
-      };
-  }
-
   
   public void test1() throws Exception {
     

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/TestConjunctions.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestConjunctions.java b/lucene/core/src/test/org/apache/lucene/search/TestConjunctions.java
index b6e0356..8b26121 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestConjunctions.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestConjunctions.java
@@ -94,18 +94,9 @@ public class TestConjunctions extends LuceneTestCase {
     }
 
     @Override
-    public SimWeight computeWeight(
+    public SimWeight computeWeight(float boost,
         CollectionStatistics collectionStats, TermStatistics... termStats) {
-      return new SimWeight() {
-        @Override
-        public float getValueForNormalization() {
-          return 1; // we don't care
-        }
-        @Override
-        public void normalize(float queryNorm, float topLevelBoost) {
-          // we don't care
-        }
-      };
+      return new SimWeight() {};
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/TestConstantScoreQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestConstantScoreQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestConstantScoreQuery.java
index 0a49259..7671cc0 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestConstantScoreQuery.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestConstantScoreQuery.java
@@ -30,7 +30,6 @@ import org.apache.lucene.index.MultiReader;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.BooleanClause.Occur;
-import org.apache.lucene.search.similarities.ClassicSimilarity;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
@@ -100,14 +99,6 @@ public class TestConstantScoreQuery extends LuceneTestCase {
       searcher = newSearcher(reader, true, false);
       searcher.setQueryCache(null); // to assert on scorer impl
       
-      // set a similarity that does not normalize our boost away
-      searcher.setSimilarity(new ClassicSimilarity() {
-        @Override
-        public float queryNorm(float sumOfSquaredWeights) {
-          return 1.0f;
-        }
-      });
-      
       final BoostQuery csq1 = new BoostQuery(new ConstantScoreQuery(new TermQuery(new Term ("field", "term"))), 2f);
       final BoostQuery csq2 = new BoostQuery(new ConstantScoreQuery(csq1), 5f);
       
@@ -143,8 +134,8 @@ public class TestConstantScoreQuery extends LuceneTestCase {
     }
     
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-      return in.createWeight(searcher, needsScores);
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+      return in.createWeight(searcher, needsScores, boost);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/TestDocValuesScoring.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestDocValuesScoring.java b/lucene/core/src/test/org/apache/lucene/search/TestDocValuesScoring.java
index 77125b0..de40c0d 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestDocValuesScoring.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestDocValuesScoring.java
@@ -78,11 +78,6 @@ public class TestDocValuesScoring extends LuceneTestCase {
       public Similarity get(String field) {
         return "foo".equals(field) ? fooSim : base;
       }
-
-      @Override
-      public float queryNorm(float sumOfSquaredWeights) {
-        return base.queryNorm(sumOfSquaredWeights);
-      }
     });
     
     // in this case, we searched on field "foo". first document should have 2x the score.
@@ -148,8 +143,8 @@ public class TestDocValuesScoring extends LuceneTestCase {
     }
 
     @Override
-    public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
-      return sim.computeWeight(collectionStats, termStats);
+    public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
+      return sim.computeWeight(boost, collectionStats, termStats);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java b/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
index 48dcdf0..a2e4e1d 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
@@ -347,8 +347,8 @@ public class TestLRUQueryCache extends LuceneTestCase {
     }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-      return new ConstantScoreWeight(this) {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+      return new ConstantScoreWeight(this, boost) {
         @Override
         public Scorer scorer(LeafReaderContext context) throws IOException {
           return null;
@@ -932,8 +932,8 @@ public class TestLRUQueryCache extends LuceneTestCase {
     int[] i = new int[] {42}; // an array so that clone keeps the reference
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-      return new ConstantScoreWeight(this) {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+      return new ConstantScoreWeight(this, boost) {
         @Override
         public Scorer scorer(LeafReaderContext context) throws IOException {
           return null;
@@ -1112,16 +1112,6 @@ public class TestLRUQueryCache extends LuceneTestCase {
     }
 
     @Override
-    public float getValueForNormalization() throws IOException {
-      return in.getValueForNormalization();
-    }
-
-    @Override
-    public void normalize(float norm, float boost) {
-      in.normalize(norm, boost);
-    }
-
-    @Override
     public Scorer scorer(LeafReaderContext context) throws IOException {
       scorerCalled.set(true);
       return in.scorer(context);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java b/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java
index c9408f0..19f0be5 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java
@@ -89,12 +89,7 @@ public class TestMinShouldMatch2 extends LuceneTestCase {
     r = DirectoryReader.open(dir);
     reader = getOnlyLeafReader(r);
     searcher = new IndexSearcher(reader);
-    searcher.setSimilarity(new ClassicSimilarity() {
-      @Override
-      public float queryNorm(float sumOfSquaredWeights) {
-        return 1; // we disable queryNorm, both for debugging and ease of impl
-      }
-    });
+    searcher.setSimilarity(new ClassicSimilarity());
   }
   
   @AfterClass
@@ -338,11 +333,9 @@ public class TestMinShouldMatch2 extends LuceneTestCase {
           boolean success = ords.add(ord);
           assert success; // no dups
           TermContext context = TermContext.build(reader.getContext(), term);
-          SimWeight w = weight.similarity.computeWeight(
+          SimWeight w = weight.similarity.computeWeight(1f,
                         searcher.collectionStatistics("field"),
                         searcher.termStatistics(term, context));
-          w.getValueForNormalization(); // ignored
-          w.normalize(1F, 1F);
           sims[(int)ord] = weight.similarity.simScorer(w, reader.getContext());
         }
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java
index 942ac13..262cbf3 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java
@@ -33,7 +33,6 @@ import org.apache.lucene.index.MultiFields;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.search.similarities.ClassicSimilarity;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.BytesRef;
@@ -336,32 +335,6 @@ public class TestMultiPhraseQuery extends LuceneTestCase {
   public void testEmptyToString() {
     new MultiPhraseQuery.Builder().build().toString();
   }
-  
-  public void testCustomIDF() throws Exception {
-    Directory indexStore = newDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(random(), indexStore);
-    add("This is a test", "object", writer);
-    add("a note", "note", writer);
-    
-    IndexReader reader = writer.getReader();
-    IndexSearcher searcher = newSearcher(reader);
-    searcher.setSimilarity(new ClassicSimilarity() { 
-      @Override
-      public Explanation idfExplain(CollectionStatistics collectionStats, TermStatistics termStats[]) {
-        return Explanation.match(10f, "just a test");
-      } 
-    });
-    
-    MultiPhraseQuery.Builder queryBuilder = new MultiPhraseQuery.Builder();
-    queryBuilder.add(new Term[] { new Term("body", "this"), new Term("body", "that") });
-    queryBuilder.add(new Term("body", "is"));
-    Weight weight = queryBuilder.build().createWeight(searcher, true);
-    assertEquals(10f * 10f, weight.getValueForNormalization(), 0.001f);
-
-    writer.close();
-    reader.close();
-    indexStore.close();
-  }
 
   public void testZeroPosIncr() throws IOException {
     Directory dir = new RAMDirectory();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/TestMultiTermConstantScore.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestMultiTermConstantScore.java b/lucene/core/src/test/org/apache/lucene/search/TestMultiTermConstantScore.java
index da1c369..69e1e10 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestMultiTermConstantScore.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestMultiTermConstantScore.java
@@ -23,11 +23,9 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.TextField;
-import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.search.similarities.ClassicSimilarity;
 import org.apache.lucene.store.Directory;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -214,76 +212,6 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
     }
   }
 
-  @Test
-  public void testBoost() throws IOException {
-    // NOTE: uses index build in *this* setUp
-
-    IndexSearcher search = newSearcher(reader);
-
-    // test for correct application of query normalization
-    // must use a non score normalizing method for this.
-    
-    search.setSimilarity(new ClassicSimilarity());
-    Query q = csrq("data", "1", "6", T, T);
-    search.search(new BoostQuery(q, 100), new SimpleCollector() {
-      private int base = 0;
-      private Scorer scorer;
-      @Override
-      public void setScorer(Scorer scorer) {
-        this.scorer = scorer;
-      }
-      @Override
-      public void collect(int doc) throws IOException {
-        assertEquals("score for doc " + (doc + base) + " was not correct", 1.0f, scorer.score(), SCORE_COMP_THRESH);
-      }
-      @Override
-      protected void doSetNextReader(LeafReaderContext context) throws IOException {
-        base = context.docBase;
-      }
-      
-      @Override
-      public boolean needsScores() {
-        return true;
-      }
-    });
-
-    //
-    // Ensure that boosting works to score one clause of a query higher
-    // than another.
-    //
-    Query q1 = new BoostQuery(csrq("data", "A", "A", T, T), .1f); // matches document #0
-    Query q2 = csrq("data", "Z", "Z", T, T); // matches document #1
-    BooleanQuery.Builder bq = new BooleanQuery.Builder();
-    bq.add(q1, BooleanClause.Occur.SHOULD);
-    bq.add(q2, BooleanClause.Occur.SHOULD);
-
-    ScoreDoc[] hits = search.search(bq.build(), 1000).scoreDocs;
-    Assert.assertEquals(1, hits[0].doc);
-    Assert.assertEquals(0, hits[1].doc);
-    assertTrue(hits[0].score > hits[1].score);
-
-    q1 = new BoostQuery(csrq("data", "A", "A", T, T, MultiTermQuery.CONSTANT_SCORE_BOOLEAN_REWRITE), .1f); // matches document #0
-    q2 = csrq("data", "Z", "Z", T, T, MultiTermQuery.CONSTANT_SCORE_BOOLEAN_REWRITE); // matches document #1
-    bq = new BooleanQuery.Builder();
-    bq.add(q1, BooleanClause.Occur.SHOULD);
-    bq.add(q2, BooleanClause.Occur.SHOULD);
-
-    hits = search.search(bq.build(), 1000).scoreDocs;
-    Assert.assertEquals(1, hits[0].doc);
-    Assert.assertEquals(0, hits[1].doc);
-    assertTrue(hits[0].score > hits[1].score);
-
-    q1 = new BoostQuery(csrq("data", "A", "A", T, T), 10f); // matches document #0
-    q2 = csrq("data", "Z", "Z", T, T); // matches document #1
-    bq = new BooleanQuery.Builder();
-    bq.add(q1, BooleanClause.Occur.SHOULD);
-    bq.add(q2, BooleanClause.Occur.SHOULD);
-
-    hits = search.search(bq.build(), 1000).scoreDocs;
-    Assert.assertEquals(0, hits[0].doc);
-    Assert.assertEquals(1, hits[1].doc);
-    assertTrue(hits[0].score > hits[1].score);
-  }
 
   @Test
   public void testBooleanOrderUnAffected() throws IOException {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/TestNeedsScores.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestNeedsScores.java b/lucene/core/src/test/org/apache/lucene/search/TestNeedsScores.java
index 2723ce8..f8fe82c 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestNeedsScores.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestNeedsScores.java
@@ -101,8 +101,8 @@ public class TestNeedsScores extends LuceneTestCase {
     }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-      final Weight w = in.createWeight(searcher, needsScores);
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+      final Weight w = in.createWeight(searcher, needsScores, boost);
       return new Weight(AssertNeedsScores.this) {
         @Override
         public void extractTerms(Set<Term> terms) {
@@ -115,16 +115,6 @@ public class TestNeedsScores extends LuceneTestCase {
         }
 
         @Override
-        public float getValueForNormalization() throws IOException {
-          return w.getValueForNormalization();
-        }
-
-        @Override
-        public void normalize(float norm, float topLevelBoost) {
-          w.normalize(norm, topLevelBoost);
-        }
-
-        @Override
         public Scorer scorer(LeafReaderContext context) throws IOException {
           assertEquals("query=" + in, value, needsScores);
           return w.scorer(context);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/TestPositionIncrement.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestPositionIncrement.java b/lucene/core/src/test/org/apache/lucene/search/TestPositionIncrement.java
index 227b15d..3dd9b18 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestPositionIncrement.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestPositionIncrement.java
@@ -252,7 +252,7 @@ public class TestPositionIncrement extends LuceneTestCase {
       System.out.println("\ngetPayloadSpans test");
     }
     PayloadSpanCollector collector = new PayloadSpanCollector();
-    Spans pspans = snq.createWeight(is, false).getSpans(is.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
+    Spans pspans = snq.createWeight(is, false, 1f).getSpans(is.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
     while (pspans.nextDoc() != Spans.NO_MORE_DOCS) {
       while (pspans.nextStartPosition() != Spans.NO_MORE_POSITIONS) {
         if (VERBOSE) {
@@ -274,7 +274,7 @@ public class TestPositionIncrement extends LuceneTestCase {
     assertEquals(8, count);
 
     // System.out.println("\ngetSpans test");
-    Spans spans = snq.createWeight(is, false).getSpans(is.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = snq.createWeight(is, false, 1f).getSpans(is.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     count = 0;
     sawZero = false;
     while (spans.nextDoc() != Spans.NO_MORE_DOCS) {

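The span tests in this patch all follow the same pattern: callers now hand the boost (1f for
"no boost") straight to createWeight and read spans from the returned SpanWeight. A consolidated,
self-contained sketch; the field and term names are placeholders:

import java.io.IOException;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.search.spans.SpanWeight;
import org.apache.lucene.search.spans.Spans;

class SpansSketch {
  // Counts match positions in the first segment only; a sketch, not a complete traversal.
  static int countMatches(IndexSearcher searcher) throws IOException {
    SpanTermQuery q = new SpanTermQuery(new Term("body", "lucene"));
    SpanWeight w = q.createWeight(searcher, false, 1f);
    Spans spans = w.getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
    if (spans == null) {
      return 0;                                  // no matches in this segment
    }
    int count = 0;
    while (spans.nextDoc() != Spans.NO_MORE_DOCS) {
      while (spans.nextStartPosition() != Spans.NO_MORE_POSITIONS) {
        count++;
      }
    }
    return count;
  }
}
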
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/TestPositiveScoresOnlyCollector.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestPositiveScoresOnlyCollector.java b/lucene/core/src/test/org/apache/lucene/search/TestPositiveScoresOnlyCollector.java
index 0612c00..0043b3d 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestPositiveScoresOnlyCollector.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestPositiveScoresOnlyCollector.java
@@ -94,7 +94,7 @@ public class TestPositiveScoresOnlyCollector extends LuceneTestCase {
     IndexReader ir = writer.getReader();
     writer.close();
     IndexSearcher searcher = newSearcher(ir);
-    Weight fake = new TermQuery(new Term("fake", "weight")).createWeight(searcher, true);
+    Weight fake = new TermQuery(new Term("fake", "weight")).createWeight(searcher, true, 1f);
     Scorer s = new SimpleScorer(fake);
     TopDocsCollector<ScoreDoc> tdc = TopScoreDocCollector.create(scores.length);
     Collector c = new PositiveScoresOnlyCollector(tdc);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/TestQueryRescorer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestQueryRescorer.java b/lucene/core/src/test/org/apache/lucene/search/TestQueryRescorer.java
index c0428f4..8b616b5 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestQueryRescorer.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestQueryRescorer.java
@@ -412,7 +412,7 @@ public class TestQueryRescorer extends LuceneTestCase {
     }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
 
       return new Weight(FixedScoreQuery.this) {
 
@@ -421,15 +421,6 @@ public class TestQueryRescorer extends LuceneTestCase {
         }
 
         @Override
-        public float getValueForNormalization() {
-          return 1.0f;
-        }
-
-        @Override
-        public void normalize(float queryNorm, float topLevelBoost) {
-        }
-
-        @Override
         public Scorer scorer(final LeafReaderContext context) throws IOException {
 
           return new Scorer(null) {

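With getValueForNormalization() and normalize() gone from Weight, a delegating weight only has
extractTerms, explain and scorer left to forward; the boost travels through createWeight instead.
A self-contained sketch of that pattern (the wrapper class is hypothetical, not part of the patch):

import java.io.IOException;
import java.util.Set;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;

final class PassThroughQuery extends Query {
  private final Query in;

  PassThroughQuery(Query in) {
    this.in = in;
  }

  @Override
  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
    final Weight inner = in.createWeight(searcher, needsScores, boost);  // boost just passes through
    return new Weight(this) {
      @Override
      public void extractTerms(Set<Term> terms) {
        inner.extractTerms(terms);
      }

      @Override
      public Explanation explain(LeafReaderContext context, int doc) throws IOException {
        return inner.explain(context, doc);
      }

      @Override
      public Scorer scorer(LeafReaderContext context) throws IOException {
        return inner.scorer(context);
      }
    };
  }

  @Override
  public String toString(String field) {
    return "PassThroughQuery(" + in.toString(field) + ")";
  }

  @Override
  public boolean equals(Object other) {
    return other != null && getClass() == other.getClass() && in.equals(((PassThroughQuery) other).in);
  }

  @Override
  public int hashCode() {
    return 31 * getClass().hashCode() + in.hashCode();
  }
}
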
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/TestScoreCachingWrappingScorer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestScoreCachingWrappingScorer.java b/lucene/core/src/test/org/apache/lucene/search/TestScoreCachingWrappingScorer.java
index 7951291..2725603 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestScoreCachingWrappingScorer.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestScoreCachingWrappingScorer.java
@@ -116,7 +116,7 @@ public class TestScoreCachingWrappingScorer extends LuceneTestCase {
     IndexReader ir = writer.getReader();
     writer.close();
     IndexSearcher searcher = newSearcher(ir);
-    Weight fake = new TermQuery(new Term("fake", "weight")).createWeight(searcher, true);
+    Weight fake = new TermQuery(new Term("fake", "weight")).createWeight(searcher, true, 1f);
     Scorer s = new SimpleScorer(fake);
     ScoreCachingCollector scc = new ScoreCachingCollector(scores.length);
     scc.setScorer(s);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/TestScorerPerf.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestScorerPerf.java b/lucene/core/src/test/org/apache/lucene/search/TestScorerPerf.java
index a7c1ba8..562f2da 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestScorerPerf.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestScorerPerf.java
@@ -149,8 +149,8 @@ public class TestScorerPerf extends LuceneTestCase {
     }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-      return new ConstantScoreWeight(this) {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+      return new ConstantScoreWeight(this, boost) {
         @Override
         public Scorer scorer(LeafReaderContext context) throws IOException {
           return new ConstantScoreScorer(this, score(), new BitSetIterator(docs, docs.approximateCardinality()));

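The same change shows up on the constant-score side: ConstantScoreWeight now takes the boost in
its constructor and exposes it through score(). A small illustrative query, not from the patch,
that matches every document with that constant score:

import java.io.IOException;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.ConstantScoreWeight;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;

final class MatchAllConstantQuery extends Query {

  @Override
  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
    // The boost is handed to ConstantScoreWeight and read back via score().
    return new ConstantScoreWeight(this, boost) {
      @Override
      public Scorer scorer(LeafReaderContext context) throws IOException {
        return new ConstantScoreScorer(this, score(),
            DocIdSetIterator.all(context.reader().maxDoc()));
      }
    };
  }

  @Override
  public String toString(String field) {
    return "MatchAllConstantQuery";
  }

  @Override
  public boolean equals(Object other) {
    return other != null && getClass() == other.getClass();
  }

  @Override
  public int hashCode() {
    return getClass().hashCode();
  }
}
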
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/TestSimilarity.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSimilarity.java b/lucene/core/src/test/org/apache/lucene/search/TestSimilarity.java
index 578126d..966e5da 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestSimilarity.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestSimilarity.java
@@ -39,8 +39,6 @@ import org.apache.lucene.document.Document;
 public class TestSimilarity extends LuceneTestCase {
   
   public static class SimpleSimilarity extends ClassicSimilarity {
-    @Override
-    public float queryNorm(float sumOfSquaredWeights) { return 1.0f; }
     @Override public float lengthNorm(FieldInvertState state) { return state.getBoost(); }
     @Override public float tf(float freq) { return freq; }
     @Override public float sloppyFreq(int distance) { return 2.0f; }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/TestSimilarityProvider.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSimilarityProvider.java b/lucene/core/src/test/org/apache/lucene/search/TestSimilarityProvider.java
index 4f1ec8c..83144b1 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestSimilarityProvider.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestSimilarityProvider.java
@@ -115,11 +115,6 @@ public class TestSimilarityProvider extends LuceneTestCase {
     }
 
     @Override
-    public float queryNorm(float sumOfSquaredWeights) {
-      return 1f;
-    }
-
-    @Override
     public float lengthNorm(FieldInvertState state) {
       return 1f;
     }
@@ -156,11 +151,6 @@ public class TestSimilarityProvider extends LuceneTestCase {
     public float decodeNormValue(long norm) {
       return norm;
     }
-
-    @Override
-    public float queryNorm(float sumOfSquaredWeights) {
-      return 1f;
-    }
     
     @Override
     public float lengthNorm(FieldInvertState state) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/TestSortRandom.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSortRandom.java b/lucene/core/src/test/org/apache/lucene/search/TestSortRandom.java
index 32bce9e..2bccaff 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestSortRandom.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestSortRandom.java
@@ -229,8 +229,8 @@ public class TestSortRandom extends LuceneTestCase {
     }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-      return new ConstantScoreWeight(this) {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+      return new ConstantScoreWeight(this, boost) {
         @Override
         public Scorer scorer(LeafReaderContext context) throws IOException {
           Random random = new Random(context.docBase ^ seed);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/similarities/TestSimilarityBase.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/similarities/TestSimilarityBase.java b/lucene/core/src/test/org/apache/lucene/search/similarities/TestSimilarityBase.java
index 829d30c..aec55de 100644
--- a/lucene/core/src/test/org/apache/lucene/search/similarities/TestSimilarityBase.java
+++ b/lucene/core/src/test/org/apache/lucene/search/similarities/TestSimilarityBase.java
@@ -172,7 +172,7 @@ public class TestSimilarityBase extends LuceneTestCase {
   
   /** Creates the default statistics object that the specific tests modify. */
   private BasicStats createStats() {
-    BasicStats stats = new BasicStats("spoof");
+    BasicStats stats = new BasicStats("spoof", 1f);
     stats.setNumberOfDocuments(NUMBER_OF_DOCUMENTS);
     stats.setNumberOfFieldTokens(NUMBER_OF_FIELD_TOKENS);
     stats.setAvgFieldLength(AVG_FIELD_LENGTH);
@@ -197,9 +197,9 @@ public class TestSimilarityBase extends LuceneTestCase {
   private void unitTestCore(BasicStats stats, float freq, int docLen) {
     for (SimilarityBase sim : sims) {
       BasicStats realStats = (BasicStats) sim.computeWeight(
+          stats.getBoost(),
           toCollectionStats(stats), 
           toTermStats(stats));
-      realStats.normalize(1f, stats.getBoost());
       float score = sim.score(realStats, freq, docLen);
       float explScore = sim.explain(
           realStats, 1, Explanation.match(freq, "freq"), docLen).getValue();
@@ -530,9 +530,9 @@ public class TestSimilarityBase extends LuceneTestCase {
   private void correctnessTestCore(SimilarityBase sim, float gold) {
     BasicStats stats = createStats();
     BasicStats realStats = (BasicStats) sim.computeWeight(
+        stats.getBoost(),
         toCollectionStats(stats), 
         toTermStats(stats));
-    realStats.normalize(1f, stats.getBoost());
     float score = sim.score(realStats, FREQ, DOC_LEN);
     assertEquals(
         sim.toString() + " score not correct.", gold, score, FLOAT_EPSILON);
@@ -648,7 +648,7 @@ public class TestSimilarityBase extends LuceneTestCase {
           continue;
         }
       }
-      BasicStats stats = (BasicStats) sim.computeWeight(collectionStats, termStats);
+      BasicStats stats = (BasicStats) sim.computeWeight(1f, collectionStats, termStats);
       for (float tf = 1.0f; tf <= 10.0f; tf += 1.0f) {
         for (int i = 0; i < 256; i++) {
           float len = sim.decodeNormValue((byte) i);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/spans/JustCompileSearchSpans.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/spans/JustCompileSearchSpans.java b/lucene/core/src/test/org/apache/lucene/search/spans/JustCompileSearchSpans.java
index 6e6102f..b1f87dd 100644
--- a/lucene/core/src/test/org/apache/lucene/search/spans/JustCompileSearchSpans.java
+++ b/lucene/core/src/test/org/apache/lucene/search/spans/JustCompileSearchSpans.java
@@ -93,7 +93,7 @@ final class JustCompileSearchSpans {
     }
 
     @Override
-    public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+    public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
       throw new UnsupportedOperationException(UNSUPPORTED_MSG);
     }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/spans/TestFieldMaskingSpanQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/spans/TestFieldMaskingSpanQuery.java b/lucene/core/src/test/org/apache/lucene/search/spans/TestFieldMaskingSpanQuery.java
index c2e2c7d..8dccfcb 100644
--- a/lucene/core/src/test/org/apache/lucene/search/spans/TestFieldMaskingSpanQuery.java
+++ b/lucene/core/src/test/org/apache/lucene/search/spans/TestFieldMaskingSpanQuery.java
@@ -142,7 +142,7 @@ public class TestFieldMaskingSpanQuery extends LuceneTestCase {
     QueryUtils.checkEqual(q, qr);
 
     Set<Term> terms = new HashSet<>();
-    qr.createWeight(searcher, false).extractTerms(terms);
+    qr.createWeight(searcher, false, 1f).extractTerms(terms);
     assertEquals(1, terms.size());
   }
   
@@ -162,7 +162,7 @@ public class TestFieldMaskingSpanQuery extends LuceneTestCase {
     QueryUtils.checkUnequal(q, qr);
 
     Set<Term> terms = new HashSet<>();
-    qr.createWeight(searcher, false).extractTerms(terms);
+    qr.createWeight(searcher, false, 1f).extractTerms(terms);
     assertEquals(2, terms.size());
   }
   
@@ -176,7 +176,7 @@ public class TestFieldMaskingSpanQuery extends LuceneTestCase {
     QueryUtils.checkEqual(q, qr);
 
     HashSet<Term> set = new HashSet<>();
-    qr.createWeight(searcher, true).extractTerms(set);
+    qr.createWeight(searcher, true, 1f).extractTerms(set);
     assertEquals(2, set.size());
   }
   
@@ -252,7 +252,7 @@ public class TestFieldMaskingSpanQuery extends LuceneTestCase {
     SpanQuery q  = new SpanOrQuery(q1, new FieldMaskingSpanQuery(q2, "gender"));
     check(q, new int[] { 0, 1, 2, 3, 4 });
 
-    Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans span = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(span, 0,0,1);
     assertNext(span, 1,0,1);
     assertNext(span, 1,1,2);
@@ -274,8 +274,8 @@ public class TestFieldMaskingSpanQuery extends LuceneTestCase {
     check(qA, new int[] { 0, 1, 2, 4 });
     check(qB, new int[] { 0, 1, 2, 4 });
   
-    Spans spanA = qA.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
-    Spans spanB = qB.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spanA = qA.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spanB = qB.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     
     while (spanA.nextDoc() != Spans.NO_MORE_DOCS) {
       assertNotSame("spanB not still going", Spans.NO_MORE_DOCS, spanB.nextDoc());
@@ -300,7 +300,7 @@ public class TestFieldMaskingSpanQuery extends LuceneTestCase {
         new FieldMaskingSpanQuery(qB, "id") }, -1, false );
     check(q, new int[] { 0, 1, 2, 3 });
 
-    Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans span = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(span, 0,0,1);
     assertNext(span, 1,1,2);
     assertNext(span, 2,0,1);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/spans/TestNearSpansOrdered.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/spans/TestNearSpansOrdered.java b/lucene/core/src/test/org/apache/lucene/search/spans/TestNearSpansOrdered.java
index d9c003a..6b491fe 100644
--- a/lucene/core/src/test/org/apache/lucene/search/spans/TestNearSpansOrdered.java
+++ b/lucene/core/src/test/org/apache/lucene/search/spans/TestNearSpansOrdered.java
@@ -121,7 +121,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
   
   public void testNearSpansNext() throws Exception {
     SpanNearQuery q = makeQuery();
-    Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans span = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(span,0,0,3);
     assertNext(span,1,0,4);
     assertFinished(span);
@@ -134,7 +134,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
    */
   public void testNearSpansAdvanceLikeNext() throws Exception {
     SpanNearQuery q = makeQuery();
-    Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans span = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertEquals(0, span.advance(0));
     assertEquals(0, span.nextStartPosition());
     assertEquals(s(0,0,3), s(span));
@@ -146,7 +146,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
   
   public void testNearSpansNextThenAdvance() throws Exception {
     SpanNearQuery q = makeQuery();
-    Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans span = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNotSame(Spans.NO_MORE_DOCS, span.nextDoc());
     assertEquals(0, span.nextStartPosition());
     assertEquals(s(0,0,3), s(span));
@@ -158,7 +158,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
   
   public void testNearSpansNextThenAdvancePast() throws Exception {
     SpanNearQuery q = makeQuery();
-    Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans span = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNotSame(Spans.NO_MORE_DOCS, span.nextDoc());
     assertEquals(0, span.nextStartPosition());
     assertEquals(s(0,0,3), s(span));
@@ -167,13 +167,13 @@ public class TestNearSpansOrdered extends LuceneTestCase {
   
   public void testNearSpansAdvancePast() throws Exception {
     SpanNearQuery q = makeQuery();
-    Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans span = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertEquals(Spans.NO_MORE_DOCS, span.advance(2));
   }
   
   public void testNearSpansAdvanceTo0() throws Exception {
     SpanNearQuery q = makeQuery();
-    Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans span = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertEquals(0, span.advance(0));
     assertEquals(0, span.nextStartPosition());
     assertEquals(s(0,0,3), s(span));
@@ -181,7 +181,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
 
   public void testNearSpansAdvanceTo1() throws Exception {
     SpanNearQuery q = makeQuery();
-    Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans span = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertEquals(1, span.advance(1));
     assertEquals(0, span.nextStartPosition());
     assertEquals(s(1,0,4), s(span));
@@ -220,7 +220,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
         new SpanOrQuery(new SpanTermQuery(new Term(FIELD, "w1")), new SpanTermQuery(new Term(FIELD, "w2"))),
         new SpanTermQuery(new Term(FIELD, "w4"))
     }, 10, true);
-    Spans spans = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(spans,0,0,4);
     assertNext(spans,0,1,4);
     assertFinished(spans);
@@ -230,7 +230,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
     SpanNearQuery q = new SpanNearQuery(new SpanQuery[]{
         new SpanTermQuery(new Term(FIELD, "t1")), new SpanTermQuery(new Term(FIELD, "t2"))
     }, 1, true);
-    Spans spans = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(spans,4,0,2);
     assertFinished(spans);
   }
@@ -239,7 +239,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
     SpanNearQuery q = new SpanNearQuery(new SpanQuery[]{
         new SpanTermQuery(new Term(FIELD, "t2")), new SpanTermQuery(new Term(FIELD, "t1"))
     }, 1, true);
-    Spans spans = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(spans,4,1,4);
     assertNext(spans,4,2,4);
     assertFinished(spans);
@@ -263,7 +263,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
         .addGap(1)
         .addClause(new SpanTermQuery(new Term(FIELD, "w2")))
         .build();
-    Spans spans = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(spans, 1, 0, 3);
     assertNext(spans, 2, 0, 3);
     assertFinished(spans);
@@ -276,7 +276,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
         .addClause(new SpanTermQuery(new Term(FIELD, "w3")))
         .setSlop(1)
         .build();
-    spans = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    spans = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(spans, 2, 0, 5);
     assertNext(spans, 3, 0, 6);
     assertFinished(spans);
@@ -288,7 +288,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
         .addGap(2)
         .addClause(new SpanTermQuery(new Term(FIELD, "g")))
         .build();
-    Spans spans = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(spans, 5, 0, 4);
     assertNext(spans, 5, 9, 13);
     assertFinished(spans);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanCollection.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanCollection.java b/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanCollection.java
index 8e45482..2b42a76 100644
--- a/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanCollection.java
+++ b/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanCollection.java
@@ -119,7 +119,7 @@ public class TestSpanCollection extends LuceneTestCase {
     SpanNearQuery q7 = new SpanNearQuery(new SpanQuery[]{q1, q6}, 1, true);
 
     TermCollector collector = new TermCollector();
-    Spans spans = q7.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = q7.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertEquals(0, spans.advance(0));
     spans.nextStartPosition();
     checkCollectedTerms(spans, collector, new Term(FIELD, "w1"), new Term(FIELD, "w2"), new Term(FIELD, "w3"));
@@ -139,7 +139,7 @@ public class TestSpanCollection extends LuceneTestCase {
     SpanOrQuery orQuery = new SpanOrQuery(q2, q3);
 
     TermCollector collector = new TermCollector();
-    Spans spans = orQuery.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = orQuery.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
 
     assertEquals(1, spans.advance(1));
     spans.nextStartPosition();
@@ -169,7 +169,7 @@ public class TestSpanCollection extends LuceneTestCase {
     SpanNotQuery notq = new SpanNotQuery(nq, q3);
 
     TermCollector collector = new TermCollector();
-    Spans spans = notq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = notq.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
 
     assertEquals(2, spans.advance(2));
     spans.nextStartPosition();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanContainQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanContainQuery.java b/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanContainQuery.java
index 3e50183..c2c2338 100644
--- a/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanContainQuery.java
+++ b/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanContainQuery.java
@@ -72,7 +72,7 @@ public class TestSpanContainQuery extends LuceneTestCase {
   }
 
   Spans makeSpans(SpanQuery sq) throws Exception {
-    return sq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    return sq.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
   }
 
   void tstEqualSpans(String mes, SpanQuery expectedQ, SpanQuery actualQ) throws Exception {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/test/org/apache/lucene/search/spans/TestSpans.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/spans/TestSpans.java b/lucene/core/src/test/org/apache/lucene/search/spans/TestSpans.java
index 2983301..2d5e05c 100644
--- a/lucene/core/src/test/org/apache/lucene/search/spans/TestSpans.java
+++ b/lucene/core/src/test/org/apache/lucene/search/spans/TestSpans.java
@@ -202,7 +202,7 @@ public class TestSpans extends LuceneTestCase {
   public void testSpanNearOrderedOverlap() throws Exception {
     final SpanQuery query = spanNearOrderedQuery(field, 1, "t1", "t2", "t3");
     
-    Spans spans = query.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = query.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
 
     assertEquals("first doc", 11, spans.nextDoc());
     assertEquals("first start", 0, spans.nextStartPosition());
@@ -217,7 +217,7 @@ public class TestSpans extends LuceneTestCase {
   public void testSpanNearUnOrdered() throws Exception {
     //See http://www.gossamer-threads.com/lists/lucene/java-dev/52270 for discussion about this test
     SpanQuery senq = spanNearUnorderedQuery(field, 0, "u1", "u2");
-    Spans spans = senq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = senq.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(spans, 4, 1, 3);
     assertNext(spans, 5, 2, 4);
     assertNext(spans, 8, 2, 4);
@@ -226,7 +226,7 @@ public class TestSpans extends LuceneTestCase {
     assertFinished(spans);
 
     senq = spanNearUnorderedQuery(1, senq, spanTermQuery(field, "u2")); 
-    spans = senq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    spans = senq.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(spans, 4, 0, 3);
     assertNext(spans, 4, 1, 3); // unordered spans can be subsets
     assertNext(spans, 5, 0, 4);
@@ -240,7 +240,7 @@ public class TestSpans extends LuceneTestCase {
   }
 
   private Spans orSpans(String[] terms) throws Exception {
-    return spanOrQuery(field, terms).createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    return spanOrQuery(field, terms).createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
   }
 
   public void testSpanOrEmpty() throws Exception {
@@ -444,7 +444,7 @@ public class TestSpans extends LuceneTestCase {
      SpanQuery iq = spanTermQuery(field, include);
      SpanQuery eq = spanTermQuery(field, exclude);
      SpanQuery snq = spanNotQuery(iq, eq, pre, post);
-     Spans spans = snq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+     Spans spans = snq.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
 
      int i = 0;
      if (spans != null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/facet/src/java/org/apache/lucene/facet/DrillSidewaysQuery.java
----------------------------------------------------------------------
diff --git a/lucene/facet/src/java/org/apache/lucene/facet/DrillSidewaysQuery.java b/lucene/facet/src/java/org/apache/lucene/facet/DrillSidewaysQuery.java
index 48883ea..bb9db7a 100644
--- a/lucene/facet/src/java/org/apache/lucene/facet/DrillSidewaysQuery.java
+++ b/lucene/facet/src/java/org/apache/lucene/facet/DrillSidewaysQuery.java
@@ -79,8 +79,8 @@ class DrillSidewaysQuery extends Query {
   }
   
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    final Weight baseWeight = baseQuery.createWeight(searcher, needsScores);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    final Weight baseWeight = baseQuery.createWeight(searcher, needsScores, boost);
     final Weight[] drillDowns = new Weight[drillDownQueries.length];
     for(int dim=0;dim<drillDownQueries.length;dim++) {
       drillDowns[dim] = searcher.createNormalizedWeight(drillDownQueries[dim], false);
@@ -96,16 +96,6 @@ class DrillSidewaysQuery extends Query {
       }
 
       @Override
-      public float getValueForNormalization() throws IOException {
-        return baseWeight.getValueForNormalization();
-      }
-
-      @Override
-      public void normalize(float norm, float boost) {
-        baseWeight.normalize(norm, boost);
-      }
-
-      @Override
       public Scorer scorer(LeafReaderContext context) throws IOException {
         // We can only run as a top scorer:
         throw new UnsupportedOperationException();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRange.java
----------------------------------------------------------------------
diff --git a/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRange.java b/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRange.java
index 8893c65..2203be3 100644
--- a/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRange.java
+++ b/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRange.java
@@ -137,12 +137,12 @@ public final class DoubleRange extends Range {
     }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
       final Weight fastMatchWeight = fastMatchQuery == null
           ? null
-          : searcher.createWeight(fastMatchQuery, false);
+          : searcher.createWeight(fastMatchQuery, false, 1f);
 
-      return new ConstantScoreWeight(this) {
+      return new ConstantScoreWeight(this, boost) {
         @Override
         public Scorer scorer(LeafReaderContext context) throws IOException {
           final int maxDoc = context.reader().maxDoc();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/facet/src/java/org/apache/lucene/facet/range/LongRange.java
----------------------------------------------------------------------
diff --git a/lucene/facet/src/java/org/apache/lucene/facet/range/LongRange.java b/lucene/facet/src/java/org/apache/lucene/facet/range/LongRange.java
index 46ee00b..9c12ecd 100644
--- a/lucene/facet/src/java/org/apache/lucene/facet/range/LongRange.java
+++ b/lucene/facet/src/java/org/apache/lucene/facet/range/LongRange.java
@@ -129,12 +129,12 @@ public final class LongRange extends Range {
     }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
       final Weight fastMatchWeight = fastMatchQuery == null
           ? null
-          : searcher.createWeight(fastMatchQuery, false);
+          : searcher.createWeight(fastMatchQuery, false, 1f);
 
-      return new ConstantScoreWeight(this) {
+      return new ConstantScoreWeight(this, boost) {
         @Override
         public Scorer scorer(LeafReaderContext context) throws IOException {
           final int maxDoc = context.reader().maxDoc();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/facet/src/test/org/apache/lucene/facet/TestDrillSideways.java
----------------------------------------------------------------------
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/TestDrillSideways.java b/lucene/facet/src/test/org/apache/lucene/facet/TestDrillSideways.java
index ebb8fb6..7531ec7 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/TestDrillSideways.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/TestDrillSideways.java
@@ -650,8 +650,8 @@ public class TestDrillSideways extends FacetTestCase {
         filter = new Query() {
 
           @Override
-          public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-            return new RandomAccessWeight(this) {
+          public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+            return new RandomAccessWeight(this, boost) {
               @Override
               protected Bits getMatchingDocs(final LeafReaderContext context) throws IOException {
                 return new Bits() {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java
----------------------------------------------------------------------
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java b/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java
index b9ff1ce..7250ef4 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java
@@ -678,8 +678,8 @@ public class TestRangeFacetCounts extends FacetTestCase {
     }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-      final Weight in = this.in.createWeight(searcher, needsScores);
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+      final Weight in = this.in.createWeight(searcher, needsScores, boost);
       return new Weight(in.getQuery()) {
 
         @Override
@@ -693,16 +693,6 @@ public class TestRangeFacetCounts extends FacetTestCase {
         }
 
         @Override
-        public float getValueForNormalization() throws IOException {
-          return in.getValueForNormalization();
-        }
-
-        @Override
-        public void normalize(float norm, float topLevelBoost) {
-          in.normalize(norm, topLevelBoost);
-        }
-
-        @Override
         public Scorer scorer(LeafReaderContext context) throws IOException {
           used.set(true);
           return in.scorer(context);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/highlighter/src/java/org/apache/lucene/search/highlight/WeightedSpanTermExtractor.java
----------------------------------------------------------------------
diff --git a/lucene/highlighter/src/java/org/apache/lucene/search/highlight/WeightedSpanTermExtractor.java b/lucene/highlighter/src/java/org/apache/lucene/search/highlight/WeightedSpanTermExtractor.java
index 7507bdd..837201e 100644
--- a/lucene/highlighter/src/java/org/apache/lucene/search/highlight/WeightedSpanTermExtractor.java
+++ b/lucene/highlighter/src/java/org/apache/lucene/search/highlight/WeightedSpanTermExtractor.java
@@ -288,10 +288,10 @@ public class WeightedSpanTermExtractor {
       for (final String field : fieldNames) {
         final SpanQuery rewrittenQuery = (SpanQuery) spanQuery.rewrite(getLeafContext().reader());
         queries.put(field, rewrittenQuery);
-        rewrittenQuery.createWeight(searcher, false).extractTerms(nonWeightedTerms);
+        rewrittenQuery.createWeight(searcher, false, boost).extractTerms(nonWeightedTerms);
       }
     } else {
-      spanQuery.createWeight(searcher, false).extractTerms(nonWeightedTerms);
+      spanQuery.createWeight(searcher, false, boost).extractTerms(nonWeightedTerms);
     }
 
     List<PositionSpan> spanPositions = new ArrayList<>();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsQuery.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsQuery.java
index 6c380b4..83e01e4 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsQuery.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsQuery.java
@@ -60,8 +60,8 @@ final class GlobalOrdinalsQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new W(this, toQuery.createWeight(searcher, false));
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new W(this, toQuery.createWeight(searcher, false, 1f), boost);
   }
 
   @Override
@@ -98,8 +98,8 @@ final class GlobalOrdinalsQuery extends Query {
 
     private final Weight approximationWeight;
 
-    W(Query query, Weight approximationWeight) {
-      super(query);
+    W(Query query, Weight approximationWeight, float boost) {
+      super(query, boost);
       this.approximationWeight = approximationWeight;
     }
 


[27/51] [abbrv] lucene-solr:apiv2: SOLR-7280: precommit errors

Posted by sa...@apache.org.
SOLR-7280: precommit errors


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/74633594
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/74633594
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/74633594

Branch: refs/heads/apiv2
Commit: 74633594d891d3f6eff61ad39310f6410dbfc313
Parents: 6c1b75b
Author: Noble Paul <no...@apache.org>
Authored: Sat Jul 16 19:54:43 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Sat Jul 16 19:54:43 2016 +0530

----------------------------------------------------------------------
 .../org/apache/solr/core/CoreContainer.java     |  1 -
 .../java/org/apache/solr/core/CoreSorter.java   | 33 +++++++++---------
 .../org/apache/solr/core/CoreSorterTest.java    | 36 +++++++++-----------
 3 files changed, 33 insertions(+), 37 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/74633594/solr/core/src/java/org/apache/solr/core/CoreContainer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index 21f495c..aa65f54 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -36,7 +36,6 @@ import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.Comparator;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/74633594/solr/core/src/java/org/apache/solr/core/CoreSorter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CoreSorter.java b/solr/core/src/java/org/apache/solr/core/CoreSorter.java
index 8074009..4c37b8f 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreSorter.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreSorter.java
@@ -1,20 +1,3 @@
-package org.apache.solr.core;
-
-import java.util.Collection;
-import java.util.Comparator;
-import java.util.LinkedHashMap;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.solr.cloud.CloudDescriptor;
-import org.apache.solr.common.cloud.ClusterState;
-import org.apache.solr.common.cloud.DocCollection;
-import org.apache.solr.common.cloud.Replica;
-import org.apache.solr.common.cloud.Slice;
-
-import static java.util.Collections.emptyList;
-import static java.util.stream.Collectors.toList;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -38,6 +21,22 @@ import static java.util.stream.Collectors.toList;
  * happening in other nodes of the cluster
  *
  */
+package org.apache.solr.core;
+
+import java.util.Collection;
+import java.util.Comparator;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.solr.cloud.CloudDescriptor;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.DocCollection;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
+
+import static java.util.Collections.emptyList;
+import static java.util.stream.Collectors.toList;
 public class CoreSorter {
   Map<String, CountsForEachShard> shardsVsReplicaCounts = new LinkedHashMap<>();
   CoreContainer cc;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/74633594/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java b/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java
index 5b550bf..3e5af20 100644
--- a/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java
+++ b/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java
@@ -1,3 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.solr.core;
 
 import java.util.ArrayList;
@@ -27,24 +43,6 @@ import static org.easymock.EasyMock.expectLastCall;
 import static org.easymock.EasyMock.replay;
 import static org.easymock.EasyMock.reset;
 
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
 public class CoreSorterTest extends SolrTestCaseJ4 {
   Map<String, Boolean> nodes = new LinkedHashMap<>();
   Set<String> liveNodes = new HashSet<>();
@@ -77,7 +75,7 @@ public class CoreSorterTest extends SolrTestCaseJ4 {
 
     for (int i = 0; i < 10; i++) {
       List<CountsForEachShard> copy = new ArrayList<>(l);
-      Collections.shuffle(copy);
+      Collections.shuffle(copy, random());
       Collections.sort(copy, CoreSorter.countsComparator);
       for (int j = 0; j < copy.size(); j++) {
         assertEquals(expected.get(j), copy.get(j));

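The shuffle change presumably addresses the forbidden-API rule against Collections.shuffle(List),
which cannot be replayed from the test seed; in the test itself, random() supplies the framework's
seeded Random. A standalone illustration with a fixed seed standing in for it:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Random;

class ShuffleSketch {
  public static void main(String[] args) {
    List<Integer> copy = new ArrayList<>(Arrays.asList(3, 1, 2, 5, 4));
    // Passing an explicit Random keeps the shuffle reproducible for a given seed;
    // the no-Random overload draws from a source the test runner cannot replay.
    Collections.shuffle(copy, new Random(42L));
    System.out.println(copy);
  }
}
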

[46/51] [abbrv] lucene-solr:apiv2: Tweak LeafFieldComparator javadocs (duplicate and copy/paste).

Posted by sa...@apache.org.
Tweak LeafFieldComparator javadocs (duplicate and copy/paste).


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/ee44da66
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/ee44da66
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/ee44da66

Branch: refs/heads/apiv2
Commit: ee44da6662fd4e7c3bc3156e5df3a29a7acaef4b
Parents: 9aa639d
Author: Christine Poerschke <cp...@apache.org>
Authored: Wed Jul 20 11:42:27 2016 +0100
Committer: Christine Poerschke <cp...@apache.org>
Committed: Wed Jul 20 11:48:21 2016 +0100

----------------------------------------------------------------------
 .../java/org/apache/lucene/search/LeafFieldComparator.java    | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ee44da66/lucene/core/src/java/org/apache/lucene/search/LeafFieldComparator.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/LeafFieldComparator.java b/lucene/core/src/java/org/apache/lucene/search/LeafFieldComparator.java
index bbf1de8..7f84953 100644
--- a/lucene/core/src/java/org/apache/lucene/search/LeafFieldComparator.java
+++ b/lucene/core/src/java/org/apache/lucene/search/LeafFieldComparator.java
@@ -38,9 +38,6 @@ import java.io.IOException;
  *  <li> {@link #compareBottom} Compare a new hit (docID)
  *       against the "weakest" (bottom) entry in the queue.
  *
- *  <li> {@link #compareBottom} Compare a new hit (docID)
- *       against the "weakest" (bottom) entry in the queue.
- *
  *  <li> {@link #compareTop} Compare a new hit (docID)
  *       against the top value previously set by a call to
  *       {@link FieldComparator#setTopValue}.
@@ -95,8 +92,8 @@ public interface LeafFieldComparator {
    *    
    * @param doc that was hit
    * @return any {@code N < 0} if the doc's value is sorted after
-   * the bottom entry (not competitive), any {@code N > 0} if the
-   * doc's value is sorted before the bottom entry and {@code 0} if
+   * the top entry (not competitive), any {@code N > 0} if the
+   * doc's value is sorted before the top entry and {@code 0} if
    * they are equal.
    */
   int compareTop(int doc) throws IOException;


[25/51] [abbrv] lucene-solr:apiv2: SOLR-9237: Make FvhContainer public

Posted by sa...@apache.org.
SOLR-9237: Make FvhContainer public


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/4f452261
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/4f452261
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/4f452261

Branch: refs/heads/apiv2
Commit: 4f45226174c4f1cdd5364b044b5d7ee6c2001522
Parents: 2585c9f
Author: Jan Høydahl <ja...@apache.org>
Authored: Sat Jul 16 11:51:06 2016 +0200
Committer: Jan Høydahl <ja...@apache.org>
Committed: Sat Jul 16 11:51:06 2016 +0200

----------------------------------------------------------------------
 .../java/org/apache/solr/highlight/DefaultSolrHighlighter.java | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4f452261/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java b/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java
index ca481cd..43decc3 100644
--- a/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java
+++ b/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java
@@ -739,12 +739,12 @@ public class DefaultSolrHighlighter extends SolrHighlighter implements PluginInf
     return new TokenOrderingFilter(tStream, 10);
   }
 
-  // Wraps FVH to allow pass-by-reference
-  protected class FvhContainer {
+  // Wraps FVH to allow pass-by-reference. Public access to allow use in 3rd party subclasses
+  public class FvhContainer {
     FastVectorHighlighter fvh;
     FieldQuery fieldQuery;
 
-    FvhContainer(FastVectorHighlighter fvh, FieldQuery fieldQuery) {
+    public FvhContainer(FastVectorHighlighter fvh, FieldQuery fieldQuery) {
       this.fvh = fvh;
       this.fieldQuery = fieldQuery;
     }


[05/51] [abbrv] lucene-solr:apiv2: LUCENE-7368: Remove queryNorm.

Posted by sa...@apache.org.
LUCENE-7368: Remove queryNorm.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/5def78ba
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/5def78ba
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/5def78ba

Branch: refs/heads/apiv2
Commit: 5def78ba101dd87261c787dc865979769c4b58e4
Parents: 7b4af27
Author: Adrien Grand <jp...@gmail.com>
Authored: Thu Jul 7 18:17:20 2016 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Tue Jul 12 14:38:36 2016 +0200

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |    2 +
 lucene/MIGRATE.txt                              |    7 +
 .../org/apache/lucene/search/BooleanQuery.java  |    4 +-
 .../org/apache/lucene/search/BooleanWeight.java |   29 +-
 .../org/apache/lucene/search/BoostQuery.java    |   44 +-
 .../lucene/search/ConstantScoreQuery.java       |    6 +-
 .../lucene/search/ConstantScoreWeight.java      |   35 +-
 .../lucene/search/DisjunctionMaxQuery.java      |   29 +-
 .../lucene/search/DocValuesRewriteMethod.java   |    4 +-
 .../apache/lucene/search/FieldValueQuery.java   |    4 +-
 .../org/apache/lucene/search/IndexSearcher.java |   27 +-
 .../org/apache/lucene/search/LRUQueryCache.java |    2 +-
 .../apache/lucene/search/MatchAllDocsQuery.java |    4 +-
 .../apache/lucene/search/MatchNoDocsQuery.java  |   25 +-
 .../apache/lucene/search/MultiPhraseQuery.java  |   17 +-
 .../MultiTermQueryConstantScoreWrapper.java     |    7 +-
 .../org/apache/lucene/search/PhraseQuery.java   |   18 +-
 .../apache/lucene/search/PointInSetQuery.java   |    4 +-
 .../apache/lucene/search/PointRangeQuery.java   |    4 +-
 .../java/org/apache/lucene/search/Query.java    |    3 +-
 .../lucene/search/RandomAccessWeight.java       |    4 +-
 .../org/apache/lucene/search/SynonymQuery.java  |   20 +-
 .../org/apache/lucene/search/TermQuery.java     |   20 +-
 .../java/org/apache/lucene/search/Weight.java   |   15 +-
 .../org/apache/lucene/search/package-info.java  |   26 +-
 .../search/similarities/BM25Similarity.java     |   25 +-
 .../lucene/search/similarities/BasicStats.java  |   35 +-
 .../search/similarities/ClassicSimilarity.java  |    6 -
 .../search/similarities/LMSimilarity.java       |    8 +-
 .../search/similarities/MultiSimilarity.java    |   20 +-
 .../similarities/PerFieldSimilarityWrapper.java |   14 +-
 .../lucene/search/similarities/Similarity.java  |   50 +-
 .../search/similarities/SimilarityBase.java     |    8 +-
 .../search/similarities/TFIDFSimilarity.java    |  134 +-
 .../search/spans/FieldMaskingSpanQuery.java     |    4 +-
 .../lucene/search/spans/SpanBoostQuery.java     |   54 +-
 .../lucene/search/spans/SpanContainQuery.java   |    4 +-
 .../search/spans/SpanContainingQuery.java       |   12 +-
 .../search/spans/SpanMultiTermQueryWrapper.java |    2 +-
 .../lucene/search/spans/SpanNearQuery.java      |   18 +-
 .../lucene/search/spans/SpanNotQuery.java       |   12 +-
 .../apache/lucene/search/spans/SpanOrQuery.java |   10 +-
 .../search/spans/SpanPositionCheckQuery.java    |   10 +-
 .../apache/lucene/search/spans/SpanQuery.java   |    2 +-
 .../lucene/search/spans/SpanTermQuery.java      |    8 +-
 .../apache/lucene/search/spans/SpanWeight.java  |   20 +-
 .../lucene/search/spans/SpanWithinQuery.java    |   12 +-
 .../apache/lucene/index/TestCustomNorms.java    |    7 +-
 .../apache/lucene/index/TestIndexSorting.java   |    4 +-
 .../lucene/index/TestMaxTermFrequency.java      |    1 -
 .../test/org/apache/lucene/index/TestNorms.java |    9 +-
 .../org/apache/lucene/index/TestOmitTf.java     |    2 -
 .../lucene/index/TestUniqueTermCount.java       |    2 +-
 .../apache/lucene/search/JustCompileSearch.java |   12 +-
 .../apache/lucene/search/TestBooleanScorer.java |   11 +-
 .../lucene/search/TestComplexExplanations.java  |   17 +-
 .../apache/lucene/search/TestConjunctions.java  |   13 +-
 .../lucene/search/TestConstantScoreQuery.java   |   13 +-
 .../lucene/search/TestDocValuesScoring.java     |    9 +-
 .../apache/lucene/search/TestLRUQueryCache.java |   18 +-
 .../lucene/search/TestMinShouldMatch2.java      |   11 +-
 .../lucene/search/TestMultiPhraseQuery.java     |   27 -
 .../search/TestMultiTermConstantScore.java      |   72 -
 .../apache/lucene/search/TestNeedsScores.java   |   14 +-
 .../lucene/search/TestPositionIncrement.java    |    4 +-
 .../search/TestPositiveScoresOnlyCollector.java |    2 +-
 .../apache/lucene/search/TestQueryRescorer.java |   11 +-
 .../search/TestScoreCachingWrappingScorer.java  |    2 +-
 .../apache/lucene/search/TestScorerPerf.java    |    4 +-
 .../apache/lucene/search/TestSimilarity.java    |    2 -
 .../lucene/search/TestSimilarityProvider.java   |   10 -
 .../apache/lucene/search/TestSortRandom.java    |    4 +-
 .../search/similarities/TestSimilarityBase.java |    8 +-
 .../search/spans/JustCompileSearchSpans.java    |    2 +-
 .../search/spans/TestFieldMaskingSpanQuery.java |   14 +-
 .../search/spans/TestNearSpansOrdered.java      |   26 +-
 .../lucene/search/spans/TestSpanCollection.java |    6 +-
 .../search/spans/TestSpanContainQuery.java      |    2 +-
 .../apache/lucene/search/spans/TestSpans.java   |   10 +-
 .../apache/lucene/facet/DrillSidewaysQuery.java |   14 +-
 .../apache/lucene/facet/range/DoubleRange.java  |    6 +-
 .../apache/lucene/facet/range/LongRange.java    |    6 +-
 .../apache/lucene/facet/TestDrillSideways.java  |    4 +-
 .../facet/range/TestRangeFacetCounts.java       |   14 +-
 .../highlight/WeightedSpanTermExtractor.java    |    4 +-
 .../lucene/search/join/GlobalOrdinalsQuery.java |    8 +-
 .../join/GlobalOrdinalsWithScoreQuery.java      |   15 +-
 .../join/PointInSetIncludingScoreQuery.java     |   13 +-
 .../search/join/TermsIncludingScoreQuery.java   |   13 +-
 .../search/join/ToChildBlockJoinQuery.java      |   14 +-
 .../search/join/ToParentBlockJoinQuery.java     |   14 +-
 .../lucene/search/join/TestBlockJoin.java       |    4 +-
 .../apache/lucene/search/join/TestJoinUtil.java |   13 +-
 .../search/TestDiversifiedTopDocsCollector.java |    4 +-
 .../apache/lucene/queries/BoostingQuery.java    |   18 +-
 .../apache/lucene/queries/CustomScoreQuery.java |   82 +-
 .../org/apache/lucene/queries/TermsQuery.java   |    7 +-
 .../lucene/queries/function/BoostedQuery.java   |   18 +-
 .../lucene/queries/function/FunctionQuery.java  |   37 +-
 .../queries/function/FunctionRangeQuery.java    |   13 +-
 .../queries/payloads/PayloadScoreQuery.java     |   20 +-
 .../queries/payloads/SpanPayloadCheckQuery.java |   10 +-
 .../queries/TestCustomScoreExplanations.java    |   12 +-
 .../lucene/queries/TestCustomScoreQuery.java    |    3 -
 .../function/TestLongNormValueSource.java       |    6 -
 .../queries/payloads/TestPayloadScoreQuery.java |    5 -
 .../queries/payloads/TestPayloadSpans.java      |   30 +-
 .../queries/payloads/TestPayloadTermQuery.java  |    9 +-
 .../document/LatLonPointDistanceQuery.java      |    4 +-
 .../document/LatLonPointInPolygonQuery.java     |    4 +-
 .../lucene/search/DocValuesNumbersQuery.java    |    4 +-
 .../lucene/search/DocValuesRangeQuery.java      |    4 +-
 .../lucene/search/DocValuesTermsQuery.java      |    4 +-
 .../lucene/search/TermAutomatonQuery.java       |   18 +-
 .../sandbox/queries/FuzzyLikeThisQueryTest.java |    8 +-
 .../sandbox/queries/TestSlowFuzzyQuery2.java    |  184 -
 .../lucene/sandbox/queries/fuzzyTestData.txt    | 3721 ------------------
 .../lucene/search/TestTermAutomatonQuery.java   |    4 +-
 .../spatial/composite/CompositeVerifyQuery.java |    6 +-
 .../composite/IntersectsRPTVerifyQuery.java     |    4 +-
 .../spatial/prefix/AbstractPrefixTreeQuery.java |    4 +-
 .../serialized/SerializedDVStrategy.java        |    4 +-
 .../GeoPointTermQueryConstantScoreWrapper.java  |    4 +-
 .../spatial3d/PointInGeo3DShapeQuery.java       |    4 +-
 .../suggest/document/CompletionWeight.java      |    8 -
 .../search/suggest/document/ContextQuery.java   |    4 +-
 .../suggest/document/FuzzyCompletionQuery.java  |    2 +-
 .../suggest/document/PrefixCompletionQuery.java |    2 +-
 .../suggest/document/RegexCompletionQuery.java  |    2 +-
 .../suggest/document/SuggestIndexSearcher.java  |    2 +-
 .../lucene/index/BaseNormsFormatTestCase.java   |    2 +-
 .../lucene/search/AssertingIndexSearcher.java   |   23 +-
 .../apache/lucene/search/AssertingQuery.java    |    4 +-
 .../apache/lucene/search/AssertingWeight.java   |   10 -
 .../lucene/search/RandomApproximationQuery.java |   14 +-
 .../search/similarities/RandomSimilarity.java   |    9 -
 .../lucene/search/spans/AssertingSpanQuery.java |    4 +-
 .../search/spans/AssertingSpanWeight.java       |   12 +-
 .../search/TestBaseExplanationTestCase.java     |   10 +-
 .../handler/component/RealTimeGetComponent.java |    2 +-
 .../java/org/apache/solr/query/FilterQuery.java |    6 +-
 .../org/apache/solr/query/SolrRangeQuery.java   |   11 +-
 .../java/org/apache/solr/schema/LatLonType.java |   10 +-
 .../apache/solr/search/ExportQParserPlugin.java |    4 +-
 .../src/java/org/apache/solr/search/Filter.java |   10 +-
 .../solr/search/GraphTermsQParserPlugin.java    |    4 +-
 .../apache/solr/search/HashQParserPlugin.java   |    4 +-
 .../apache/solr/search/JoinQParserPlugin.java   |    8 +-
 .../apache/solr/search/ReRankQParserPlugin.java |   17 +-
 .../solr/search/SolrConstantScoreQuery.java     |    8 +-
 .../org/apache/solr/search/WrappedQuery.java    |    4 +-
 .../org/apache/solr/search/join/GraphQuery.java |   22 +-
 .../similarities/SchemaSimilarityFactory.java   |    4 +-
 .../solr/update/DeleteByQueryWrapper.java       |   10 +-
 .../apache/solr/DisMaxRequestHandlerTest.java   |    4 +-
 .../cloud/DistribJoinFromCollectionTest.java    |    2 +-
 .../apache/solr/search/TestRankQueryPlugin.java |    4 +-
 .../solr/search/mlt/CloudMLTQParserTest.java    |    6 +-
 .../uninverting/TestFieldCacheSortRandom.java   |    4 +-
 159 files changed, 436 insertions(+), 5390 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index dfc9ebf..eba11c9 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -13,6 +13,8 @@ API Changes
 * LUCENE-7369: Similarity.coord and BooleanQuery.disableCoord are removed.
   (Adrien Grand)
 
+* LUCENE-7368: Removed query normalization. (Adrien Grand)
+
 Bug Fixes
 
 Improvements

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/MIGRATE.txt
----------------------------------------------------------------------
diff --git a/lucene/MIGRATE.txt b/lucene/MIGRATE.txt
index 9b24ecb..f914529 100644
--- a/lucene/MIGRATE.txt
+++ b/lucene/MIGRATE.txt
@@ -29,3 +29,10 @@ undesirable. The new BM25Similarity does not suffer from this problem since it
 has better saturation for the contribution of the term frequency so the coord
 factors have been removed from scores. Things now work as if coords were always
 disabled when constructing boolean queries.
+
+## Weight.getValueForNormalization() and Weight.normalize() removed (LUCENE-7368)
+
+Query normalization's goal was to make scores comparable across queries, which
+was only implemented by the ClassicSimilarity. Since ClassicSimilarity is not
+the default similarity anymore, this functionality has been removed. Boosts are
+now propagated through Query#createWeight.
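
As a rough illustration of the new flow (not part of the patch; the field and term names are arbitrary, and a live IndexSearcher named searcher is assumed): nested boosts are multiplied on the way down through createWeight, so the leaf query's Weight sees a single combined boost instead of picking it up later through normalize():

    Query q = new BoostQuery(new BoostQuery(new TermQuery(new Term("body", "lucene")), 2f), 3f);
    // The TermQuery's Weight is created with boost 2f * 3f = 6f, which goes straight into
    // Similarity.computeWeight(boost, ...); there is no later normalize(norm, boost) step.
    Weight w = searcher.createWeight(searcher.rewrite(q), true, 1f);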

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java b/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java
index c22b1bb..3742bfc 100644
--- a/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java
@@ -196,12 +196,12 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     BooleanQuery query = this;
     if (needsScores == false) {
       query = rewriteNoScoring();
     }
-    return new BooleanWeight(query, searcher, needsScores);
+    return new BooleanWeight(query, searcher, needsScores, boost);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/BooleanWeight.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/BooleanWeight.java b/lucene/core/src/java/org/apache/lucene/search/BooleanWeight.java
index f55a6b7..ce4419f 100644
--- a/lucene/core/src/java/org/apache/lucene/search/BooleanWeight.java
+++ b/lucene/core/src/java/org/apache/lucene/search/BooleanWeight.java
@@ -42,14 +42,14 @@ final class BooleanWeight extends Weight {
   final ArrayList<Weight> weights;
   final boolean needsScores;
 
-  BooleanWeight(BooleanQuery query, IndexSearcher searcher, boolean needsScores) throws IOException {
+  BooleanWeight(BooleanQuery query, IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     super(query);
     this.query = query;
     this.needsScores = needsScores;
     this.similarity = searcher.getSimilarity(needsScores);
     weights = new ArrayList<>();
     for (BooleanClause c : query) {
-      Weight w = searcher.createWeight(c.getQuery(), needsScores && c.isScoring());
+      Weight w = searcher.createWeight(c.getQuery(), needsScores && c.isScoring(), boost);
       weights.add(w);
     }
   }
@@ -66,31 +66,6 @@ final class BooleanWeight extends Weight {
   }
 
   @Override
-  public float getValueForNormalization() throws IOException {
-    float sum = 0.0f;
-    int i = 0;
-    for (BooleanClause clause : query) {
-      // call sumOfSquaredWeights for all clauses in case of side effects
-      float s = weights.get(i).getValueForNormalization();         // sum sub weights
-      if (clause.isScoring()) {
-        // only add to sum for scoring clauses
-        sum += s;
-      }
-      i += 1;
-    }
-
-    return sum ;
-  }
-
-  @Override
-  public void normalize(float norm, float boost) {
-    for (Weight w : weights) {
-      // normalize all clauses, (even if non-scoring in case of side affects)
-      w.normalize(norm, boost);
-    }
-  }
-
-  @Override
   public Explanation explain(LeafReaderContext context, int doc) throws IOException {
     final int minShouldMatch = query.getMinimumNumberShouldMatch();
     List<Explanation> subs = new ArrayList<>();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/BoostQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/BoostQuery.java b/lucene/core/src/java/org/apache/lucene/search/BoostQuery.java
index eb7f4b9..5b05966 100644
--- a/lucene/core/src/java/org/apache/lucene/search/BoostQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/BoostQuery.java
@@ -19,11 +19,8 @@ package org.apache.lucene.search;
 
 import java.io.IOException;
 import java.util.Objects;
-import java.util.Set;
 
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.Term;
 
 /**
  * A {@link Query} wrapper that allows to give a boost to the wrapped query.
@@ -113,45 +110,8 @@ public final class BoostQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    final Weight weight = query.createWeight(searcher, needsScores);
-    if (needsScores == false) {
-      return weight;
-    }
-    // Apply the query boost, this may impact the return value of getValueForNormalization()
-    weight.normalize(1f, boost);
-    return new Weight(this) {
-
-      @Override
-      public void extractTerms(Set<Term> terms) {
-        weight.extractTerms(terms);
-      }
-
-      @Override
-      public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-        return weight.explain(context, doc);
-      }
-
-      @Override
-      public float getValueForNormalization() throws IOException {
-        return weight.getValueForNormalization();
-      }
-
-      @Override
-      public void normalize(float norm, float boost) {
-        weight.normalize(norm, BoostQuery.this.boost * boost);
-      }
-
-      @Override
-      public Scorer scorer(LeafReaderContext context) throws IOException {
-        return weight.scorer(context);
-      }
-      
-      @Override
-      public BulkScorer bulkScorer(LeafReaderContext context) throws IOException {
-        return weight.bulkScorer(context);
-      }
-    };
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return query.createWeight(searcher, needsScores, BoostQuery.this.boost * boost);
   }
 
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/ConstantScoreQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/ConstantScoreQuery.java b/lucene/core/src/java/org/apache/lucene/search/ConstantScoreQuery.java
index eb5e2d3..c5a7d08 100644
--- a/lucene/core/src/java/org/apache/lucene/search/ConstantScoreQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/ConstantScoreQuery.java
@@ -110,10 +110,10 @@ public final class ConstantScoreQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    final Weight innerWeight = searcher.createWeight(query, false);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    final Weight innerWeight = searcher.createWeight(query, false, 1f);
     if (needsScores) {
-      return new ConstantScoreWeight(this) {
+      return new ConstantScoreWeight(this, boost) {
 
         @Override
         public BulkScorer bulkScorer(LeafReaderContext context) throws IOException {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/ConstantScoreWeight.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/ConstantScoreWeight.java b/lucene/core/src/java/org/apache/lucene/search/ConstantScoreWeight.java
index 034a3a7..671ec71 100644
--- a/lucene/core/src/java/org/apache/lucene/search/ConstantScoreWeight.java
+++ b/lucene/core/src/java/org/apache/lucene/search/ConstantScoreWeight.java
@@ -32,13 +32,11 @@ import org.apache.lucene.index.Term;
  */
 public abstract class ConstantScoreWeight extends Weight {
 
-  private float boost;
-  private float queryNorm;
-  private float queryWeight;
+  private final float score;
 
-  protected ConstantScoreWeight(Query query) {
+  protected ConstantScoreWeight(Query query, float score) {
     super(query);
-    normalize(1f, 1f);
+    this.score = score;
   }
 
   @Override
@@ -48,31 +46,9 @@ public abstract class ConstantScoreWeight extends Weight {
     // override if your constant-score query does wrap terms
   }
 
-  @Override
-  public final float getValueForNormalization() throws IOException {
-    return queryWeight * queryWeight;
-  }
-
-  @Override
-  public void normalize(float norm, float boost) {
-    this.boost = boost;
-    queryNorm = norm;
-    queryWeight = queryNorm * boost;
-  }
-
-  /** Return the normalization factor for this weight. */
-  protected final float queryNorm() {
-    return queryNorm;
-  }
-
-  /** Return the boost for this weight. */
-  protected final float boost() {
-    return boost;
-  }
-
   /** Return the score produced by this {@link Weight}. */
   protected final float score() {
-    return queryWeight;
+    return score;
   }
 
   @Override
@@ -92,8 +68,7 @@ public abstract class ConstantScoreWeight extends Weight {
 
     if (exists) {
       return Explanation.match(
-          queryWeight, getQuery().toString() + ", product of:",
-          Explanation.match(boost, "boost"), Explanation.match(queryNorm, "queryNorm"));
+          score, getQuery().toString() + (score == 1f ? "" : "^" + score));
     } else {
       return Explanation.noMatch(getQuery().toString() + " doesn't match id " + doc);
     }
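
To make the new constructor contract concrete, a minimal constant-score query under this API could look like the following (a sketch, not code from this patch; it would sit inside a hypothetical Query subclass): the boost handed to createWeight is passed to ConstantScoreWeight and is exactly what score() reports afterwards.

    @Override
    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
      return new ConstantScoreWeight(this, boost) {
        @Override
        public Scorer scorer(LeafReaderContext context) throws IOException {
          // every document matches, scored at the constant score() == boost
          return new ConstantScoreScorer(this, score(), DocIdSetIterator.all(context.reader().maxDoc()));
        }
      };
    }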

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/DisjunctionMaxQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/DisjunctionMaxQuery.java b/lucene/core/src/java/org/apache/lucene/search/DisjunctionMaxQuery.java
index 8b1c45d..b29126b 100644
--- a/lucene/core/src/java/org/apache/lucene/search/DisjunctionMaxQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/DisjunctionMaxQuery.java
@@ -100,10 +100,10 @@ public final class DisjunctionMaxQuery extends Query implements Iterable<Query>
     private final boolean needsScores;
 
     /** Construct the Weight for this Query searched by searcher.  Recursively construct subquery weights. */
-    public DisjunctionMaxWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+    public DisjunctionMaxWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
       super(DisjunctionMaxQuery.this);
       for (Query disjunctQuery : disjuncts) {
-        weights.add(searcher.createWeight(disjunctQuery, needsScores));
+        weights.add(searcher.createWeight(disjunctQuery, needsScores, boost));
       }
       this.needsScores = needsScores;
     }
@@ -115,27 +115,6 @@ public final class DisjunctionMaxQuery extends Query implements Iterable<Query>
       }
     }
 
-    /** Compute the sub of squared weights of us applied to our subqueries.  Used for normalization. */
-    @Override
-    public float getValueForNormalization() throws IOException {
-      float max = 0.0f, sum = 0.0f;
-      for (Weight currentWeight : weights) {
-        float sub = currentWeight.getValueForNormalization();
-        sum += sub;
-        max = Math.max(max, sub);
-        
-      }
-      return (((sum - max) * tieBreakerMultiplier * tieBreakerMultiplier) + max);
-    }
-
-    /** Apply the computed normalization factor to our subqueries */
-    @Override
-    public void normalize(float norm, float boost) {
-      for (Weight wt : weights) {
-        wt.normalize(norm, boost);
-      }
-    }
-
     /** Create the scorer used to score our associated DisjunctionMaxQuery */
     @Override
     public Scorer scorer(LeafReaderContext context) throws IOException {
@@ -186,8 +165,8 @@ public final class DisjunctionMaxQuery extends Query implements Iterable<Query>
 
   /** Create the Weight used to score us */
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new DisjunctionMaxWeight(searcher, needsScores);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new DisjunctionMaxWeight(searcher, needsScores, boost);
   }
 
   /** Optimize our representation and our subqueries representations

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/DocValuesRewriteMethod.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/DocValuesRewriteMethod.java b/lucene/core/src/java/org/apache/lucene/search/DocValuesRewriteMethod.java
index 261e8e5..43c3c05 100644
--- a/lucene/core/src/java/org/apache/lucene/search/DocValuesRewriteMethod.java
+++ b/lucene/core/src/java/org/apache/lucene/search/DocValuesRewriteMethod.java
@@ -73,8 +73,8 @@ public final class DocValuesRewriteMethod extends MultiTermQuery.RewriteMethod {
     public final String getField() { return query.getField(); }
     
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-      return new RandomAccessWeight(this) {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+      return new RandomAccessWeight(this, boost) {
         @Override
         protected Bits getMatchingDocs(LeafReaderContext context) throws IOException {
           final SortedSetDocValues fcsi = DocValues.getSortedSet(context.reader(), query.field);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/FieldValueQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/FieldValueQuery.java b/lucene/core/src/java/org/apache/lucene/search/FieldValueQuery.java
index ed3c5e7..42fc256 100644
--- a/lucene/core/src/java/org/apache/lucene/search/FieldValueQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/FieldValueQuery.java
@@ -59,8 +59,8 @@ public final class FieldValueQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new RandomAccessWeight(this) {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new RandomAccessWeight(this, boost) {
 
       @Override
       protected Bits getMatchingDocs(LeafReaderContext context) throws IOException {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java b/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java
index b81b807..5cae122 100644
--- a/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java
+++ b/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java
@@ -87,18 +87,8 @@ public class IndexSearcher {
     }
 
     @Override
-    public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
-      return new SimWeight() {
-
-        @Override
-        public float getValueForNormalization() {
-          return 1f;
-        }
-
-        @Override
-        public void normalize(float queryNorm, float boost) {}
-
-      };
+    public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
+      return new SimWeight() {};
     }
 
     @Override
@@ -732,14 +722,7 @@ public class IndexSearcher {
    */
   public Weight createNormalizedWeight(Query query, boolean needsScores) throws IOException {
     query = rewrite(query);
-    Weight weight = createWeight(query, needsScores);
-    float v = weight.getValueForNormalization();
-    float norm = getSimilarity(needsScores).queryNorm(v);
-    if (Float.isInfinite(norm) || Float.isNaN(norm)) {
-      norm = 1.0f;
-    }
-    weight.normalize(norm, 1.0f);
-    return weight;
+    return createWeight(query, needsScores, 1f);
   }
 
   /**
@@ -747,9 +730,9 @@ public class IndexSearcher {
    * if possible and configured.
    * @lucene.experimental
    */
-  public Weight createWeight(Query query, boolean needsScores) throws IOException {
+  public Weight createWeight(Query query, boolean needsScores, float boost) throws IOException {
     final QueryCache queryCache = this.queryCache;
-    Weight weight = query.createWeight(this, needsScores);
+    Weight weight = query.createWeight(this, needsScores, boost);
     if (needsScores == false && queryCache != null) {
       weight = queryCache.doCache(weight, queryCachingPolicy);
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/LRUQueryCache.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/LRUQueryCache.java b/lucene/core/src/java/org/apache/lucene/search/LRUQueryCache.java
index f555365..7ad208f 100644
--- a/lucene/core/src/java/org/apache/lucene/search/LRUQueryCache.java
+++ b/lucene/core/src/java/org/apache/lucene/search/LRUQueryCache.java
@@ -688,7 +688,7 @@ public class LRUQueryCache implements QueryCache, Accountable {
     private final AtomicBoolean used;
 
     CachingWrapperWeight(Weight in, QueryCachingPolicy policy) {
-      super(in.getQuery());
+      super(in.getQuery(), 1f);
       this.in = in;
       this.policy = policy;
       used = new AtomicBoolean(false);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/MatchAllDocsQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/MatchAllDocsQuery.java b/lucene/core/src/java/org/apache/lucene/search/MatchAllDocsQuery.java
index 2566cf0..296f502 100644
--- a/lucene/core/src/java/org/apache/lucene/search/MatchAllDocsQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/MatchAllDocsQuery.java
@@ -29,8 +29,8 @@ import org.apache.lucene.util.Bits;
 public final class MatchAllDocsQuery extends Query {
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) {
-    return new ConstantScoreWeight(this) {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) {
+    return new ConstantScoreWeight(this, boost) {
       @Override
       public String toString() {
         return "weight(" + MatchAllDocsQuery.this + ")";

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/MatchNoDocsQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/MatchNoDocsQuery.java b/lucene/core/src/java/org/apache/lucene/search/MatchNoDocsQuery.java
index 825e082..e2d4f98 100644
--- a/lucene/core/src/java/org/apache/lucene/search/MatchNoDocsQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/MatchNoDocsQuery.java
@@ -42,7 +42,7 @@ public class MatchNoDocsQuery extends Query {
   }
   
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     return new Weight(this) {
       @Override
       public void extractTerms(Set<Term> terms) {
@@ -58,29 +58,6 @@ public class MatchNoDocsQuery extends Query {
         return null;
       }
 
-      @Override
-      public final float getValueForNormalization() throws IOException {
-        return 0;
-      }
-
-      @Override
-      public void normalize(float norm, float boost) {
-      }
-
-      /** Return the normalization factor for this weight. */
-      protected final float queryNorm() {
-        return 0;
-      }
-
-      /** Return the boost for this weight. */
-      protected final float boost() {
-        return 0;
-      }
-
-      /** Return the score produced by this {@link Weight}. */
-      protected final float score() {
-        return 0;
-      }
     };
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/MultiPhraseQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/MultiPhraseQuery.java b/lucene/core/src/java/org/apache/lucene/search/MultiPhraseQuery.java
index 00af26c..afb6fc7 100644
--- a/lucene/core/src/java/org/apache/lucene/search/MultiPhraseQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/MultiPhraseQuery.java
@@ -187,7 +187,7 @@ public class MultiPhraseQuery extends Query {
     private final Map<Term,TermContext> termContexts = new HashMap<>();
     private final boolean needsScores;
 
-    public MultiPhraseWeight(IndexSearcher searcher, boolean needsScores)
+    public MultiPhraseWeight(IndexSearcher searcher, boolean needsScores, float boost)
       throws IOException {
       super(MultiPhraseQuery.this);
       this.needsScores = needsScores;
@@ -207,6 +207,7 @@ public class MultiPhraseQuery extends Query {
         }
       }
       stats = similarity.computeWeight(
+          boost,
           searcher.collectionStatistics(field),
           allTermStats.toArray(new TermStatistics[allTermStats.size()]));
     }
@@ -219,16 +220,6 @@ public class MultiPhraseQuery extends Query {
     }
 
     @Override
-    public float getValueForNormalization() {
-      return stats.getValueForNormalization();
-    }
-
-    @Override
-    public void normalize(float queryNorm, float boost) {
-      stats.normalize(queryNorm, boost);
-    }
-
-    @Override
     public Scorer scorer(LeafReaderContext context) throws IOException {
       assert termArrays.length != 0;
       final LeafReader reader = context.reader();
@@ -331,8 +322,8 @@ public class MultiPhraseQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new MultiPhraseWeight(searcher, needsScores);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new MultiPhraseWeight(searcher, needsScores, boost);
   }
 
   /** Prints a user-readable version of this query. */

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/MultiTermQueryConstantScoreWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/MultiTermQueryConstantScoreWrapper.java b/lucene/core/src/java/org/apache/lucene/search/MultiTermQueryConstantScoreWrapper.java
index ed4c430..54c6d48 100644
--- a/lucene/core/src/java/org/apache/lucene/search/MultiTermQueryConstantScoreWrapper.java
+++ b/lucene/core/src/java/org/apache/lucene/search/MultiTermQueryConstantScoreWrapper.java
@@ -108,8 +108,8 @@ final class MultiTermQueryConstantScoreWrapper<Q extends MultiTermQuery> extends
   public final String getField() { return query.getField(); }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new ConstantScoreWeight(this) {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new ConstantScoreWeight(this, boost) {
 
       /** Try to collect terms from the given terms enum and return true iff all
        *  terms could be collected. If {@code false} is returned, the enum is
@@ -153,8 +153,7 @@ final class MultiTermQueryConstantScoreWrapper<Q extends MultiTermQuery> extends
             bq.add(new TermQuery(new Term(query.field, t.term), termContext), Occur.SHOULD);
           }
           Query q = new ConstantScoreQuery(bq.build());
-          final Weight weight = searcher.rewrite(q).createWeight(searcher, needsScores);
-          weight.normalize(1f, score());
+          final Weight weight = searcher.rewrite(q).createWeight(searcher, needsScores, score());
           return new WeightOrDocIdSet(weight);
         }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/PhraseQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/PhraseQuery.java b/lucene/core/src/java/org/apache/lucene/search/PhraseQuery.java
index 64c0946..70dbed8 100644
--- a/lucene/core/src/java/org/apache/lucene/search/PhraseQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/PhraseQuery.java
@@ -356,7 +356,7 @@ public class PhraseQuery extends Query {
     private final boolean needsScores;
     private transient TermContext states[];
 
-    public PhraseWeight(IndexSearcher searcher, boolean needsScores)
+    public PhraseWeight(IndexSearcher searcher, boolean needsScores, float boost)
       throws IOException {
       super(PhraseQuery.this);
       final int[] positions = PhraseQuery.this.getPositions();
@@ -375,7 +375,7 @@ public class PhraseQuery extends Query {
         states[i] = TermContext.build(context, term);
         termStats[i] = searcher.termStatistics(term, states[i]);
       }
-      stats = similarity.computeWeight(searcher.collectionStatistics(field), termStats);
+      stats = similarity.computeWeight(boost, searcher.collectionStatistics(field), termStats);
     }
 
     @Override
@@ -387,16 +387,6 @@ public class PhraseQuery extends Query {
     public String toString() { return "weight(" + PhraseQuery.this + ")"; }
 
     @Override
-    public float getValueForNormalization() {
-      return stats.getValueForNormalization();
-    }
-
-    @Override
-    public void normalize(float queryNorm, float boost) {
-      stats.normalize(queryNorm, boost);
-    }
-
-    @Override
     public Scorer scorer(LeafReaderContext context) throws IOException {
       assert terms.length > 0;
       final LeafReader reader = context.reader();
@@ -507,8 +497,8 @@ public class PhraseQuery extends Query {
 
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new PhraseWeight(searcher, needsScores);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new PhraseWeight(searcher, needsScores, boost);
   }
 
   /** Prints a user-readable version of this query. */

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java b/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java
index 8569114..34b6ed8 100644
--- a/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java
@@ -106,12 +106,12 @@ public abstract class PointInSetQuery extends Query {
   }
 
   @Override
-  public final Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public final Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
 
     // We don't use RandomAccessWeight here: it's no good to approximate with "match all docs".
     // This is an inverted structure and should be used in the first pass:
 
-    return new ConstantScoreWeight(this) {
+    return new ConstantScoreWeight(this, boost) {
 
       @Override
       public Scorer scorer(LeafReaderContext context) throws IOException {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java b/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java
index 63de04c..1cd2d35 100644
--- a/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java
@@ -98,12 +98,12 @@ public abstract class PointRangeQuery extends Query {
   }
 
   @Override
-  public final Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public final Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
 
     // We don't use RandomAccessWeight here: it's no good to approximate with "match all docs".
     // This is an inverted structure and should be used in the first pass:
 
-    return new ConstantScoreWeight(this) {
+    return new ConstantScoreWeight(this, boost) {
 
       private DocIdSet buildMatchingDocIdSet(LeafReader reader, PointValues values) throws IOException {
         DocIdSetBuilder result = new DocIdSetBuilder(reader.maxDoc(), values, field);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/Query.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/Query.java b/lucene/core/src/java/org/apache/lucene/search/Query.java
index 8cab2d1..05aa94f 100644
--- a/lucene/core/src/java/org/apache/lucene/search/Query.java
+++ b/lucene/core/src/java/org/apache/lucene/search/Query.java
@@ -61,8 +61,9 @@ public abstract class Query {
    *
    * @param needsScores   True if document scores ({@link Scorer#score}) or match
    *                      frequencies ({@link Scorer#freq}) are needed.
+   * @param boost         The boost that is propagated by the parent queries.
    */
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     throw new UnsupportedOperationException("Query " + this + " does not implement createWeight");
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/RandomAccessWeight.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/RandomAccessWeight.java b/lucene/core/src/java/org/apache/lucene/search/RandomAccessWeight.java
index fcc1d1a..950ce04 100644
--- a/lucene/core/src/java/org/apache/lucene/search/RandomAccessWeight.java
+++ b/lucene/core/src/java/org/apache/lucene/search/RandomAccessWeight.java
@@ -34,8 +34,8 @@ import org.apache.lucene.util.Bits.MatchNoBits;
 public abstract class RandomAccessWeight extends ConstantScoreWeight {
 
   /** Sole constructor. */
-  protected RandomAccessWeight(Query query) {
-    super(query);
+  protected RandomAccessWeight(Query query, float boost) {
+    super(query, boost);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/SynonymQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/SynonymQuery.java b/lucene/core/src/java/org/apache/lucene/search/SynonymQuery.java
index 4d49cd9..c718dc9 100644
--- a/lucene/core/src/java/org/apache/lucene/search/SynonymQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/SynonymQuery.java
@@ -110,16 +110,16 @@ public final class SynonymQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     if (needsScores) {
-      return new SynonymWeight(this, searcher);
+      return new SynonymWeight(this, searcher, boost);
     } else {
       // if scores are not needed, let BooleanWeight deal with optimizing that case.
       BooleanQuery.Builder bq = new BooleanQuery.Builder();
       for (Term term : terms) {
         bq.add(new TermQuery(term), BooleanClause.Occur.SHOULD);
       }
-      return searcher.rewrite(bq.build()).createWeight(searcher, needsScores);
+      return searcher.rewrite(bq.build()).createWeight(searcher, needsScores, boost);
     }
   }
   
@@ -128,7 +128,7 @@ public final class SynonymQuery extends Query {
     private final Similarity similarity;
     private final Similarity.SimWeight simWeight;
 
-    SynonymWeight(Query query, IndexSearcher searcher) throws IOException {
+    SynonymWeight(Query query, IndexSearcher searcher, float boost) throws IOException {
       super(query);
       CollectionStatistics collectionStats = searcher.collectionStatistics(terms[0].field());
       long docFreq = 0;
@@ -146,7 +146,7 @@ public final class SynonymQuery extends Query {
       }
       TermStatistics pseudoStats = new TermStatistics(null, docFreq, totalTermFreq);
       this.similarity = searcher.getSimilarity(true);
-      this.simWeight = similarity.computeWeight(collectionStats, pseudoStats);
+      this.simWeight = similarity.computeWeight(boost, collectionStats, pseudoStats);
     }
 
     @Override
@@ -184,16 +184,6 @@ public final class SynonymQuery extends Query {
     }
 
     @Override
-    public float getValueForNormalization() throws IOException {
-      return simWeight.getValueForNormalization();
-    }
-
-    @Override
-    public void normalize(float norm, float boost) {
-      simWeight.normalize(norm, boost);
-    }
-
-    @Override
     public Scorer scorer(LeafReaderContext context) throws IOException {
       Similarity.SimScorer simScorer = similarity.simScorer(simWeight, context);
       // we use termscorers + disjunction as an impl detail

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/TermQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/TermQuery.java b/lucene/core/src/java/org/apache/lucene/search/TermQuery.java
index 6547b10..590c3b3 100644
--- a/lucene/core/src/java/org/apache/lucene/search/TermQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/TermQuery.java
@@ -48,8 +48,8 @@ public class TermQuery extends Query {
     private final TermContext termStates;
     private final boolean needsScores;
 
-    public TermWeight(IndexSearcher searcher, boolean needsScores, TermContext termStates)
-        throws IOException {
+    public TermWeight(IndexSearcher searcher, boolean needsScores,
+        float boost, TermContext termStates) throws IOException {
       super(TermQuery.this);
       this.needsScores = needsScores;
       assert termStates != null : "TermContext must not be null";
@@ -70,7 +70,7 @@ public class TermQuery extends Query {
         termStats = new TermStatistics(term.bytes(), docFreq, totalTermFreq);
       }
      
-      this.stats = similarity.computeWeight(collectionStats, termStats);
+      this.stats = similarity.computeWeight(boost, collectionStats, termStats);
     }
 
     @Override
@@ -84,16 +84,6 @@ public class TermQuery extends Query {
     }
 
     @Override
-    public float getValueForNormalization() {
-      return stats.getValueForNormalization();
-    }
-
-    @Override
-    public void normalize(float queryNorm, float boost) {
-      stats.normalize(queryNorm, boost);
-    }
-
-    @Override
     public Scorer scorer(LeafReaderContext context) throws IOException {
       assert termStates.topReaderContext == ReaderUtil.getTopLevelContext(context) : "The top-reader used to create Weight (" + termStates.topReaderContext + ") is not the same as the current reader's top-reader (" + ReaderUtil.getTopLevelContext(context);
       final TermsEnum termsEnum = getTermsEnum(context);
@@ -173,7 +163,7 @@ public class TermQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     final IndexReaderContext context = searcher.getTopReaderContext();
     final TermContext termState;
     if (perReaderTermState == null
@@ -186,7 +176,7 @@ public class TermQuery extends Query {
       termState = this.perReaderTermState;
     }
 
-    return new TermWeight(searcher, needsScores, termState);
+    return new TermWeight(searcher, needsScores, boost, termState);
   }
 
   /** Prints a user-readable version of this query. */

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/Weight.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/Weight.java b/lucene/core/src/java/org/apache/lucene/search/Weight.java
index 40d2aaf..47f553e 100644
--- a/lucene/core/src/java/org/apache/lucene/search/Weight.java
+++ b/lucene/core/src/java/org/apache/lucene/search/Weight.java
@@ -24,7 +24,6 @@ import org.apache.lucene.index.IndexReaderContext;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.search.similarities.Similarity;
 import org.apache.lucene.util.Bits;
 
 /**
@@ -44,13 +43,7 @@ import org.apache.lucene.util.Bits;
  * A <code>Weight</code> is used in the following way:
  * <ol>
  * <li>A <code>Weight</code> is constructed by a top-level query, given a
- * <code>IndexSearcher</code> ({@link Query#createWeight(IndexSearcher, boolean)}).
- * <li>The {@link #getValueForNormalization()} method is called on the
- * <code>Weight</code> to compute the query normalization factor
- * {@link Similarity#queryNorm(float)} of the query clauses contained in the
- * query.
- * <li>The query normalization factor is passed to {@link #normalize(float, float)}. At
- * this point the weighting is complete.
+ * <code>IndexSearcher</code> ({@link Query#createWeight(IndexSearcher, boolean, float)}).
  * <li>A <code>Scorer</code> is constructed by
  * {@link #scorer(org.apache.lucene.index.LeafReaderContext)}.
  * </ol>
@@ -90,12 +83,6 @@ public abstract class Weight {
   public final Query getQuery() {
     return parentQuery;
   }
-  
-  /** The value for normalization of contained query clauses (e.g. sum of squared weights). */
-  public abstract float getValueForNormalization() throws IOException;
-
-  /** Assigns the query normalization factor and boost to this. */
-  public abstract void normalize(float norm, float boost);
 
   /**
    * Returns a {@link Scorer} which can iterate in order over all matching
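
End to end, the shortened lifecycle described in the javadoc above can be sketched as follows (illustrative only; query and searcher are assumed to be in scope, and collection/error handling is omitted):

    Query rewritten = searcher.rewrite(query);
    Weight weight = rewritten.createWeight(searcher, true, 1f); // boost is fixed at construction time
    for (LeafReaderContext leaf : searcher.getIndexReader().leaves()) {
      Scorer scorer = weight.scorer(leaf);                      // may be null if nothing matches in this segment
      if (scorer != null) {
        DocIdSetIterator it = scorer.iterator();
        for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
          float score = scorer.score();                         // consume doc / score as needed
        }
      }
    }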

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/package-info.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/package-info.java b/lucene/core/src/java/org/apache/lucene/search/package-info.java
index 5f9b213..b4d4780 100644
--- a/lucene/core/src/java/org/apache/lucene/search/package-info.java
+++ b/lucene/core/src/java/org/apache/lucene/search/package-info.java
@@ -357,7 +357,7 @@
  *         {@link org.apache.lucene.search.Query Query} class has several methods that are important for
  *         derived classes:
  *         <ol>
- *             <li>{@link org.apache.lucene.search.Query#createWeight(IndexSearcher,boolean) createWeight(IndexSearcher searcher,boolean)} &mdash; A
+ *             <li>{@link org.apache.lucene.search.Query#createWeight(IndexSearcher,boolean,float) createWeight(IndexSearcher searcher, boolean needsScores, float boost)} &mdash; A
  *                 {@link org.apache.lucene.search.Weight Weight} is the internal representation of the
  *                 Query, so each Query implementation must
  *                 provide an implementation of Weight. See the subsection on <a
@@ -366,7 +366,7 @@
  *             <li>{@link org.apache.lucene.search.Query#rewrite(org.apache.lucene.index.IndexReader) rewrite(IndexReader reader)} &mdash; Rewrites queries into primitive queries. Primitive queries are:
  *                 {@link org.apache.lucene.search.TermQuery TermQuery},
  *                 {@link org.apache.lucene.search.BooleanQuery BooleanQuery}, <span
- *                     >and other queries that implement {@link org.apache.lucene.search.Query#createWeight(IndexSearcher,boolean) createWeight(IndexSearcher searcher,boolean,float)}</span></li>
+ *                     >and other queries that implement {@link org.apache.lucene.search.Query#createWeight(IndexSearcher,boolean,float) createWeight(IndexSearcher searcher,boolean needsScores, float boost)}</span></li>
  *         </ol>
  * <a name="weightClass"></a>
  * <h3>The Weight Interface</h3>
@@ -381,28 +381,6 @@
  *                 {@link org.apache.lucene.search.Weight#getQuery getQuery()} &mdash; Pointer to the
  *                 Query that this Weight represents.</li>
  *             <li>
- *                 {@link org.apache.lucene.search.Weight#getValueForNormalization() getValueForNormalization()} &mdash; 
- *                 A weight can return a floating point value to indicate its magnitude for query normalization. Typically
- *                 a weight such as TermWeight that scores via a {@link org.apache.lucene.search.similarities.Similarity Similarity} 
- *                 will just defer to the Similarity's implementation: 
- *                 {@link org.apache.lucene.search.similarities.Similarity.SimWeight#getValueForNormalization SimWeight#getValueForNormalization()}.
- *                 For example, with {@link org.apache.lucene.search.similarities.TFIDFSimilarity Lucene's classic vector-space formula}, this
- *                 is implemented as the sum of squared weights: <code>(idf * boost)<sup>2</sup></code></li>
- *             <li>
- *                 {@link org.apache.lucene.search.Weight#normalize(float,float) normalize(float norm, float boost)} &mdash; 
- *                 Performs query normalization: 
- *                 <ul>
- *                 <li><code>boost</code>: A query-boost factor from any wrapping queries that should be multiplied into every
- *                 document's score. For example, a TermQuery that is wrapped within a BooleanQuery with a boost of <code>5</code> would
- *                 receive this value at this time. This allows the TermQuery (the leaf node in this case) to compute this up-front
- *                 a single time (e.g. by multiplying into the IDF), rather than for every document.</li> 
- *                 <li><code>norm</code>: Passes in a a normalization factor which may
- *                 allow for comparing scores between queries.</li>
- *                 </ul>
- *                 Typically a weight such as TermWeight
- *                 that scores via a {@link org.apache.lucene.search.similarities.Similarity Similarity} will just defer to the Similarity's implementation:
- *                 {@link org.apache.lucene.search.similarities.Similarity.SimWeight#normalize SimWeight#normalize(float,float)}.</li>
- *             <li>
  *                 {@link org.apache.lucene.search.Weight#scorer scorer()} &mdash;
  *                 Construct a new {@link org.apache.lucene.search.Scorer Scorer} for this Weight. See <a href="#scorerClass">The Scorer Class</a>
  *                 below for help defining a Scorer. As the name implies, the Scorer is responsible for doing the actual scoring of documents 

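As a hedged aside for readers following the API change (this sketch is not part of the patch): with query normalization gone, the top-level boost now arrives directly in createWeight and is simply captured by the Weight. A minimal custom Query against the new signature might look like the following, assuming the two-argument ConstantScoreWeight constructor and its score() helper from the patched API:

    import java.io.IOException;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.*;

    public final class AllDocsBoostedQuery extends Query {
      @Override
      public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
        // The boost is captured once here; there is no later normalize() pass.
        return new ConstantScoreWeight(this, boost) {
          @Override
          public Scorer scorer(LeafReaderContext context) throws IOException {
            // score() is assumed to return the boost handed to the constructor above.
            return new ConstantScoreScorer(this, score(), DocIdSetIterator.all(context.reader().maxDoc()));
          }
        };
      }

      @Override
      public String toString(String field) { return "AllDocsBoostedQuery"; }

      @Override
      public boolean equals(Object other) { return sameClassAs(other); }

      @Override
      public int hashCode() { return classHash(); }
    }
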
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/similarities/BM25Similarity.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/similarities/BM25Similarity.java b/lucene/core/src/java/org/apache/lucene/search/similarities/BM25Similarity.java
index 99f76ef..cbce18e 100644
--- a/lucene/core/src/java/org/apache/lucene/search/similarities/BM25Similarity.java
+++ b/lucene/core/src/java/org/apache/lucene/search/similarities/BM25Similarity.java
@@ -205,7 +205,7 @@ public class BM25Similarity extends Similarity {
   }
 
   @Override
-  public final SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
+  public final SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
     Explanation idf = termStats.length == 1 ? idfExplain(collectionStats, termStats[0]) : idfExplain(collectionStats, termStats);
 
     float avgdl = avgFieldLength(collectionStats);
@@ -215,7 +215,7 @@ public class BM25Similarity extends Similarity {
     for (int i = 0; i < cache.length; i++) {
       cache[i] = k1 * ((1 - b) + b * decodeNormValue((byte)i) / avgdl);
     }
-    return new BM25Stats(collectionStats.field(), idf, avgdl, cache);
+    return new BM25Stats(collectionStats.field(), boost, idf, avgdl, cache);
   }
 
   @Override
@@ -267,34 +267,23 @@ public class BM25Similarity extends Similarity {
     /** The average document length. */
     private final float avgdl;
     /** query boost */
-    private float boost;
+    private final float boost;
     /** weight (idf * boost) */
-    private float weight;
+    private final float weight;
     /** field name, for pulling norms */
     private final String field;
     /** precomputed norm[256] with k1 * ((1 - b) + b * dl / avgdl) */
     private final float cache[];
 
-    BM25Stats(String field, Explanation idf, float avgdl, float cache[]) {
+    BM25Stats(String field, float boost, Explanation idf, float avgdl, float cache[]) {
       this.field = field;
+      this.boost = boost;
       this.idf = idf;
       this.avgdl = avgdl;
       this.cache = cache;
-      normalize(1f, 1f);
-    }
-
-    @Override
-    public float getValueForNormalization() {
-      // we return a TF-IDF like normalization to be nice, but we don't actually normalize ourselves.
-      return weight * weight;
+      this.weight = idf.getValue() * boost;
     }
 
-    @Override
-    public void normalize(float queryNorm, float boost) {
-      // we don't normalize with queryNorm at all, we just capture the top-level boost
-      this.boost = boost;
-      this.weight = idf.getValue() * boost;
-    } 
   }
 
   private Explanation explainTFNorm(int doc, Explanation freq, BM25Stats stats, NumericDocValues norms) {

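Illustration with made-up numbers (not from the patch): for an idf explanation value of 2.0 and a query boost of 1.5, BM25Stats now fixes

    weight = idf * boost = 2.0 * 1.5 = 3.0

at construction time, where previously the same product only became final after the normalize(queryNorm, boost) call.
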
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/similarities/BasicStats.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/similarities/BasicStats.java b/lucene/core/src/java/org/apache/lucene/search/similarities/BasicStats.java
index 5304d1f..a08fe2f 100644
--- a/lucene/core/src/java/org/apache/lucene/search/similarities/BasicStats.java
+++ b/lucene/core/src/java/org/apache/lucene/search/similarities/BasicStats.java
@@ -38,15 +38,13 @@ public class BasicStats extends Similarity.SimWeight {
   
   // -------------------------- Boost-related stuff --------------------------
 
-  /** For most Similarities, the immediate and the top level query boosts are
-   * not handled differently. Hence, this field is just the product of the
-   * other two. */
-  protected float boost;
+  /** A query boost. Should be applied as a multiplicative factor to the score. */
+  protected final float boost;
   
   /** Constructor. */
-  public BasicStats(String field) {
+  public BasicStats(String field, float boost) {
     this.field = field;
-    normalize(1f, 1f);
+    this.boost = boost;
   }
   
   // ------------------------- Getter/setter methods -------------------------
@@ -107,31 +105,6 @@ public class BasicStats extends Similarity.SimWeight {
     this.totalTermFreq = totalTermFreq;
   }
   
-  // -------------------------- Boost-related stuff --------------------------
-  
-  /** The square of the raw normalization value.
-   * @see #rawNormalizationValue() */
-  @Override
-  public float getValueForNormalization() {
-    float rawValue = rawNormalizationValue();
-    return rawValue * rawValue;
-  }
-  
-  /** Computes the raw normalization value. This basic implementation returns
-   * the query boost. Subclasses may override this method to include other
-   * factors (such as idf), or to save the value for inclusion in
-   * {@link #normalize(float, float)}, etc.
-   */
-  protected float rawNormalizationValue() {
-    return boost;
-  }
-  
-  /** No normalization is done. {@code boost} is saved in the object, however. */
-  @Override
-  public void normalize(float queryNorm, float boost) {
-    this.boost = boost;
-  }
-  
   /** Returns the total boost. */
   public float getBoost() {
     return boost;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/similarities/ClassicSimilarity.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/similarities/ClassicSimilarity.java b/lucene/core/src/java/org/apache/lucene/search/similarities/ClassicSimilarity.java
index 93b8261..fae85a0 100644
--- a/lucene/core/src/java/org/apache/lucene/search/similarities/ClassicSimilarity.java
+++ b/lucene/core/src/java/org/apache/lucene/search/similarities/ClassicSimilarity.java
@@ -55,12 +55,6 @@ public class ClassicSimilarity extends TFIDFSimilarity {
 
   /** Sole constructor: parameter-free */
   public ClassicSimilarity() {}
-
-  /** Implemented as <code>1/sqrt(sumOfSquaredWeights)</code>. */
-  @Override
-  public float queryNorm(float sumOfSquaredWeights) {
-    return (float)(1.0 / Math.sqrt(sumOfSquaredWeights));
-  }
   
   /**
    * Encodes a normalization factor for storage in an index.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/similarities/LMSimilarity.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/similarities/LMSimilarity.java b/lucene/core/src/java/org/apache/lucene/search/similarities/LMSimilarity.java
index fb8fbf4..2e484eb 100644
--- a/lucene/core/src/java/org/apache/lucene/search/similarities/LMSimilarity.java
+++ b/lucene/core/src/java/org/apache/lucene/search/similarities/LMSimilarity.java
@@ -54,8 +54,8 @@ public abstract class LMSimilarity extends SimilarityBase {
   }
   
   @Override
-  protected BasicStats newStats(String field) {
-    return new LMStats(field);
+  protected BasicStats newStats(String field, float boost) {
+    return new LMStats(field, boost);
   }
 
   /**
@@ -108,8 +108,8 @@ public abstract class LMSimilarity extends SimilarityBase {
     /**
      * Creates LMStats for the provided field and query-time boost
      */
-    public LMStats(String field) {
-      super(field);
+    public LMStats(String field, float boost) {
+      super(field, boost);
     }
     
     /**

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/similarities/MultiSimilarity.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/similarities/MultiSimilarity.java b/lucene/core/src/java/org/apache/lucene/search/similarities/MultiSimilarity.java
index c4b7dd6..68828fc 100644
--- a/lucene/core/src/java/org/apache/lucene/search/similarities/MultiSimilarity.java
+++ b/lucene/core/src/java/org/apache/lucene/search/similarities/MultiSimilarity.java
@@ -50,10 +50,10 @@ public class MultiSimilarity extends Similarity {
   }
 
   @Override
-  public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
+  public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
     SimWeight subStats[] = new SimWeight[sims.length];
     for (int i = 0; i < subStats.length; i++) {
-      subStats[i] = sims[i].computeWeight(collectionStats, termStats);
+      subStats[i] = sims[i].computeWeight(boost, collectionStats, termStats);
     }
     return new MultiStats(subStats);
   }
@@ -109,21 +109,5 @@ public class MultiSimilarity extends Similarity {
     MultiStats(SimWeight subStats[]) {
       this.subStats = subStats;
     }
-    
-    @Override
-    public float getValueForNormalization() {
-      float sum = 0.0f;
-      for (SimWeight stat : subStats) {
-        sum += stat.getValueForNormalization();
-      }
-      return sum / subStats.length;
-    }
-
-    @Override
-    public void normalize(float queryNorm, float boost) {
-      for (SimWeight stat : subStats) {
-        stat.normalize(queryNorm, boost);
-      }
-    }
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/similarities/PerFieldSimilarityWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/similarities/PerFieldSimilarityWrapper.java b/lucene/core/src/java/org/apache/lucene/search/similarities/PerFieldSimilarityWrapper.java
index cfbb9a5..6c05616 100644
--- a/lucene/core/src/java/org/apache/lucene/search/similarities/PerFieldSimilarityWrapper.java
+++ b/lucene/core/src/java/org/apache/lucene/search/similarities/PerFieldSimilarityWrapper.java
@@ -46,10 +46,10 @@ public abstract class PerFieldSimilarityWrapper extends Similarity {
   }
 
   @Override
-  public final SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
+  public final SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
     PerFieldSimWeight weight = new PerFieldSimWeight();
     weight.delegate = get(collectionStats.field());
-    weight.delegateWeight = weight.delegate.computeWeight(collectionStats, termStats);
+    weight.delegateWeight = weight.delegate.computeWeight(boost, collectionStats, termStats);
     return weight;
   }
 
@@ -67,15 +67,5 @@ public abstract class PerFieldSimilarityWrapper extends Similarity {
   static class PerFieldSimWeight extends SimWeight {
     Similarity delegate;
     SimWeight delegateWeight;
-    
-    @Override
-    public float getValueForNormalization() {
-      return delegateWeight.getValueForNormalization();
-    }
-    
-    @Override
-    public void normalize(float queryNorm, float boost) {
-      delegateWeight.normalize(queryNorm, boost);
-    }
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/similarities/Similarity.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/similarities/Similarity.java b/lucene/core/src/java/org/apache/lucene/search/similarities/Similarity.java
index e66fa5f..6a6e93b 100644
--- a/lucene/core/src/java/org/apache/lucene/search/similarities/Similarity.java
+++ b/lucene/core/src/java/org/apache/lucene/search/similarities/Similarity.java
@@ -19,7 +19,6 @@ package org.apache.lucene.search.similarities;
 
 import org.apache.lucene.index.FieldInvertState;
 import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.CollectionStatistics;
 import org.apache.lucene.search.Explanation;
 import org.apache.lucene.search.IndexSearcher;
@@ -78,16 +77,12 @@ import java.util.Collections;
  * <a name="querytime">Query time</a>
  * At query-time, Queries interact with the Similarity via these steps:
  * <ol>
- *   <li>The {@link #computeWeight(CollectionStatistics, TermStatistics...)} method is called a single time,
+ *   <li>The {@link #computeWeight(float, CollectionStatistics, TermStatistics...)} method is called a single time,
  *       allowing the implementation to compute any statistics (such as IDF, average document length, etc)
  *       across <i>the entire collection</i>. The {@link TermStatistics} and {@link CollectionStatistics} passed in 
  *       already contain all of the raw statistics involved, so a Similarity can freely use any combination
  *       of statistics without causing any additional I/O. Lucene makes no assumption about what is 
  *       stored in the returned {@link Similarity.SimWeight} object.
- *   <li>The query normalization process occurs a single time: {@link Similarity.SimWeight#getValueForNormalization()}
- *       is called for each query leaf node, {@link Similarity#queryNorm(float)} is called for the top-level
- *       query, and finally {@link Similarity.SimWeight#normalize(float, float)} passes down the normalization value
- *       and any top-level boosts (e.g. from enclosing {@link BooleanQuery}s).
  *   <li>For each segment in the index, the Query creates a {@link #simScorer(SimWeight, org.apache.lucene.index.LeafReaderContext)}
  *       The score() method is called for each matching document.
  * </ol>
@@ -109,23 +104,6 @@ public abstract class Similarity {
    */
   public Similarity() {}
   
-  /** Computes the normalization value for a query given the sum of the
-   * normalized weights {@link SimWeight#getValueForNormalization()} of 
-   * each of the query terms.  This value is passed back to the 
-   * weight ({@link SimWeight#normalize(float, float)} of each query 
-   * term, to provide a hook to attempt to make scores from different
-   * queries comparable.
-   * <p>
-   * By default this is disabled (returns <code>1</code>), but some
-   * implementations such as {@link TFIDFSimilarity} override this.
-   * 
-   * @param valueForNormalization the sum of the term normalization values
-   * @return a normalization factor for query weights
-   */
-  public float queryNorm(float valueForNormalization) {
-    return 1f;
-  }
-  
   /**
    * Computes the normalization value for a field, given the accumulated
    * state of term processing for this field (see {@link FieldInvertState}).
@@ -144,15 +122,17 @@ public abstract class Similarity {
   /**
    * Compute any collection-level weight (e.g. IDF, average document length, etc) needed for scoring a query.
    *
+   * @param boost a multiplicative factor to apply to the produced scores
    * @param collectionStats collection-level statistics, such as the number of tokens in the collection.
    * @param termStats term-level statistics, such as the document frequency of a term across the collection.
    * @return SimWeight object with the information this Similarity needs to score a query.
    */
-  public abstract SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats);
+  public abstract SimWeight computeWeight(float boost,
+      CollectionStatistics collectionStats, TermStatistics... termStats);
 
   /**
    * Creates a new {@link Similarity.SimScorer} to score matching documents from a segment of the inverted index.
-   * @param weight collection information from {@link #computeWeight(CollectionStatistics, TermStatistics...)}
+   * @param weight collection information from {@link #computeWeight(float, CollectionStatistics, TermStatistics...)}
    * @param context segment of the inverted index to be scored.
    * @return SloppySimScorer for scoring documents across <code>context</code>
    * @throws IOException if there is a low-level I/O error
@@ -215,24 +195,6 @@ public abstract class Similarity {
      * constructors, typically implicit.)
      */
     public SimWeight() {}
-    
-    /** The value for normalization of contained query clauses (e.g. sum of squared weights).
-     * <p>
-     * NOTE: a Similarity implementation might not use any query normalization at all,
-     * it's not required. However, if it wants to participate in query normalization,
-     * it can return a value here.
-     */
-    public abstract float getValueForNormalization();
-    
-    /** Assigns the query normalization factor and boost from parent queries to this.
-     * <p>
-     * NOTE: a Similarity implementation might not use this normalized value at all,
-     * it's not required. However, it's usually a good idea to at least incorporate 
-     * the boost into its score.
-     * <p>
-     * NOTE: If this method is called several times, it behaves as if only the
-     * last call was performed.
-     */
-    public abstract void normalize(float queryNorm, float boost);
+
   }
 }

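To make the slimmed-down query-time flow concrete, here is a hedged sketch (not from the patch) of how a leaf query's Weight would now obtain its SimWeight; searcher, term, similarity and boost are assumed to be in scope:

    // Gather collection- and term-level statistics once per query.
    CollectionStatistics collectionStats = searcher.collectionStatistics(term.field());
    TermContext termContext = TermContext.build(searcher.getTopReaderContext(), term);
    TermStatistics termStats = searcher.termStatistics(term, termContext);

    // The boost is passed straight through; there is no longer a
    // getValueForNormalization() / queryNorm() / normalize() round-trip.
    Similarity.SimWeight simWeight = similarity.computeWeight(boost, collectionStats, termStats);
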
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/similarities/SimilarityBase.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/similarities/SimilarityBase.java b/lucene/core/src/java/org/apache/lucene/search/similarities/SimilarityBase.java
index 0b7c342..c2c117d 100644
--- a/lucene/core/src/java/org/apache/lucene/search/similarities/SimilarityBase.java
+++ b/lucene/core/src/java/org/apache/lucene/search/similarities/SimilarityBase.java
@@ -83,18 +83,18 @@ public abstract class SimilarityBase extends Similarity {
   }
   
   @Override
-  public final SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
+  public final SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
     BasicStats stats[] = new BasicStats[termStats.length];
     for (int i = 0; i < termStats.length; i++) {
-      stats[i] = newStats(collectionStats.field());
+      stats[i] = newStats(collectionStats.field(), boost);
       fillBasicStats(stats[i], collectionStats, termStats[i]);
     }
     return stats.length == 1 ? stats[0] : new MultiSimilarity.MultiStats(stats);
   }
   
   /** Factory method to return a custom stats object */
-  protected BasicStats newStats(String field) {
-    return new BasicStats(field);
+  protected BasicStats newStats(String field, float boost) {
+    return new BasicStats(field, boost);
   }
   
   /** Fills all member fields defined in {@code BasicStats} in {@code stats}. 

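A hedged sketch of what this means for SimilarityBase subclasses (illustrative only, not in the patch): the boost is fixed on the BasicStats object at computeWeight time and can simply be read back via getBoost() when scoring:

    import org.apache.lucene.search.similarities.BasicStats;
    import org.apache.lucene.search.similarities.SimilarityBase;

    public class SimpleLogSimilarity extends SimilarityBase {
      @Override
      protected float score(BasicStats stats, float freq, float docLen) {
        // stats.getBoost() returns the boost handed to computeWeight(boost, ...).
        return stats.getBoost() * (float) Math.log(1 + freq);
      }

      @Override
      public String toString() {
        return "SimpleLogSimilarity";
      }
    }
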
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/similarities/TFIDFSimilarity.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/similarities/TFIDFSimilarity.java b/lucene/core/src/java/org/apache/lucene/search/similarities/TFIDFSimilarity.java
index 3e92375..ff708b4 100644
--- a/lucene/core/src/java/org/apache/lucene/search/similarities/TFIDFSimilarity.java
+++ b/lucene/core/src/java/org/apache/lucene/search/similarities/TFIDFSimilarity.java
@@ -262,9 +262,6 @@ import org.apache.lucene.util.BytesRef;
  *   <tr>
  *     <td valign="middle" align="right" rowspan="1">
  *       score(q,d) &nbsp; = &nbsp;
- *       <A HREF="#formula_queryNorm"><span style="color: #FF33CC">queryNorm(q)</span></A> &nbsp;&middot;&nbsp;
- *     </td>
- *     <td valign="bottom" align="center" rowspan="1" style="text-align: center">
  *       <big><big><big>&sum;</big></big></big>
  *     </td>
  *     <td valign="middle" align="right" rowspan="1">
@@ -354,72 +351,6 @@ import org.apache.lucene.util.BytesRef;
  *      <br>&nbsp;<br>
  *    </li>
  *
- *    <li><b>
- *      <A NAME="formula_queryNorm"></A>
- *      <i>queryNorm(q)</i>
- *      </b>
- *      is a normalizing factor used to make scores between queries comparable.
- *      This factor does not affect document ranking (since all ranked documents are multiplied by the same factor),
- *      but rather just attempts to make scores from different queries (or even different indexes) comparable.
- *      This is a search time factor computed by the Similarity in effect at search time.
- *
- *      The default computation in
- *      {@link org.apache.lucene.search.similarities.ClassicSimilarity#queryNorm(float) ClassicSimilarity}
- *      produces a <a href="http://en.wikipedia.org/wiki/Euclidean_norm#Euclidean_norm">Euclidean norm</a>:
- *      <br>&nbsp;<br>
- *      <table cellpadding="1" cellspacing="0" border="0" style="width:auto; margin-left:auto; margin-right:auto" summary="query normalization computation">
- *        <tr>
- *          <td valign="middle" align="right" rowspan="1">
- *            queryNorm(q)  &nbsp; = &nbsp;
- *            {@link org.apache.lucene.search.similarities.ClassicSimilarity#queryNorm(float) queryNorm(sumOfSquaredWeights)}
- *            &nbsp; = &nbsp;
- *          </td>
- *          <td valign="middle" align="center" rowspan="1">
- *            <table summary="query normalization computation">
- *               <tr><td align="center" style="text-align: center"><big>1</big></td></tr>
- *               <tr><td align="center" style="text-align: center"><big>
- *                  &ndash;&ndash;&ndash;&ndash;&ndash;&ndash;&ndash;&ndash;&ndash;&ndash;&ndash;&ndash;&ndash;&ndash;
- *               </big></td></tr>
- *               <tr><td align="center" style="text-align: center">sumOfSquaredWeights<sup><big>&frac12;</big></sup></td></tr>
- *            </table>
- *          </td>
- *        </tr>
- *      </table>
- *      <br>&nbsp;<br>
- *
- *      The sum of squared weights (of the query terms) is
- *      computed by the query {@link org.apache.lucene.search.Weight} object.
- *      For example, a {@link org.apache.lucene.search.BooleanQuery}
- *      computes this value as:
- *
- *      <br>&nbsp;<br>
- *      <table cellpadding="1" cellspacing="0" border="0" style="width:auto; margin-left:auto; margin-right:auto" summary="sum of squared weights computation">
- *        <tr>
- *          <td valign="middle" align="right" rowspan="1">
- *            {@link org.apache.lucene.search.Weight#getValueForNormalization() sumOfSquaredWeights} &nbsp; = &nbsp;
- *            {@link org.apache.lucene.search.BoostQuery#getBoost() q.getBoost()} <sup><big>2</big></sup>
- *            &nbsp;&middot;&nbsp;
- *          </td>
- *          <td valign="bottom" align="center" rowspan="1" style="text-align: center">
- *            <big><big><big>&sum;</big></big></big>
- *          </td>
- *          <td valign="middle" align="right" rowspan="1">
- *            <big><big>(</big></big>
- *            <A HREF="#formula_idf">idf(t)</A> &nbsp;&middot;&nbsp;
- *            <A HREF="#formula_termBoost">t.getBoost()</A>
- *            <big><big>) <sup>2</sup> </big></big>
- *          </td>
- *        </tr>
- *        <tr valign="top">
- *          <td></td>
- *          <td align="center" style="text-align: center"><small>t in q</small></td>
- *          <td></td>
- *        </tr>
- *      </table>
- *      <br>&nbsp;<br>
- *
- *    </li>
- *
  *    <li>
  *      <A NAME="formula_termBoost"></A>
  *      <b><i>t.getBoost()</i></b>
@@ -495,22 +426,6 @@ public abstract class TFIDFSimilarity extends Similarity {
    */
   public TFIDFSimilarity() {}
   
-  /** Computes the normalization value for a query given the sum of the squared
-   * weights of each of the query terms.  This value is multiplied into the
-   * weight of each query term. While the classic query normalization factor is
-   * computed as 1/sqrt(sumOfSquaredWeights), other implementations might
-   * completely ignore sumOfSquaredWeights (ie return 1).
-   *
-   * <p>This does not affect ranking, but the default implementation does make scores
-   * from different queries more comparable than they would be by eliminating the
-   * magnitude of the Query vector as a factor in the score.
-   *
-   * @param sumOfSquaredWeights the sum of the squares of query term weights
-   * @return a normalization factor for query weights
-   */
-  @Override
-  public abstract float queryNorm(float sumOfSquaredWeights);
-  
   /** Computes a score factor based on a term or phrase's frequency in a
    * document.  This value is multiplied by the {@link #idf(long, long)}
    * factor for each term in the query and these products are then summed to
@@ -652,11 +567,11 @@ public abstract class TFIDFSimilarity extends Similarity {
   public abstract float scorePayload(int doc, int start, int end, BytesRef payload);
 
   @Override
-  public final SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
+  public final SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
     final Explanation idf = termStats.length == 1
     ? idfExplain(collectionStats, termStats[0])
     : idfExplain(collectionStats, termStats);
-    return new IDFStats(collectionStats.field(), idf);
+    return new IDFStats(collectionStats.field(), boost, idf);
   }
 
   @Override
@@ -672,7 +587,7 @@ public abstract class TFIDFSimilarity extends Similarity {
     
     TFIDFSimScorer(IDFStats stats, NumericDocValues norms) throws IOException {
       this.stats = stats;
-      this.weightValue = stats.value;
+      this.weightValue = stats.queryWeight;
       this.norms = norms;
     }
     
@@ -705,49 +620,18 @@ public abstract class TFIDFSimilarity extends Similarity {
     private final String field;
     /** The idf and its explanation */
     private final Explanation idf;
-    private float queryNorm;
-    private float boost;
-    private float queryWeight;
-    private float value;
+    private final float boost;
+    private final float queryWeight;
     
-    public IDFStats(String field, Explanation idf) {
+    public IDFStats(String field, float boost, Explanation idf) {
       // TODO: Validate?
       this.field = field;
       this.idf = idf;
-      normalize(1f, 1f);
-    }
-
-    @Override
-    public float getValueForNormalization() {
-      // TODO: (sorta LUCENE-1907) make non-static class and expose this squaring via a nice method to subclasses?
-      return queryWeight * queryWeight;  // sum of squared weights
-    }
-
-    @Override
-    public void normalize(float queryNorm, float boost) {
       this.boost = boost;
-      this.queryNorm = queryNorm;
-      queryWeight = queryNorm * boost * idf.getValue();
-      value = queryWeight * idf.getValue();         // idf for document
+      this.queryWeight = boost * idf.getValue();
     }
   }  
 
-  private Explanation explainQuery(IDFStats stats) {
-    List<Explanation> subs = new ArrayList<>();
-
-    Explanation boostExpl = Explanation.match(stats.boost, "boost");
-    if (stats.boost != 1.0f)
-      subs.add(boostExpl);
-    subs.add(stats.idf);
-
-    Explanation queryNormExpl = Explanation.match(stats.queryNorm,"queryNorm");
-    subs.add(queryNormExpl);
-
-    return Explanation.match(
-        boostExpl.getValue() * stats.idf.getValue() * queryNormExpl.getValue(),
-        "queryWeight, product of:", subs);
-  }
-
   private Explanation explainField(int doc, Explanation freq, IDFStats stats, NumericDocValues norms) {
     Explanation tfExplanation = Explanation.match(tf(freq.getValue()), "tf(freq="+freq.getValue()+"), with freq of:", freq);
     Explanation fieldNormExpl = Explanation.match(
@@ -761,9 +645,9 @@ public abstract class TFIDFSimilarity extends Similarity {
   }
 
   private Explanation explainScore(int doc, Explanation freq, IDFStats stats, NumericDocValues norms) {
-    Explanation queryExpl = explainQuery(stats);
+    Explanation queryExpl = Explanation.match(stats.boost, "boost");
     Explanation fieldExpl = explainField(doc, freq, stats, norms);
-    if (queryExpl.getValue() == 1f) {
+    if (stats.boost == 1f) {
       return fieldExpl;
     }
     return Explanation.match(

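Purely as an illustration with made-up numbers: for idf = 4.0 and boost = 2.0, the reworked IDFStats fixes queryWeight = boost * idf = 8.0 once at construction; TFIDFSimScorer then scores each hit as roughly queryWeight * tf(freq) * fieldNorm, with no separate queryNorm factor left in the product.
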
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/spans/FieldMaskingSpanQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/spans/FieldMaskingSpanQuery.java b/lucene/core/src/java/org/apache/lucene/search/spans/FieldMaskingSpanQuery.java
index 5c5e4dc..7759683 100644
--- a/lucene/core/src/java/org/apache/lucene/search/spans/FieldMaskingSpanQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/spans/FieldMaskingSpanQuery.java
@@ -89,8 +89,8 @@ public final class FieldMaskingSpanQuery extends SpanQuery {
   // ...this is done to be more consistent with things like SpanFirstQuery
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return maskedQuery.createWeight(searcher, needsScores);
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return maskedQuery.createWeight(searcher, needsScores, boost);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/spans/SpanBoostQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/spans/SpanBoostQuery.java b/lucene/core/src/java/org/apache/lucene/search/spans/SpanBoostQuery.java
index 9ecd743..0ce3b0a 100644
--- a/lucene/core/src/java/org/apache/lucene/search/spans/SpanBoostQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/spans/SpanBoostQuery.java
@@ -18,17 +18,10 @@ package org.apache.lucene.search.spans;
 
 
 import java.io.IOException;
-import java.util.Map;
 import java.util.Objects;
-import java.util.Set;
-import java.util.TreeMap;
 
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.index.TermContext;
 import org.apache.lucene.search.BoostQuery;
-import org.apache.lucene.search.Explanation;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 
@@ -115,51 +108,8 @@ public final class SpanBoostQuery extends SpanQuery {
   }
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    final SpanWeight weight = query.createWeight(searcher, needsScores);
-    if (needsScores == false) {
-      return weight;
-    }
-    Map<Term, TermContext> terms = new TreeMap<>();
-    weight.extractTermContexts(terms);
-    weight.normalize(1f, boost);
-    return new SpanWeight(this, searcher, terms) {
-      
-      @Override
-      public void extractTerms(Set<Term> terms) {
-        weight.extractTerms(terms);
-      }
-
-      @Override
-      public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-        return weight.explain(context, doc);
-      }
-
-      @Override
-      public float getValueForNormalization() throws IOException {
-        return weight.getValueForNormalization();
-      }
-
-      @Override
-      public void normalize(float norm, float boost) {
-        weight.normalize(norm, SpanBoostQuery.this.boost * boost);
-      }
-      
-      @Override
-      public Spans getSpans(LeafReaderContext ctx, Postings requiredPostings) throws IOException {
-        return weight.getSpans(ctx, requiredPostings);
-      }
-
-      @Override
-      public SpanScorer scorer(LeafReaderContext context) throws IOException {
-        return weight.scorer(context);
-      }
-
-      @Override
-      public void extractTermContexts(Map<Term,TermContext> contexts) {
-        weight.extractTermContexts(contexts);
-      }
-    };
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return query.createWeight(searcher, needsScores, SpanBoostQuery.this.boost * boost);
   }
 
 }

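A hedged usage note (not part of the patch): nested span boosts now just multiply through createWeight instead of being applied by a wrapping weight's normalize(). For example, assuming an IndexSearcher named searcher is in scope:

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.spans.*;

    SpanQuery inner = new SpanTermQuery(new Term("body", "lucene"));
    SpanQuery boosted = new SpanBoostQuery(new SpanBoostQuery(inner, 2f), 3f);
    // boosted.createWeight(searcher, true, 1f) delegates to
    // inner.createWeight(searcher, true, 6f): the two boosts collapse into one float.
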
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5def78ba/lucene/core/src/java/org/apache/lucene/search/spans/SpanContainQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/spans/SpanContainQuery.java b/lucene/core/src/java/org/apache/lucene/search/spans/SpanContainQuery.java
index b122a09..8bb2633 100644
--- a/lucene/core/src/java/org/apache/lucene/search/spans/SpanContainQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/spans/SpanContainQuery.java
@@ -62,8 +62,8 @@ abstract class SpanContainQuery extends SpanQuery implements Cloneable {
     final SpanWeight littleWeight;
 
     public SpanContainWeight(IndexSearcher searcher, Map<Term, TermContext> terms,
-                             SpanWeight bigWeight, SpanWeight littleWeight) throws IOException {
-      super(SpanContainQuery.this, searcher, terms);
+                             SpanWeight bigWeight, SpanWeight littleWeight, float boost) throws IOException {
+      super(SpanContainQuery.this, searcher, terms, boost);
       this.bigWeight = bigWeight;
       this.littleWeight = littleWeight;
     }


[33/51] [abbrv] lucene-solr:apiv2: SOLR-7362: Log the actual payload being sent to Solr

Posted by sa...@apache.org.
SOLR-7362: Log the actual payload being sent to Solr


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/05b38f5f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/05b38f5f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/05b38f5f

Branch: refs/heads/apiv2
Commit: 05b38f5f21f18c0f38fbd0907ffd8b2bd8cf0132
Parents: 10f3700
Author: Shalin Shekhar Mangar <sh...@apache.org>
Authored: Mon Jul 18 10:59:23 2016 +0530
Committer: Shalin Shekhar Mangar <sh...@apache.org>
Committed: Mon Jul 18 10:59:23 2016 +0530

----------------------------------------------------------------------
 solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/05b38f5f/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java b/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
index 250ccf0..41e32dd 100644
--- a/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
+++ b/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
@@ -168,7 +168,7 @@ public class TestSolrConfigHandler extends RestTestBase {
 
   public static void runConfigCommand(RestTestHarness harness, String uri, String payload) throws IOException {
     String json = SolrTestCaseJ4.json(payload);
-    log.info("going to send config command. path {} , payload: ", uri, payload);
+    log.info("going to send config command. path {} , payload: {}", uri, payload);
     String response = harness.post(uri, json);
     Map map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
     assertNull(response, map.get("errors"));

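For context (general SLF4J behavior, not from the patch): each {} placeholder consumes one argument, so the original format string with a single {} silently dropped the payload argument; with the second {} added, both values are interpolated:

    log.info("going to send config command. path {} , payload: {}", uri, payload);
    // e.g. "going to send config command. path /config , payload: {...}"  (values illustrative)
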

[49/51] [abbrv] lucene-solr:apiv2: Merge remote-tracking branch 'origin/master'

Posted by sa...@apache.org.
Merge remote-tracking branch 'origin/master'


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d4cb52fb
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d4cb52fb
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d4cb52fb

Branch: refs/heads/apiv2
Commit: d4cb52fb206ca4e00f9983152294d09205c023b0
Parents: fb4de6a 71541bc
Author: Noble Paul <no...@apache.org>
Authored: Wed Jul 20 22:01:10 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Wed Jul 20 22:01:10 2016 +0530

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |   3 +
 .../lucene/search/LeafFieldComparator.java      |   7 +-
 .../org/apache/lucene/search/TermQuery.java     |  58 ++++---
 .../org/apache/lucene/search/TestTermQuery.java | 154 +++++++++++++++++++
 4 files changed, 198 insertions(+), 24 deletions(-)
----------------------------------------------------------------------



[41/51] [abbrv] lucene-solr:apiv2: Merge remote-tracking branch 'origin/master'

Posted by sa...@apache.org.
Merge remote-tracking branch 'origin/master'


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/b4c8f567
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/b4c8f567
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/b4c8f567

Branch: refs/heads/apiv2
Commit: b4c8f5678d853c91cf0e33a749bd0bd5c3d800e4
Parents: 2d1496c 832dacf
Author: Noble Paul <no...@apache.org>
Authored: Tue Jul 19 18:21:38 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Tue Jul 19 18:21:38 2016 +0530

----------------------------------------------------------------------
 solr/core/src/java/org/apache/solr/search/SolrCoreParser.java | 3 +++
 1 file changed, 3 insertions(+)
----------------------------------------------------------------------



[35/51] [abbrv] lucene-solr:apiv2: SOLR-9275: make XML QueryParser support (defType=xmlparser) extensible via configuration

Posted by sa...@apache.org.
SOLR-9275: make XML QueryParser support (defType=xmlparser) extensible via configuration


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/6f76ac13
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/6f76ac13
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/6f76ac13

Branch: refs/heads/apiv2
Commit: 6f76ac1314510fe09860a92e246c88bfae168fbb
Parents: 9f13bf6
Author: Christine Poerschke <cp...@apache.org>
Authored: Mon Jul 18 14:26:38 2016 +0100
Committer: Christine Poerschke <cp...@apache.org>
Committed: Mon Jul 18 15:45:33 2016 +0100

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  3 +
 .../org/apache/solr/search/SolrCoreParser.java  | 37 +++++++++-
 .../apache/solr/search/SolrQueryBuilder.java    | 34 +++++++++
 .../apache/solr/search/XmlQParserPlugin.java    | 11 +++
 .../conf/solrconfig-testxmlparser.xml           | 33 +++++++++
 .../apache/solr/search/GoodbyeQueryBuilder.java | 39 ++++++++++
 .../apache/solr/search/HandyQueryBuilder.java   | 53 +++++++++++++
 .../apache/solr/search/HelloQueryBuilder.java   | 39 ++++++++++
 .../solr/search/TestXmlQParserPlugin.java       | 78 ++++++++++++++++++++
 9 files changed, 324 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6f76ac13/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 54317ed..4864925 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -97,6 +97,9 @@ New Features
 
 * SOLR-9240: Support parallel ETL with the topic expression (Joel Bernstein)
 
+* SOLR-9275: XML QueryParser support (defType=xmlparser) now extensible via configuration.
+  (Christine Poerschke)
+
 Bug Fixes
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6f76ac13/solr/core/src/java/org/apache/solr/search/SolrCoreParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/SolrCoreParser.java b/solr/core/src/java/org/apache/solr/search/SolrCoreParser.java
index cf3fb42..1e0e5bd 100755
--- a/solr/core/src/java/org/apache/solr/search/SolrCoreParser.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrCoreParser.java
@@ -16,23 +16,54 @@
  */
 package org.apache.solr.search;
 
+import java.util.Map;
+
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.queryparser.xml.CoreParser;
+import org.apache.lucene.queryparser.xml.QueryBuilder;
 
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.util.plugin.NamedListInitializedPlugin;
 
 /**
  * Assembles a QueryBuilder which uses Query objects from Solr's <code>search</code> module
  * in addition to Query objects supported by the Lucene <code>CoreParser</code>.
  */
-public class SolrCoreParser extends CoreParser {
+public class SolrCoreParser extends CoreParser implements NamedListInitializedPlugin {
+
+  protected final SolrQueryRequest req;
 
   public SolrCoreParser(String defaultField, Analyzer analyzer,
       SolrQueryRequest req) {
     super(defaultField, analyzer);
+    this.req = req;
+  }
+
+  @Override
+  public void init(NamedList initArgs) {
+    final SolrResourceLoader loader;
+    if (req == null) {
+      loader = new SolrResourceLoader();
+    } else {
+      loader = req.getCore().getResourceLoader();
+    }
+
+    final Iterable<Map.Entry<String,Object>> args = initArgs;
+    for (final Map.Entry<String,Object> entry : args) {
+      final String queryName = entry.getKey();
+      final String queryBuilderClassName = (String)entry.getValue();
+
+      final SolrQueryBuilder queryBuilder = loader.newInstance(
+          queryBuilderClassName,
+          SolrQueryBuilder.class,
+          null,
+          new Class[] {String.class, Analyzer.class, SolrQueryRequest.class, QueryBuilder.class},
+          new Object[] {defaultField, analyzer, req, this});
 
-    // final IndexSchema schema = req.getSchema();
-    // lucene_parser.addQueryBuilder("SomeOtherQuery", new SomeOtherQueryBuilder(schema));
+      this.queryFactory.addBuilder(queryName, queryBuilder);
+    }
   }
 
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6f76ac13/solr/core/src/java/org/apache/solr/search/SolrQueryBuilder.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/SolrQueryBuilder.java b/solr/core/src/java/org/apache/solr/search/SolrQueryBuilder.java
new file mode 100644
index 0000000..e813512
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/search/SolrQueryBuilder.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.search;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.queryparser.xml.QueryBuilder;
+import org.apache.solr.request.SolrQueryRequest;
+
+public abstract class SolrQueryBuilder implements QueryBuilder {
+
+  protected final SolrQueryRequest req;
+  protected final QueryBuilder queryFactory;
+
+  public SolrQueryBuilder(String defaultField, Analyzer analyzer,
+      SolrQueryRequest req, QueryBuilder queryFactory) {
+    this.req = req;
+    this.queryFactory = queryFactory;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6f76ac13/solr/core/src/java/org/apache/solr/search/XmlQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/XmlQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/XmlQParserPlugin.java
index cb6b45e..ee8e062 100755
--- a/solr/core/src/java/org/apache/solr/search/XmlQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/XmlQParserPlugin.java
@@ -25,12 +25,21 @@ import org.apache.lucene.search.Query;
 
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.util.NamedList;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.schema.IndexSchema;
 
 public class XmlQParserPlugin extends QParserPlugin {
   public static final String NAME = "xmlparser";
 
+  private NamedList args;
+
+  @Override
+  public void init( NamedList args ) {
+    super.init(args);
+    this.args = args;
+  }
+
   private class XmlQParser extends QParser {
 
     public XmlQParser(String qstr, SolrParams localParams,
@@ -46,7 +55,9 @@ public class XmlQParserPlugin extends QParserPlugin {
       final IndexSchema schema = req.getSchema();
       final String defaultField = QueryParsing.getDefaultField(schema, getParam(CommonParams.DF));
       final Analyzer analyzer = schema.getQueryAnalyzer();
+
       final SolrCoreParser solrParser = new SolrCoreParser(defaultField, analyzer, req);
+      solrParser.init(args);
       try {
         return solrParser.parse(new ByteArrayInputStream(qstr.getBytes(StandardCharsets.UTF_8)));
       } catch (ParserException e) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6f76ac13/solr/core/src/test-files/solr/collection1/conf/solrconfig-testxmlparser.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig-testxmlparser.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig-testxmlparser.xml
new file mode 100644
index 0000000..40c39a1
--- /dev/null
+++ b/solr/core/src/test-files/solr/collection1/conf/solrconfig-testxmlparser.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" ?>
+
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!-- solrconfig-basic.xml plus a queryParser element -->
+<config>
+  <luceneMatchVersion>${tests.luceneMatchVersion:LATEST}</luceneMatchVersion>
+  <dataDir>${solr.data.dir:}</dataDir>
+  <xi:include href="solrconfig.snippet.randomindexconfig.xml" xmlns:xi="http://www.w3.org/2001/XInclude"/>
+  <directoryFactory name="DirectoryFactory" class="${solr.directoryFactory:solr.RAMDirectoryFactory}"/>
+  <schemaFactory class="ClassicIndexSchemaFactory"/>
+  <requestHandler name="standard" class="solr.StandardRequestHandler" />
+  <queryParser name="testxmlparser" class="XmlQParserPlugin">
+    <str name="HandyQuery">org.apache.solr.search.HandyQueryBuilder</str>
+    <str name="HelloQuery">org.apache.solr.search.HelloQueryBuilder</str>
+    <str name="GoodbyeQuery">org.apache.solr.search.GoodbyeQueryBuilder</str>
+  </queryParser>
+</config>

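For readers wiring this up themselves, a hedged SolrJ illustration (collection name, URL and builder names are only examples reused from the test config above, and HttpSolrClient.Builder is assumed to be available) of how a request would exercise the configured builders:

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;
    import org.apache.solr.client.solrj.response.QueryResponse;

    // Fragment; in real code this lives in a method that may throw SolrServerException/IOException.
    try (HttpSolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/collection1").build()) {
      SolrQuery query = new SolrQuery("<HandyQuery><Left><HelloQuery/></Left><Right><GoodbyeQuery/></Right></HandyQuery>");
      query.set("defType", "testxmlparser");   // the queryParser name registered in solrconfig.xml
      QueryResponse rsp = client.query(query);
      System.out.println("hits: " + rsp.getResults().getNumFound());
    }
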
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6f76ac13/solr/core/src/test/org/apache/solr/search/GoodbyeQueryBuilder.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/GoodbyeQueryBuilder.java b/solr/core/src/test/org/apache/solr/search/GoodbyeQueryBuilder.java
new file mode 100644
index 0000000..af258d4
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/search/GoodbyeQueryBuilder.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.search;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.queryparser.xml.ParserException;
+import org.apache.lucene.queryparser.xml.QueryBuilder;
+import org.apache.lucene.search.MatchNoDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.solr.request.SolrQueryRequest;
+import org.w3c.dom.Element;
+
+public class GoodbyeQueryBuilder extends SolrQueryBuilder {
+
+  public GoodbyeQueryBuilder(String defaultField, Analyzer analyzer,
+      SolrQueryRequest req, QueryBuilder queryFactory) {
+    super(defaultField, analyzer, req, queryFactory);
+  }
+
+  @Override
+  public Query getQuery(Element e) throws ParserException {
+    return new MatchNoDocsQuery();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6f76ac13/solr/core/src/test/org/apache/solr/search/HandyQueryBuilder.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/HandyQueryBuilder.java b/solr/core/src/test/org/apache/solr/search/HandyQueryBuilder.java
new file mode 100644
index 0000000..14a8aac
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/search/HandyQueryBuilder.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.search;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.queryparser.xml.DOMUtils;
+import org.apache.lucene.queryparser.xml.ParserException;
+import org.apache.lucene.queryparser.xml.QueryBuilder;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.Query;
+import org.apache.solr.request.SolrQueryRequest;
+import org.w3c.dom.Element;
+
+// A simple test query builder to demonstrate use of
+// SolrQueryBuilder's queryFactory constructor argument.
+public class HandyQueryBuilder extends SolrQueryBuilder {
+
+  public HandyQueryBuilder(String defaultField, Analyzer analyzer,
+      SolrQueryRequest req, QueryBuilder queryFactory) {
+    super(defaultField, analyzer, req, queryFactory);
+  }
+
+  @Override
+  public Query getQuery(Element e) throws ParserException {
+    final BooleanQuery.Builder bq = new BooleanQuery.Builder();
+    final Query lhsQ = getSubQuery(e, "Left");
+    final Query rhsQ = getSubQuery(e, "Right");
+    bq.add(new BooleanClause(lhsQ, BooleanClause.Occur.SHOULD));
+    bq.add(new BooleanClause(rhsQ, BooleanClause.Occur.SHOULD));
+    return bq.build();
+  }
+
+  private Query getSubQuery(Element e, String name) throws ParserException {
+    Element subE = DOMUtils.getChildByTagOrFail(e, name);
+    subE = DOMUtils.getFirstChildOrFail(subE);
+    return queryFactory.getQuery(subE);
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6f76ac13/solr/core/src/test/org/apache/solr/search/HelloQueryBuilder.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/HelloQueryBuilder.java b/solr/core/src/test/org/apache/solr/search/HelloQueryBuilder.java
new file mode 100644
index 0000000..642047f
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/search/HelloQueryBuilder.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.search;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.queryparser.xml.ParserException;
+import org.apache.lucene.queryparser.xml.QueryBuilder;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.solr.request.SolrQueryRequest;
+import org.w3c.dom.Element;
+
+public class HelloQueryBuilder extends SolrQueryBuilder {
+
+  public HelloQueryBuilder(String defaultField, Analyzer analyzer,
+      SolrQueryRequest req, QueryBuilder queryFactory) {
+    super(defaultField, analyzer, req, queryFactory);
+  }
+
+  @Override
+  public Query getQuery(Element e) throws ParserException {
+    return new MatchAllDocsQuery();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6f76ac13/solr/core/src/test/org/apache/solr/search/TestXmlQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestXmlQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestXmlQParserPlugin.java
new file mode 100644
index 0000000..3c4edae
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/search/TestXmlQParserPlugin.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.search;
+
+import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestXmlQParserPlugin extends SolrTestCaseJ4 {
+
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    initCore("solrconfig-testxmlparser.xml", "schema-minimal.xml");
+  }
+
+  @Override
+  @Before
+  public void setUp() throws Exception {
+    // if you override setUp or tearDown, you'd better call
+    // the superclass's version
+    super.setUp();
+    clearIndex();
+    assertU(commit());
+  }
+
+  @Test
+  public void testHelloQuery() throws Exception {
+    final int numDocs = random().nextInt(10);
+    implTestQuery(numDocs, "<HelloQuery/>", numDocs);
+  }
+
+  @Test
+  public void testGoodbyeQuery() throws Exception {
+    final int numDocs = random().nextInt(10);
+    implTestQuery(numDocs, "<GoodbyeQuery/>", 0);
+  }
+
+  @Test
+  public void testHandyQuery() throws Exception {
+    final int numDocs = random().nextInt(10);
+    final String q = "<HandyQuery><Left><HelloQuery/></Left><Right><GoodbyeQuery/></Right></HandyQuery>";
+    implTestQuery(numDocs, q, numDocs);
+  }
+
+  public void implTestQuery(int numDocs, String q, int expectedCount) throws Exception {
+    // add some documents
+    for (int ii=1; ii<=numDocs; ++ii) {
+      String[] doc = {"id",ii+"0"};
+      assertU(adoc(doc));
+      if (random().nextBoolean()) {
+        assertU(commit());
+      }
+    }
+    assertU(commit());
+    // and then run the query
+    ModifiableSolrParams params = new ModifiableSolrParams();
+    params.add("defType", "testxmlparser");
+    params.add("q", q);
+    assertQ(req(params), "*[count(//doc)="+expectedCount+"]");
+  }
+
+}


[22/51] [abbrv] lucene-solr:apiv2: LUCENE-7380: add Polygon.fromGeoJSON

Posted by sa...@apache.org.
LUCENE-7380: add Polygon.fromGeoJSON


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/343f374b
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/343f374b
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/343f374b

Branch: refs/heads/apiv2
Commit: 343f374b530fa71dc6102d74725b536f5f1367f3
Parents: 2e0b2f5
Author: Mike McCandless <mi...@apache.org>
Authored: Fri Jul 15 10:51:08 2016 -0400
Committer: Mike McCandless <mi...@apache.org>
Committed: Fri Jul 15 10:51:08 2016 -0400

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |   4 +
 .../src/java/org/apache/lucene/geo/Polygon.java |  13 +-
 .../lucene/geo/SimpleGeoJSONPolygonParser.java  | 440 +++++++++++++++++++
 .../test/org/apache/lucene/geo/TestPolygon.java | 241 ++++++++++
 4 files changed, 697 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/343f374b/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 9e19d35..92ee7b9 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -54,6 +54,10 @@ New Features
 * LUCENE-7355: Added Analyzer#normalize(), which only applies normalization to
   an input string. (Adrien Grand)
 
+* LUCENE-7380: Add Polygon.fromGeoJSON for more easily creating
+  Polygon instances from a standard GeoJSON string (Robert Muir, Mike
+  McCandless)
+
 Bug Fixes
 
 * LUCENE-6662: Fixed potential resource leaks. (Rishabh Patel via Adrien Grand)

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/343f374b/lucene/core/src/java/org/apache/lucene/geo/Polygon.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/geo/Polygon.java b/lucene/core/src/java/org/apache/lucene/geo/Polygon.java
index 3b5dec9..99453b9 100644
--- a/lucene/core/src/java/org/apache/lucene/geo/Polygon.java
+++ b/lucene/core/src/java/org/apache/lucene/geo/Polygon.java
@@ -16,10 +16,13 @@
  */
 package org.apache.lucene.geo;
 
+import java.text.ParseException;
 import java.util.Arrays;
 
 /**
- * Represents a closed polygon on the earth's surface.
+ * Represents a closed polygon on the earth's surface.  You can either construct the Polygon directly yourself with {@code double[]}
+ * coordinates, or use {@link Polygon#fromGeoJSON} if you have a polygon already encoded as a
+ * <a href="http://geojson.org/geojson-spec.html">GeoJSON</a> string.
  * <p>
  * NOTES:
  * <ol>
@@ -159,4 +162,12 @@ public final class Polygon {
     }
     return sb.toString();
   }
+
+  /** Parses a standard GeoJSON polygon string.  The type of the incoming GeoJSON object must be a Polygon or MultiPolygon, optionally
+   *  embedded under a "type: Feature".  A Polygon is returned as a length-1 array, while a MultiPolygon yields an array of length 1 or more.
+   *
+   *  <p>See <a href="http://geojson.org/geojson-spec.html">the GeoJSON specification</a>. */
+  public static Polygon[] fromGeoJSON(String geojson) throws ParseException {
+    return new SimpleGeoJSONPolygonParser(geojson).parse();
+  }
 }
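
For reference, a minimal caller of the new API might look like the sketch below (illustrative only; the class name is hypothetical and the GeoJSON string matches the form used in the tests that follow):

  import java.text.ParseException;

  import org.apache.lucene.geo.Polygon;

  public class FromGeoJSONExample {
    public static void main(String[] args) throws ParseException {
      String geojson =
          "{ \"type\": \"Polygon\", " +
          "  \"coordinates\": [ [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], " +
          "                      [100.0, 1.0], [100.0, 0.0] ] ] }";
      // A GeoJSON Polygon parses to a single-element array; a MultiPolygon
      // would yield one Polygon per member polygon.
      Polygon[] polygons = Polygon.fromGeoJSON(geojson);
      System.out.println(polygons.length + " polygon(s): " + polygons[0]);
    }
  }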

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/343f374b/lucene/core/src/java/org/apache/lucene/geo/SimpleGeoJSONPolygonParser.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/geo/SimpleGeoJSONPolygonParser.java b/lucene/core/src/java/org/apache/lucene/geo/SimpleGeoJSONPolygonParser.java
new file mode 100644
index 0000000..278307f
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/geo/SimpleGeoJSONPolygonParser.java
@@ -0,0 +1,440 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.geo;
+
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.List;
+
+/*
+  We accept either a whole type: Feature, like this:
+
+    { "type": "Feature",
+      "geometry": {
+         "type": "Polygon",
+         "coordinates": [
+           [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0],
+             [100.0, 1.0], [100.0, 0.0] ]
+           ]
+       },
+       "properties": {
+         "prop0": "value0",
+         "prop1": {"this": "that"}
+         }
+       }
+
+   Or the inner object with type: Multi/Polygon.
+
+   Or a type: FeatureCollection, if it has only one Feature which is a Polygon or MultiPolygon.
+
+   type: MultiPolygon (union of polygons) is also accepted.
+*/
+
+/** Does minimal parsing of a GeoJSON object, to extract either Polygon or MultiPolygon, either directly as the top-level type, or if
+ *  the top-level type is Feature, as the geometry of that feature. */
+
+@SuppressWarnings("unchecked")
+class SimpleGeoJSONPolygonParser {
+  final String input;
+  private int upto;
+  private String polyType;
+  private List<Object> coordinates;
+    
+  public SimpleGeoJSONPolygonParser(String input) {
+    this.input = input;
+  }
+
+  public Polygon[] parse() throws ParseException {
+    // parse entire object
+    parseObject("");
+
+    // make sure there's nothing left:
+    readEnd();
+
+    // The order of JSON object keys (type, geometry, coordinates in our case) can be arbitrary, so we wait until we are done parsing to
+    // put the pieces together here:
+
+    if (coordinates == null) {
+      throw newParseException("did not see any polygon coordinates");
+    }
+
+    if (polyType == null) {
+      throw newParseException("did not see type: Polygon or MultiPolygon");
+    }
+
+    if (polyType.equals("Polygon")) {
+      return new Polygon[] {parsePolygon(coordinates)};
+    } else {
+      List<Polygon> polygons = new ArrayList<>();
+      for(int i=0;i<coordinates.size();i++) {
+        Object o = coordinates.get(i);
+        if (o instanceof List == false) {
+          throw newParseException("elements of coordinates array should be an array, but got: " + o.getClass());
+        }
+        polygons.add(parsePolygon((List<Object>) o));
+      }
+
+      return polygons.toArray(new Polygon[polygons.size()]);
+    }
+  }
+
+  /** path is the "address" by keys of where we are, e.g. geometry.coordinates */
+  private void parseObject(String path) throws ParseException {
+    scan('{');
+    boolean first = true;
+    while (true) {
+      char ch = peek();
+      if (ch == '}') {
+        break;
+      } else if (first == false) {
+        if (ch == ',') {
+          // ok
+          upto++;
+          ch = peek();
+          if (ch == '}') {
+            break;
+          }
+        } else {
+          throw newParseException("expected , but got " + ch);
+        }
+      }
+
+      first = false;
+
+      int uptoStart = upto;
+      String key = parseString();
+
+      if (path.equals("crs.properties") && key.equals("href")) {
+        upto = uptoStart;
+        throw newParseException("cannot handle linked crs");
+      }
+
+      scan(':');
+
+      Object o;
+
+      ch = peek();
+
+      uptoStart = upto;
+
+      if (ch == '[') {
+        String newPath;
+        if (path.length() == 0) {
+          newPath = key;
+        } else {
+          newPath = path + "." + key;
+        }
+        o = parseArray(newPath);
+      } else if (ch == '{') {
+        String newPath;
+        if (path.length() == 0) {
+          newPath = key;
+        } else {
+          newPath = path + "." + key;
+        }
+        parseObject(newPath);
+        o = null;
+      } else if (ch == '"') {
+        o = parseString();
+      } else if (ch == 't') {
+        scan("true");
+        o = Boolean.TRUE;
+      } else if (ch == 'f') {
+        scan("false");
+        o = Boolean.FALSE;
+      } else if (ch == 'n') {
+        scan("null");
+        o = null;
+      } else if (ch == '-' || ch == '.' || (ch >= '0' && ch <= '9')) {
+        o = parseNumber();
+      } else if (ch == '}') {
+        break;
+      } else {
+        throw newParseException("expected array, object, string or literal value, but got: " + ch);
+      }
+
+      if (path.equals("crs.properties") && key.equals("name")) {
+        if (o instanceof String == false) {
+          upto = uptoStart;
+          throw newParseException("crs.properties.name should be a string, but saw: " + o);
+        }
+        String crs = (String) o;
+        if (crs.startsWith("urn:ogc:def:crs:OGC") == false || crs.endsWith(":CRS84") == false) {
+          upto = uptoStart;
+          throw newParseException("crs must be CRS84 from OGC, but saw: " + o);
+        }
+      }
+
+      if (key.equals("type") && path.startsWith("crs") == false) {
+        if (o instanceof String == false) {
+          upto = uptoStart;
+          throw newParseException("type should be a string, but got: " + o);
+        }
+        String type = (String) o;
+        if (type.equals("Polygon") && isValidGeometryPath(path)) {
+          polyType = "Polygon";
+        } else if (type.equals("MultiPolygon") && isValidGeometryPath(path)) {
+          polyType = "MultiPolygon";
+        } else if ((type.equals("FeatureCollection") || type.equals("Feature")) && (path.equals("features.[]") || path.equals(""))) {
+          // OK, we recurse
+        } else {
+          upto = uptoStart;
+          throw newParseException("can only handle type FeatureCollection (if it has a single polygon geometry), Feature, Polygon or MutiPolygon, but got " + type);
+        }
+      } else if (key.equals("coordinates") && isValidGeometryPath(path)) {
+        if (o instanceof List == false) {
+          upto = uptoStart;
+          throw newParseException("coordinates should be an array, but got: " + o.getClass());
+        }
+        if (coordinates != null) {
+          upto = uptoStart;
+          throw newParseException("only one Polygon or MultiPolygon is supported");
+        }
+        coordinates = (List<Object>) o;
+      }
+    }
+
+    scan('}');
+  }
+
+  /** Returns true if the object path is a valid location to see a Multi/Polygon geometry */
+  private boolean isValidGeometryPath(String path) {
+    return path.equals("") || path.equals("geometry") || path.equals("features.[].geometry");
+  }
+
+  private Polygon parsePolygon(List<Object> coordinates) throws ParseException {
+    List<Polygon> holes = new ArrayList<>();
+    Object o = coordinates.get(0);
+    if (o instanceof List == false) {
+      throw newParseException("first element of polygon array must be an array [[lat, lon], [lat, lon] ...] but got: " + o);
+    }
+    double[][] polyPoints = parsePoints((List<Object>) o);
+    for(int i=1;i<coordinates.size();i++) {
+      o = coordinates.get(i);
+      if (o instanceof List == false) {
+        throw newParseException("elements of coordinates array must be an array [[lat, lon], [lat, lon] ...] but got: " + o);
+      }
+      double[][] holePoints = parsePoints((List<Object>) o);
+      holes.add(new Polygon(holePoints[0], holePoints[1]));
+    }
+    return new Polygon(polyPoints[0], polyPoints[1], holes.toArray(new Polygon[holes.size()]));
+  }
+
+  /** Parses a list of [lon, lat] points (GeoJSON ordering) into a 2d double array of {lats, lons} */
+  private double[][] parsePoints(List<Object> o) throws ParseException {
+    double[] lats = new double[o.size()];
+    double[] lons = new double[o.size()];
+    for(int i=0;i<o.size();i++) {
+      Object point = o.get(i);
+      if (point instanceof List == false) {
+        throw newParseException("elements of coordinates array must [lat, lon] array, but got: " + point);
+      }
+      List<Object> pointList = (List<Object>) point;
+      if (pointList.size() != 2) {
+        throw newParseException("elements of coordinates array must [lat, lon] array, but got wrong element count: " + pointList);
+      }
+      if (pointList.get(0) instanceof Double == false) {
+        throw newParseException("elements of coordinates array must [lat, lon] array, but first element is not a Double: " + pointList.get(0));
+      }
+      if (pointList.get(1) instanceof Double == false) {
+        throw newParseException("elements of coordinates array must [lat, lon] array, but second element is not a Double: " + pointList.get(1));
+      }
+
+      // lon, lat ordering in GeoJSON!
+      lons[i] = ((Double) pointList.get(0)).doubleValue();
+      lats[i] = ((Double) pointList.get(1)).doubleValue();
+    }
+
+    return new double[][] {lats, lons};
+  }
+
+  private List<Object> parseArray(String path) throws ParseException {
+    List<Object> result = new ArrayList<>();
+    scan('[');
+    while (upto < input.length()) {
+      char ch = peek();
+      if (ch == ']') {
+        scan(']');
+        return result;
+      }
+
+      if (result.size() > 0) {
+        if (ch != ',') {
+          throw newParseException("expected ',' separating list items, but got '" + ch + "'");
+        }
+
+        // skip the ,
+        upto++;
+        
+        if (upto == input.length()) {
+          throw newParseException("hit EOF while parsing array");
+        }
+        ch = peek();
+      }
+
+      Object o;
+      if (ch == '[') {
+        o = parseArray(path + ".[]");
+      } else if (ch == '{') {
+        // This is only used when parsing the "features" in type: FeatureCollection
+        parseObject(path + ".[]");
+        o = null;
+      } else if (ch == '-' || ch == '.' || (ch >= '0' && ch <= '9')) {
+        o = parseNumber();
+      } else {
+        throw newParseException("expected another array or number while parsing array, not '" + ch + "'");
+      }
+      
+      result.add(o);
+    }
+
+    throw newParseException("hit EOF while reading array");
+  }
+
+  private Number parseNumber() throws ParseException {
+    StringBuilder b = new StringBuilder();
+    int uptoStart = upto;
+    while (upto < input.length()) {
+      char ch = input.charAt(upto);
+      if (ch == '-' || ch == '.' || (ch >= '0' && ch <= '9') || ch == 'e' || ch == 'E') {
+        upto++;
+        b.append(ch);
+      } else {
+        break;
+      }
+    }
+
+    // we only handle doubles
+    try {
+      return Double.parseDouble(b.toString());
+    } catch (NumberFormatException nfe) {
+      upto = uptoStart;
+      throw newParseException("could not parse number as double");
+    }
+  }
+
+  private String parseString() throws ParseException {
+    scan('"');
+    StringBuilder b = new StringBuilder();
+    while (upto < input.length()) {
+      char ch = input.charAt(upto);
+      if (ch == '"') {
+        upto++;
+        return b.toString();
+      }
+      if (ch == '\\') {
+        // an escaped character
+        upto++;
+        if (upto == input.length()) {
+          throw newParseException("hit EOF inside string literal");
+        }
+        ch = input.charAt(upto);
+        if (ch == 'u') {
+          // 4 hex digit unicode BMP escape
+          upto++;
+          if (upto + 4 > input.length()) {
+            throw newParseException("hit EOF inside string literal");
+          }
+          // decode the 4 hex digits into the escaped character and skip past them
+          b.append((char) Integer.parseInt(input.substring(upto, upto+4), 16));
+          upto += 4;
+        } else if (ch == '\\') {
+          b.append('\\');
+          upto++;
+        } else {
+          // TODO: allow \n, \t, etc.???
+          throw newParseException("unsupported string escape character \\" + ch);
+        }
+      } else {
+        b.append(ch);
+        upto++;
+      }
+    }
+
+    throw newParseException("hit EOF inside string literal");
+  }
+
+  private char peek() throws ParseException {
+    while (upto < input.length()) {
+      char ch = input.charAt(upto);
+      if (isJSONWhitespace(ch)) {
+        upto++;
+        continue;
+      }
+      return ch;
+    }
+
+    throw newParseException("unexpected EOF");
+  }
+
+  /** Scans across whitespace and consumes the expected character, or throws {@code ParseException} if the character is wrong */
+  private void scan(char expected) throws ParseException {
+    while (upto < input.length()) {
+      char ch = input.charAt(upto);
+      if (isJSONWhitespace(ch)) {
+        upto++;
+        continue;
+      }
+      if (ch != expected) {
+        throw newParseException("expected '" + expected + "' but got '" + ch + "'");
+      }
+      upto++;
+      return;
+    }
+    throw newParseException("expected '" + expected + "' but got EOF");
+  }
+
+  private void readEnd() throws ParseException {
+    while (upto < input.length()) {
+      char ch = input.charAt(upto);
+      if (isJSONWhitespace(ch) == false) {
+        throw newParseException("unexpected character '" + ch + "' after end of GeoJSON object");
+      }
+      upto++;
+    }
+  }
+
+  /** Scans the expected string, or throws {@code ParseException} */
+  private void scan(String expected) throws ParseException {
+    if (upto + expected.length() > input.length()) {
+      throw newParseException("expected \"" + expected + "\" but hit EOF");
+    }
+    String subString = input.substring(upto, upto+expected.length());
+    if (subString.equals(expected) == false) {
+      throw newParseException("expected \"" + expected + "\" but got \"" + subString + "\"");
+    }
+    upto += expected.length();
+  }
+
+  private static boolean isJSONWhitespace(char ch) {
+    // JSON only treats these four characters as whitespace:
+    return ch == 0x20 || // space
+      ch == 0x09 || // tab
+      ch == 0x0a || // line feed
+      ch == 0x0d;  // newline
+  }
+
+  /** When calling this, upto should be at the position of the incorrect character! */
+  private ParseException newParseException(String details) throws ParseException {
+    String fragment;
+    int end = Math.min(input.length(), upto+1);
+    if (upto < 50) {
+      fragment = input.substring(0, end);
+    } else {
+      fragment = "..." + input.substring(upto-50, end);
+    }
+    return new ParseException(details + " at character offset " + upto + "; fragment leading to this:\n" + fragment, upto);
+  }
+}
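
Parse failures surface as java.text.ParseException carrying the character offset recorded by newParseException above. A caller that wants to report that offset might do something along these lines (illustrative sketch; the class name is hypothetical and the input mirrors the "extra content after the object" test case below):

  import java.text.ParseException;

  import org.apache.lucene.geo.Polygon;

  public class GeoJSONErrorHandlingExample {
    public static void main(String[] args) {
      String badGeojson =
          "{ \"type\": \"Polygon\", " +
          "  \"coordinates\": [ [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], " +
          "                      [100.0, 1.0], [100.0, 0.0] ] ] } foo";
      try {
        Polygon.fromGeoJSON(badGeojson);
      } catch (ParseException e) {
        // getErrorOffset() carries the 'upto' position passed to ParseException
        System.err.println("GeoJSON parse failed at offset " + e.getErrorOffset()
            + ": " + e.getMessage());
      }
    }
  }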

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/343f374b/lucene/core/src/test/org/apache/lucene/geo/TestPolygon.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/geo/TestPolygon.java b/lucene/core/src/test/org/apache/lucene/geo/TestPolygon.java
index 401092f..8ee6271 100644
--- a/lucene/core/src/test/org/apache/lucene/geo/TestPolygon.java
+++ b/lucene/core/src/test/org/apache/lucene/geo/TestPolygon.java
@@ -16,6 +16,8 @@
  */
 package org.apache.lucene.geo;
 
+import java.text.ParseException;
+
 import org.apache.lucene.util.LuceneTestCase;
 
 public class TestPolygon extends LuceneTestCase {
@@ -59,4 +61,243 @@ public class TestPolygon extends LuceneTestCase {
     });
     assertTrue(expected.getMessage(), expected.getMessage().contains("it must close itself"));
   }
+
+  public void testGeoJSONPolygon() throws Exception {
+    StringBuilder b = new StringBuilder();
+    b.append("{\n");
+    b.append("  \"type\": \"Polygon\",\n");
+    b.append("  \"coordinates\": [\n");
+    b.append("    [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0],\n");
+    b.append("      [100.0, 1.0], [100.0, 0.0] ]\n");
+    b.append("  ]\n");
+    b.append("}\n");
+     
+    Polygon[] polygons = Polygon.fromGeoJSON(b.toString());
+    assertEquals(1, polygons.length);
+    assertEquals(new Polygon(new double[] {0.0, 0.0, 1.0, 1.0, 0.0},
+                             new double[] {100.0, 101.0, 101.0, 100.0, 100.0}), polygons[0]);
+  }
+
+  public void testGeoJSONPolygonWithHole() throws Exception {
+    StringBuilder b = new StringBuilder();
+    b.append("{\n");
+    b.append("  \"type\": \"Polygon\",\n");
+    b.append("  \"coordinates\": [\n");
+    b.append("    [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0],\n");
+    b.append("      [100.0, 1.0], [100.0, 0.0] ],\n");
+    b.append("    [ [100.5, 0.5], [100.5, 0.75], [100.75, 0.75], [100.75, 0.5], [100.5, 0.5]]\n");
+    b.append("  ]\n");
+    b.append("}\n");
+     
+    Polygon hole = new Polygon(new double[] {0.5, 0.75, 0.75, 0.5, 0.5},
+                               new double[] {100.5, 100.5, 100.75, 100.75, 100.5});
+    Polygon expected = new Polygon(new double[] {0.0, 0.0, 1.0, 1.0, 0.0},    
+                                   new double[] {100.0, 101.0, 101.0, 100.0, 100.0}, hole);
+    Polygon[] polygons = Polygon.fromGeoJSON(b.toString());
+
+    assertEquals(1, polygons.length);
+    assertEquals(expected, polygons[0]);
+  }
+
+  // a MultiPolygon returns multiple Polygons
+  public void testGeoJSONMultiPolygon() throws Exception {
+    StringBuilder b = new StringBuilder();
+    b.append("{\n");
+    b.append("  \"type\": \"MultiPolygon\",\n");
+    b.append("  \"coordinates\": [\n");
+    b.append("    [\n");
+    b.append("      [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0],\n");
+    b.append("        [100.0, 1.0], [100.0, 0.0] ]\n");
+    b.append("    ],\n");
+    b.append("    [\n");
+    b.append("      [ [10.0, 2.0], [11.0, 2.0], [11.0, 3.0],\n");
+    b.append("        [10.0, 3.0], [10.0, 2.0] ]\n");
+    b.append("    ]\n");
+    b.append("  ],\n");
+    b.append("}\n");
+     
+    Polygon[] polygons = Polygon.fromGeoJSON(b.toString());
+    assertEquals(2, polygons.length);
+    assertEquals(new Polygon(new double[] {0.0, 0.0, 1.0, 1.0, 0.0},
+                             new double[] {100.0, 101.0, 101.0, 100.0, 100.0}), polygons[0]);
+    assertEquals(new Polygon(new double[] {2.0, 2.0, 3.0, 3.0, 2.0},
+                             new double[] {10.0, 11.0, 11.0, 10.0, 10.0}), polygons[1]);
+  }
+
+  // make sure type can appear last (JSON allows arbitrary key/value order for objects)
+  public void testGeoJSONTypeComesLast() throws Exception {
+    StringBuilder b = new StringBuilder();
+    b.append("{\n");
+    b.append("  \"coordinates\": [\n");
+    b.append("    [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0],\n");
+    b.append("      [100.0, 1.0], [100.0, 0.0] ]\n");
+    b.append("  ],\n");
+    b.append("  \"type\": \"Polygon\",\n");
+    b.append("}\n");
+     
+    Polygon[] polygons = Polygon.fromGeoJSON(b.toString());
+    assertEquals(1, polygons.length);
+    assertEquals(new Polygon(new double[] {0.0, 0.0, 1.0, 1.0, 0.0},
+                             new double[] {100.0, 101.0, 101.0, 100.0, 100.0}), polygons[0]);
+  }
+
+  // make sure Polygon inside a type: Feature also works
+  public void testGeoJSONPolygonFeature() throws Exception {
+    StringBuilder b = new StringBuilder();
+    b.append("{ \"type\": \"Feature\",\n");
+    b.append("  \"geometry\": {\n");
+    b.append("    \"type\": \"Polygon\",\n");
+    b.append("    \"coordinates\": [\n");
+    b.append("      [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0],\n");
+    b.append("        [100.0, 1.0], [100.0, 0.0] ]\n");
+    b.append("      ]\n");
+    b.append("  },\n");
+    b.append("  \"properties\": {\n");
+    b.append("    \"prop0\": \"value0\",\n");
+    b.append("    \"prop1\": {\"this\": \"that\"}\n");
+    b.append("  }\n");
+    b.append("}\n");
+     
+    Polygon[] polygons = Polygon.fromGeoJSON(b.toString());
+    assertEquals(1, polygons.length);
+    assertEquals(new Polygon(new double[] {0.0, 0.0, 1.0, 1.0, 0.0},
+                             new double[] {100.0, 101.0, 101.0, 100.0, 100.0}), polygons[0]);
+  }
+
+  // make sure MultiPolygon inside a type: Feature also works
+  public void testGeoJSONMultiPolygonFeature() throws Exception {
+    StringBuilder b = new StringBuilder();
+    b.append("{ \"type\": \"Feature\",\n");
+    b.append("  \"geometry\": {\n");
+    b.append("      \"type\": \"MultiPolygon\",\n");
+    b.append("      \"coordinates\": [\n");
+    b.append("        [\n");
+    b.append("          [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0],\n");
+    b.append("            [100.0, 1.0], [100.0, 0.0] ]\n");
+    b.append("        ],\n");
+    b.append("        [\n");
+    b.append("          [ [10.0, 2.0], [11.0, 2.0], [11.0, 3.0],\n");
+    b.append("            [10.0, 3.0], [10.0, 2.0] ]\n");
+    b.append("        ]\n");
+    b.append("      ]\n");
+    b.append("  },\n");
+    b.append("  \"properties\": {\n");
+    b.append("    \"prop0\": \"value0\",\n");
+    b.append("    \"prop1\": {\"this\": \"that\"}\n");
+    b.append("  }\n");
+    b.append("}\n");
+     
+    Polygon[] polygons = Polygon.fromGeoJSON(b.toString());
+    assertEquals(2, polygons.length);
+    assertEquals(new Polygon(new double[] {0.0, 0.0, 1.0, 1.0, 0.0},
+                             new double[] {100.0, 101.0, 101.0, 100.0, 100.0}), polygons[0]);
+    assertEquals(new Polygon(new double[] {2.0, 2.0, 3.0, 3.0, 2.0},
+                             new double[] {10.0, 11.0, 11.0, 10.0, 10.0}), polygons[1]);
+  }
+
+  // FeatureCollection with one geometry is allowed:
+  public void testGeoJSONFeatureCollectionWithSinglePolygon() throws Exception {
+    StringBuilder b = new StringBuilder();
+    b.append("{ \"type\": \"FeatureCollection\",\n");
+    b.append("  \"features\": [\n");
+    b.append("    { \"type\": \"Feature\",\n");
+    b.append("      \"geometry\": {\n");
+    b.append("        \"type\": \"Polygon\",\n");
+    b.append("        \"coordinates\": [\n");
+    b.append("          [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0],\n");
+    b.append("            [100.0, 1.0], [100.0, 0.0] ]\n");
+    b.append("          ]\n");
+    b.append("      },\n");
+    b.append("      \"properties\": {\n");
+    b.append("        \"prop0\": \"value0\",\n");
+    b.append("        \"prop1\": {\"this\": \"that\"}\n");
+    b.append("      }\n");
+    b.append("    }\n");
+    b.append("  ]\n");
+    b.append("}    \n");
+
+    Polygon expected = new Polygon(new double[] {0.0, 0.0, 1.0, 1.0, 0.0},    
+                                   new double[] {100.0, 101.0, 101.0, 100.0, 100.0});
+    Polygon[] actual = Polygon.fromGeoJSON(b.toString());
+    assertEquals(1, actual.length);
+    assertEquals(expected, actual[0]);
+  }
+
+  // stuff after the object is not allowed
+  public void testIllegalGeoJSONExtraCrapAtEnd() throws Exception {
+    StringBuilder b = new StringBuilder();
+    b.append("{\n");
+    b.append("  \"type\": \"Polygon\",\n");
+    b.append("  \"coordinates\": [\n");
+    b.append("    [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0],\n");
+    b.append("      [100.0, 1.0], [100.0, 0.0] ]\n");
+    b.append("  ]\n");
+    b.append("}\n");
+    b.append("foo\n");
+     
+    Exception e = expectThrows(ParseException.class, () -> Polygon.fromGeoJSON(b.toString()));
+    assertTrue(e.getMessage().contains("unexpected character 'f' after end of GeoJSON object"));
+  }
+
+  public void testIllegalGeoJSONLinkedCRS() throws Exception {
+
+    StringBuilder b = new StringBuilder();
+    b.append("{\n");
+    b.append("  \"type\": \"Polygon\",\n");
+    b.append("  \"coordinates\": [\n");
+    b.append("    [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0],\n");
+    b.append("      [100.0, 1.0], [100.0, 0.0] ]\n");
+    b.append("  ],\n");
+    b.append("  \"crs\": {\n");
+    b.append("    \"type\": \"link\",\n");
+    b.append("    \"properties\": {\n");
+    b.append("      \"href\": \"http://example.com/crs/42\",\n");
+    b.append("      \"type\": \"proj4\"\n");
+    b.append("    }\n");
+    b.append("  }    \n");
+    b.append("}\n");
+    Exception e = expectThrows(ParseException.class, () -> Polygon.fromGeoJSON(b.toString()));
+    assertTrue(e.getMessage().contains("cannot handle linked crs"));
+  }
+
+  // FeatureCollection with more than one geometry is not supported:
+  public void testIllegalGeoJSONMultipleFeatures() throws Exception {
+    StringBuilder b = new StringBuilder();
+    b.append("{ \"type\": \"FeatureCollection\",\n");
+    b.append("  \"features\": [\n");
+    b.append("    { \"type\": \"Feature\",\n");
+    b.append("      \"geometry\": {\"type\": \"Point\", \"coordinates\": [102.0, 0.5]},\n");
+    b.append("      \"properties\": {\"prop0\": \"value0\"}\n");
+    b.append("    },\n");
+    b.append("    { \"type\": \"Feature\",\n");
+    b.append("      \"geometry\": {\n");
+    b.append("      \"type\": \"LineString\",\n");
+    b.append("      \"coordinates\": [\n");
+    b.append("        [102.0, 0.0], [103.0, 1.0], [104.0, 0.0], [105.0, 1.0]\n");
+    b.append("        ]\n");
+    b.append("      },\n");
+    b.append("      \"properties\": {\n");
+    b.append("        \"prop0\": \"value0\",\n");
+    b.append("        \"prop1\": 0.0\n");
+    b.append("      }\n");
+    b.append("    },\n");
+    b.append("    { \"type\": \"Feature\",\n");
+    b.append("      \"geometry\": {\n");
+    b.append("        \"type\": \"Polygon\",\n");
+    b.append("        \"coordinates\": [\n");
+    b.append("          [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0],\n");
+    b.append("            [100.0, 1.0], [100.0, 0.0] ]\n");
+    b.append("          ]\n");
+    b.append("      },\n");
+    b.append("      \"properties\": {\n");
+    b.append("        \"prop0\": \"value0\",\n");
+    b.append("        \"prop1\": {\"this\": \"that\"}\n");
+    b.append("      }\n");
+    b.append("    }\n");
+    b.append("  ]\n");
+    b.append("}    \n");
+
+    Exception e = expectThrows(ParseException.class, () -> Polygon.fromGeoJSON(b.toString()));
+    assertTrue(e.getMessage().contains("can only handle type FeatureCollection (if it has a single polygon geometry), Feature, Polygon or MutiPolygon, but got Point"));
+  }
 }


[20/51] [abbrv] lucene-solr:apiv2: SOLR-9306: give solr/contrib/analysis-extras's test classes access to lucene/analysis's test classes

Posted by sa...@apache.org.
SOLR-9306: give solr/contrib/analysis-extras's test classes access to lucene/analysis's test classes


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/f9c94706
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/f9c94706
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/f9c94706

Branch: refs/heads/apiv2
Commit: f9c94706416c80dcdc4514256c2e4cbf975c386b
Parents: 3a71c7d
Author: Christine Poerschke <cp...@apache.org>
Authored: Thu Jul 14 10:20:35 2016 +0100
Committer: Christine Poerschke <cp...@apache.org>
Committed: Thu Jul 14 10:20:35 2016 +0100

----------------------------------------------------------------------
 .../maven/lucene/analysis/common/pom.xml.template    | 13 +++++++++++++
 .../solr/contrib/analysis-extras/pom.xml.template    |  7 +++++++
 solr/common-build.xml                                |  6 ++++++
 solr/contrib/analysis-extras/build.xml               | 10 ++++++++++
 .../org/apache/solr/schema/ICUCollationField.java    |  2 +-
 .../apache/solr/schema/TestICUCollationField.java    | 15 ++++++++++++++-
 6 files changed, 51 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f9c94706/dev-tools/maven/lucene/analysis/common/pom.xml.template
----------------------------------------------------------------------
diff --git a/dev-tools/maven/lucene/analysis/common/pom.xml.template b/dev-tools/maven/lucene/analysis/common/pom.xml.template
index 95a0487..cded628 100644
--- a/dev-tools/maven/lucene/analysis/common/pom.xml.template
+++ b/dev-tools/maven/lucene/analysis/common/pom.xml.template
@@ -69,5 +69,18 @@
         </excludes>
       </testResource>
     </testResources>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <executions>
+          <execution>
+            <goals>
+              <goal>test-jar</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
   </build>
 </project>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f9c94706/dev-tools/maven/solr/contrib/analysis-extras/pom.xml.template
----------------------------------------------------------------------
diff --git a/dev-tools/maven/solr/contrib/analysis-extras/pom.xml.template b/dev-tools/maven/solr/contrib/analysis-extras/pom.xml.template
index 4d0f322..b7a11a8 100644
--- a/dev-tools/maven/solr/contrib/analysis-extras/pom.xml.template
+++ b/dev-tools/maven/solr/contrib/analysis-extras/pom.xml.template
@@ -43,6 +43,13 @@
   </scm>
   <dependencies>
     <dependency>
+      <groupId>org.apache.lucene</groupId>
+      <artifactId>lucene-analyzers-common</artifactId>
+      <version>${project.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
       <!-- lucene-test-framework dependency must be declared before lucene-core -->
       <!-- This dependency cannot be put into solr-parent, because local        -->
       <!-- dependencies are always ordered before inherited dependencies.       -->

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f9c94706/solr/common-build.xml
----------------------------------------------------------------------
diff --git a/solr/common-build.xml b/solr/common-build.xml
index 235070a..8bf9db7 100644
--- a/solr/common-build.xml
+++ b/solr/common-build.xml
@@ -467,6 +467,12 @@
     </sequential>
   </macrodef>
 
+  <target name="-compile-test-lucene-analysis">
+    <ant dir="${common.dir}/analysis" target="compile-test" inheritAll="false">
+      <propertyset refid="uptodate.and.compiled.properties"/>
+    </ant>
+  </target>
+
   <target name="-compile-test-lucene-queryparser">
     <ant dir="${common.dir}/queryparser" target="compile-test" inheritAll="false">
       <propertyset refid="uptodate.and.compiled.properties"/>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f9c94706/solr/contrib/analysis-extras/build.xml
----------------------------------------------------------------------
diff --git a/solr/contrib/analysis-extras/build.xml b/solr/contrib/analysis-extras/build.xml
index 6697fc9..38d67dd 100644
--- a/solr/contrib/analysis-extras/build.xml
+++ b/solr/contrib/analysis-extras/build.xml
@@ -25,6 +25,8 @@
 
   <import file="../contrib-build.xml"/>
   
+  <target name="compile-test" depends="-compile-test-lucene-analysis,common-solr.compile-test"/>
+
   <path id="analysis.extras.lucene.libs">
     <pathelement location="${analyzers-icu.jar}"/>
     <!-- 
@@ -43,6 +45,14 @@
     <path refid="solr.base.classpath"/>
   </path>
 
+  <path id="test.classpath">
+    <path refid="solr.test.base.classpath"/>
+    <dirset dir="${common.dir}/build/analysis/">
+      <include name="**/classes/java"/>
+      <include name="**/classes/test"/>
+    </dirset>
+  </path>
+
   <!-- 
     Although the smartcn, stempel, and morfologik jars are not dependencies of
     code in the analysis-extras contrib, they must remain here in order to

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f9c94706/solr/contrib/analysis-extras/src/java/org/apache/solr/schema/ICUCollationField.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analysis-extras/src/java/org/apache/solr/schema/ICUCollationField.java b/solr/contrib/analysis-extras/src/java/org/apache/solr/schema/ICUCollationField.java
index b186754..2071163 100644
--- a/solr/contrib/analysis-extras/src/java/org/apache/solr/schema/ICUCollationField.java
+++ b/solr/contrib/analysis-extras/src/java/org/apache/solr/schema/ICUCollationField.java
@@ -198,7 +198,7 @@ public class ICUCollationField extends FieldType {
    * Read custom rules from a file, and create a RuleBasedCollator
    * The file cannot support comments, as # might be in the rules!
    */
-  private Collator createFromRules(String fileName, ResourceLoader loader) {
+  static Collator createFromRules(String fileName, ResourceLoader loader) {
     InputStream input = null;
     try {
      input = loader.openResource(fileName);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f9c94706/solr/contrib/analysis-extras/src/test/org/apache/solr/schema/TestICUCollationField.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analysis-extras/src/test/org/apache/solr/schema/TestICUCollationField.java b/solr/contrib/analysis-extras/src/test/org/apache/solr/schema/TestICUCollationField.java
index e21b4dd..4cff7fa 100644
--- a/solr/contrib/analysis-extras/src/test/org/apache/solr/schema/TestICUCollationField.java
+++ b/solr/contrib/analysis-extras/src/test/org/apache/solr/schema/TestICUCollationField.java
@@ -21,6 +21,9 @@ import java.io.FileOutputStream;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
+import org.apache.lucene.analysis.util.FilesystemResourceLoader;
+import org.apache.lucene.analysis.util.ResourceLoader;
+import org.apache.lucene.analysis.util.StringMockResourceLoader;
 import org.apache.solr.SolrTestCaseJ4;
 import org.junit.BeforeClass;
 
@@ -80,10 +83,20 @@ public class TestICUCollationField extends SolrTestCaseJ4 {
 
     RuleBasedCollator tailoredCollator = new RuleBasedCollator(baseCollator.getRules() + DIN5007_2_tailorings);
     String tailoredRules = tailoredCollator.getRules();
-    FileOutputStream os = new FileOutputStream(new File(confDir, "customrules.dat"));
+    final String osFileName = "customrules.dat";
+    final FileOutputStream os = new FileOutputStream(new File(confDir, osFileName));
     IOUtils.write(tailoredRules, os, "UTF-8");
     os.close();
 
+    final ResourceLoader loader;
+    if (random().nextBoolean()) {
+      loader = new StringMockResourceLoader(tailoredRules);
+    } else {
+      loader = new FilesystemResourceLoader(confDir.toPath());
+    }
+    final Collator readCollator = ICUCollationField.createFromRules(osFileName, loader);
+    assertEquals(tailoredCollator, readCollator);
+
     return tmpFile;
   }
 


[34/51] [abbrv] lucene-solr:apiv2: SOLR-9275: add defaultField protected field to queryparser/xml's CoreParser

Posted by sa...@apache.org.
SOLR-9275: add defaultField protected field to queryparser/xml's CoreParser


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/9f13bf67
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/9f13bf67
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/9f13bf67

Branch: refs/heads/apiv2
Commit: 9f13bf671872d973684fe77e36373e9136645c48
Parents: 05b38f5
Author: Christine Poerschke <cp...@apache.org>
Authored: Mon Jul 18 14:25:22 2016 +0100
Committer: Christine Poerschke <cp...@apache.org>
Committed: Mon Jul 18 15:45:01 2016 +0100

----------------------------------------------------------------------
 .../src/java/org/apache/lucene/queryparser/xml/CoreParser.java     | 2 ++
 1 file changed, 2 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9f13bf67/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/CoreParser.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/CoreParser.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/CoreParser.java
index 2dd0097..98a878f 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/CoreParser.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/CoreParser.java
@@ -33,6 +33,7 @@ import java.io.InputStream;
  */
 public class CoreParser implements QueryBuilder {
 
+  protected String defaultField;
   protected Analyzer analyzer;
   protected QueryParser parser;
   protected QueryBuilderFactory queryFactory;
@@ -59,6 +60,7 @@ public class CoreParser implements QueryBuilder {
   }
 
   protected CoreParser(String defaultField, Analyzer analyzer, QueryParser parser) {
+    this.defaultField = defaultField;
     this.analyzer = analyzer;
     this.parser = parser;
 

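The new protected field lets CoreParser subclasses read the default field the parser was constructed with. A minimal subclass using it might look like the sketch below (hypothetical example only; it assumes the classic QueryParser type taken by CoreParser's protected constructor, and the Solr subclasses motivating SOLR-9275 are not shown in this excerpt):

  import org.apache.lucene.analysis.Analyzer;
  import org.apache.lucene.queryparser.classic.QueryParser;
  import org.apache.lucene.queryparser.xml.CoreParser;

  // Hypothetical subclass: with this change the inherited defaultField can be
  // read directly instead of re-capturing the constructor argument.
  public class VerboseCoreParser extends CoreParser {

    public VerboseCoreParser(String defaultField, Analyzer analyzer, QueryParser parser) {
      super(defaultField, analyzer, parser);
      System.out.println("XML query parser created with default field: " + this.defaultField);
    }
  }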

[11/51] [abbrv] lucene-solr:apiv2: LUCENE-7371: Better compression of values in Lucene60PointsFormat.

Posted by sa...@apache.org.
LUCENE-7371: Better compression of values in Lucene60PointsFormat.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/866398be
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/866398be
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/866398be

Branch: refs/heads/apiv2
Commit: 866398bea67607bcd54331a48736e6bdb94a703d
Parents: e92a38a
Author: Adrien Grand <jp...@gmail.com>
Authored: Tue Jul 5 16:54:19 2016 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Tue Jul 12 17:57:56 2016 +0200

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |   3 +
 .../simpletext/SimpleTextPointsWriter.java      |  16 +-
 .../org/apache/lucene/util/bkd/BKDReader.java   |  65 ++++++-
 .../org/apache/lucene/util/bkd/BKDWriter.java   | 185 +++++++++++++++----
 .../org/apache/lucene/util/bkd/TestBKD.java     |  29 +++
 .../lucene/index/BasePointsFormatTestCase.java  |  29 +++
 6 files changed, 281 insertions(+), 46 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/866398be/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index c520e1b..c68d4df 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -117,6 +117,9 @@ Optimizations
 
 * LUCENE-7351: Doc id compression for points. (Adrien Grand)
 
+* LUCENE-7371: Point values are now better compressed using run-length
+  encoding. (Adrien Grand)
+
 Other
 
 * LUCENE-4787: Fixed some highlighting javadocs. (Michael Dodsworth via Adrien

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/866398be/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextPointsWriter.java
----------------------------------------------------------------------
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextPointsWriter.java b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextPointsWriter.java
index e54e20a..8d5c034 100644
--- a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextPointsWriter.java
+++ b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextPointsWriter.java
@@ -20,6 +20,7 @@ package org.apache.lucene.codecs.simpletext;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.function.IntFunction;
 
 import org.apache.lucene.codecs.PointsReader;
 import org.apache.lucene.codecs.PointsWriter;
@@ -161,12 +162,15 @@ class SimpleTextPointsWriter extends PointsWriter {
         }
 
         @Override
-        protected void writeLeafBlockPackedValue(IndexOutput out, int[] commonPrefixLengths, byte[] bytes, int bytesOffset) throws IOException {
-          // NOTE: we don't do prefix coding, so we ignore commonPrefixLengths
-          write(out, BLOCK_VALUE);
-          write(out, new BytesRef(bytes, bytesOffset, packedBytesLength).toString());
-          newline(out);
-        }          
+        protected void writeLeafBlockPackedValues(IndexOutput out, int[] commonPrefixLengths, int count, int sortedDim, IntFunction<BytesRef> packedValues) throws IOException {
+          for (int i = 0; i < count; ++i) {
+            BytesRef packedValue = packedValues.apply(i);
+            // NOTE: we don't do prefix coding, so we ignore commonPrefixLengths
+            write(out, BLOCK_VALUE);
+            write(out, packedValue.toString());
+            newline(out);
+          }
+        }
       }) {
 
       values.intersect(fieldInfo.name, new IntersectVisitor() {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/866398be/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java
index 3566bc1..9ca0bb4 100644
--- a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java
+++ b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java
@@ -20,6 +20,7 @@ import java.io.IOException;
 import java.util.Arrays;
 
 import org.apache.lucene.codecs.CodecUtil;
+import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.PointValues.IntersectVisitor;
 import org.apache.lucene.index.PointValues.Relation;
 import org.apache.lucene.store.IndexInput;
@@ -345,6 +346,63 @@ public class BKDReader implements Accountable {
 
   protected void visitDocValues(int[] commonPrefixLengths, byte[] scratchPackedValue, IndexInput in, int[] docIDs, int count, IntersectVisitor visitor) throws IOException {
     visitor.grow(count);
+
+    readCommonPrefixes(commonPrefixLengths, scratchPackedValue, in);
+
+    int compressedDim = version < BKDWriter.VERSION_COMPRESSED_VALUES
+        ? -1
+        : readCompressedDim(in);
+
+    if (compressedDim == -1) {
+      visitRawDocValues(commonPrefixLengths, scratchPackedValue, in, docIDs, count, visitor);
+    } else {
+      visitCompressedDocValues(commonPrefixLengths, scratchPackedValue, in, docIDs, count, visitor, compressedDim);
+    }
+  }
+
+  // Just read suffixes for every dimension
+  private void visitRawDocValues(int[] commonPrefixLengths, byte[] scratchPackedValue, IndexInput in, int[] docIDs, int count, IntersectVisitor visitor) throws IOException {
+    for (int i = 0; i < count; ++i) {
+      for(int dim=0;dim<numDims;dim++) {
+        int prefix = commonPrefixLengths[dim];
+        in.readBytes(scratchPackedValue, dim*bytesPerDim + prefix, bytesPerDim - prefix);
+      }
+      visitor.visit(docIDs[i], scratchPackedValue);
+    }
+  }
+
+  private void visitCompressedDocValues(int[] commonPrefixLengths, byte[] scratchPackedValue, IndexInput in, int[] docIDs, int count, IntersectVisitor visitor, int compressedDim) throws IOException {
+    // the byte at `compressedByteOffset` is compressed using run-length compression,
+    // other suffix bytes are stored verbatim
+    final int compressedByteOffset = compressedDim * bytesPerDim + commonPrefixLengths[compressedDim];
+    commonPrefixLengths[compressedDim]++;
+    int i;
+    for (i = 0; i < count; ) {
+      scratchPackedValue[compressedByteOffset] = in.readByte();
+      final int runLen = Byte.toUnsignedInt(in.readByte());
+      for (int j = 0; j < runLen; ++j) {
+        for(int dim=0;dim<numDims;dim++) {
+          int prefix = commonPrefixLengths[dim];
+          in.readBytes(scratchPackedValue, dim*bytesPerDim + prefix, bytesPerDim - prefix);
+        }
+        visitor.visit(docIDs[i+j], scratchPackedValue);
+      }
+      i += runLen;
+    }
+    if (i != count) {
+      throw new CorruptIndexException("Sub blocks do not add up to the expected count: " + count + " != " + i, in);
+    }
+  }
+
+  private int readCompressedDim(IndexInput in) throws IOException {
+    int compressedDim = in.readByte();
+    if (compressedDim < -1 || compressedDim >= numDims) {
+      throw new CorruptIndexException("Got compressedDim="+compressedDim, in);
+    }
+    return compressedDim;
+  }
+
+  private void readCommonPrefixes(int[] commonPrefixLengths, byte[] scratchPackedValue, IndexInput in) throws IOException {
     for(int dim=0;dim<numDims;dim++) {
       int prefix = in.readVInt();
       commonPrefixLengths[dim] = prefix;
@@ -353,13 +411,6 @@ public class BKDReader implements Accountable {
       }
       //System.out.println("R: " + dim + " of " + numDims + " prefix=" + prefix);
     }
-    for(int i=0;i<count;i++) {
-      for(int dim=0;dim<numDims;dim++) {
-        int prefix = commonPrefixLengths[dim];
-        in.readBytes(scratchPackedValue, dim*bytesPerDim + prefix, bytesPerDim - prefix);
-      }
-      visitor.visit(docIDs[i], scratchPackedValue);
-    }
   }
 
   private void intersect(IntersectState state,
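
The value compression added by this change is a run-length scheme applied, per leaf block, to the first byte past the common prefix of one chosen dimension: that byte is written once per run together with a run length, and only the remaining suffix bytes are written per value (see visitCompressedDocValues above and writeLeafBlockPackedValues in BKDWriter below). The following standalone sketch, independent of the BKD classes and using hypothetical names, illustrates the same encode/decode idea on a sorted byte column:

  import java.io.ByteArrayOutputStream;
  import java.util.Arrays;

  // Illustrative run-length coding of a sorted column of lead bytes, analogous
  // to the (value, runLen) pairs written for the byte at compressedByteOffset.
  public class RunLengthSketch {

    static byte[] encode(byte[] sortedLeadBytes) {
      ByteArrayOutputStream out = new ByteArrayOutputStream();
      for (int i = 0; i < sortedLeadBytes.length; ) {
        int runLen = 1;
        // cap runs at 0xff so the length fits in a single byte
        while (i + runLen < sortedLeadBytes.length
            && runLen < 0xff
            && sortedLeadBytes[i + runLen] == sortedLeadBytes[i]) {
          runLen++;
        }
        out.write(sortedLeadBytes[i]); // the repeated byte value
        out.write(runLen);             // how many consecutive values share it
        i += runLen;
      }
      return out.toByteArray();
    }

    static byte[] decode(byte[] encoded, int count) {
      byte[] result = new byte[count];
      int upto = 0;
      for (int i = 0; i < encoded.length; i += 2) {
        byte value = encoded[i];
        int runLen = Byte.toUnsignedInt(encoded[i + 1]);
        for (int j = 0; j < runLen; j++) {
          result[upto++] = value;
        }
      }
      return result;
    }

    public static void main(String[] args) {
      byte[] column = {3, 3, 3, 3, 7, 7, 9};
      byte[] encoded = encode(column);
      System.out.println(column.length + " values -> " + encoded.length + " bytes");
      System.out.println(Arrays.equals(column, decode(encoded, column.length))); // true
    }
  }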

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/866398be/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java
index 6dfdac2..09e6412 100644
--- a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java
@@ -22,9 +22,12 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Comparator;
 import java.util.List;
+import java.util.function.IntFunction;
 
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.index.MergeState;
+import org.apache.lucene.index.PointValues.IntersectVisitor;
+import org.apache.lucene.index.PointValues.Relation;
 import org.apache.lucene.store.ChecksumIndexInput;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
@@ -43,7 +46,6 @@ import org.apache.lucene.util.PriorityQueue;
 import org.apache.lucene.util.StringHelper;
 
 // TODO
-//   - the compression is somewhat stupid now (delta vInt for 1024 docIDs, no compression for the byte[] values even though they have high locality)
 //   - allow variable length byte[] (across docs and dims), but this is quite a bit more hairy
 //   - we could also index "auto-prefix terms" here, and use better compression, and maybe only use for the "fully contained" case so we'd
 //     only index docIDs
@@ -60,7 +62,7 @@ import org.apache.lucene.util.StringHelper;
  *  the requested <code>maxPointsInLeafNode</code>.  Values that fall exactly
  *  on a cell boundary may be in either cell.
  *
- *  <p>The number of dimensions can be 1 to 255, but every byte[] value is fixed length.
+ *  <p>The number of dimensions can be 1 to 8, but every byte[] value is fixed length.
  *
  *  <p>
  *  See <a href="https://www.cs.duke.edu/~pankaj/publications/papers/bkd-sstd.pdf">this paper</a> for details.
@@ -69,7 +71,7 @@ import org.apache.lucene.util.StringHelper;
  *  and then uses up to the specified {@code maxMBSortInHeap} heap space for writing.
  *
  *  <p>
- *  <b>NOTE</b>: This can write at most Integer.MAX_VALUE * <code>maxPointsInLeafNode</code> total points, and
+ *  <b>NOTE</b>: This can write at most Integer.MAX_VALUE * <code>maxPointsInLeafNode</code> total points.
  *
  * @lucene.experimental */
 
@@ -78,7 +80,8 @@ public class BKDWriter implements Closeable {
   public static final String CODEC_NAME = "BKD";
   public static final int VERSION_START = 0;
   public static final int VERSION_COMPRESSED_DOC_IDS = 1;
-  public static final int VERSION_CURRENT = VERSION_COMPRESSED_DOC_IDS;
+  public static final int VERSION_COMPRESSED_VALUES = 2;
+  public static final int VERSION_CURRENT = VERSION_COMPRESSED_VALUES;
 
   /** How many bytes each docs takes in the fixed-width offline format */
   private final int bytesPerDoc;
@@ -312,6 +315,8 @@ public class BKDWriter implements Closeable {
     /** Which leaf block we are up to */
     private int blockID;
 
+    private final byte[] packedValues;
+
     public MergeReader(BKDReader bkd, MergeState.DocMap docMap) throws IOException {
       this.bkd = bkd;
       state = new BKDReader.IntersectState(bkd.in.clone(),
@@ -327,6 +332,7 @@ public class BKDWriter implements Closeable {
         //System.out.println("  leaf fp=" + fp);
       }
       state.in.seek(minFP);
+      this.packedValues = new byte[bkd.maxPointsInLeafNode * bkd.packedBytesLength];
     }
 
     public boolean next() throws IOException {
@@ -341,18 +347,33 @@ public class BKDWriter implements Closeable {
           docsInBlock = bkd.readDocIDs(state.in, state.in.getFilePointer(), state.scratchDocIDs);
           assert docsInBlock > 0;
           docBlockUpto = 0;
-          for(int dim=0;dim<bkd.numDims;dim++) {
-            int prefix = state.in.readVInt();
-            state.commonPrefixLengths[dim] = prefix;
-            if (prefix > 0) {
-              state.in.readBytes(state.scratchPackedValue, dim*bkd.bytesPerDim, prefix);
+          bkd.visitDocValues(state.commonPrefixLengths, state.scratchPackedValue, state.in, state.scratchDocIDs, docsInBlock, new IntersectVisitor() {
+            int i = 0;
+
+            @Override
+            public void visit(int docID) throws IOException {
+              throw new UnsupportedOperationException();
             }
-          }
+
+            @Override
+            public void visit(int docID, byte[] packedValue) throws IOException {
+              assert docID == state.scratchDocIDs[i];
+              System.arraycopy(packedValue, 0, packedValues, i * bkd.packedBytesLength, bkd.packedBytesLength);
+              i++;
+            }
+
+            @Override
+            public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
+              throw new UnsupportedOperationException();
+            }
+
+          });
 
           blockID++;
         }
 
-        int oldDocID = state.scratchDocIDs[docBlockUpto++];
+        final int index = docBlockUpto++;
+        int oldDocID = state.scratchDocIDs[index];
 
         int mappedDocID;
         if (docMap == null) {
@@ -360,13 +381,11 @@ public class BKDWriter implements Closeable {
         } else {
           mappedDocID = docMap.get(oldDocID);
         }
-        for(int dim=0;dim<bkd.numDims;dim++) {
-          int prefix = state.commonPrefixLengths[dim];
-          state.in.readBytes(state.scratchPackedValue, dim*bkd.bytesPerDim + prefix, bkd.bytesPerDim - prefix);
-        }
+        
         if (mappedDocID != -1) {
           // Not deleted!
           docID = mappedDocID;
+          System.arraycopy(packedValues, index * bkd.packedBytesLength, state.scratchPackedValue, 0, bkd.packedBytesLength);
           return true;
         }
       }
@@ -518,10 +537,21 @@ public class BKDWriter implements Closeable {
         writeLeafBlockDocs(out, leafBlockDocIDs, 0, leafCount);
         writeCommonPrefixes(out, commonPrefixLengths, firstPackedValue);
 
-        // Write the full values:
-        for (int i=0;i<leafCount;i++) {
-          writeLeafBlockPackedValue(out, commonPrefixLengths, leafBlockPackedValues[i], 0);
-        }
+        final IntFunction<BytesRef> packedValues = new IntFunction<BytesRef>() {
+          final BytesRef scratch = new BytesRef();
+
+          {
+            scratch.length = packedBytesLength;
+            scratch.offset = 0;
+          }
+
+          @Override
+          public BytesRef apply(int i) {
+            scratch.bytes = leafBlockPackedValues[i];
+            return scratch;
+          }
+        };
+        writeLeafBlockPackedValues(out, commonPrefixLengths, leafCount, 0, packedValues);
 
         leafCount = 0;
       }
@@ -896,13 +926,57 @@ public class BKDWriter implements Closeable {
     DocIdsWriter.writeDocIds(docIDs, start, count, out);
   }
 
-  protected void writeLeafBlockPackedValue(IndexOutput out, int[] commonPrefixLengths, byte[] bytes, int offset) throws IOException {
-    for(int dim=0;dim<numDims;dim++) {
-      int prefix = commonPrefixLengths[dim];
-      out.writeBytes(bytes, offset+dim*bytesPerDim+prefix, bytesPerDim-prefix);
+  protected void writeLeafBlockPackedValues(IndexOutput out, int[] commonPrefixLengths, int count, int sortedDim, IntFunction<BytesRef> packedValues) throws IOException {
+    int prefixLenSum = Arrays.stream(commonPrefixLengths).sum();
+    if (prefixLenSum == packedBytesLength) {
+      // all values in this block are equal
+      out.writeByte((byte) -1);
+    } else {
+      assert commonPrefixLengths[sortedDim] < bytesPerDim;
+      out.writeByte((byte) sortedDim);
+      int compressedByteOffset = sortedDim * bytesPerDim + commonPrefixLengths[sortedDim];
+      commonPrefixLengths[sortedDim]++;
+      for (int i = 0; i < count; ) {
+        // do run-length compression on the byte at compressedByteOffset 
+        int runLen = runLen(packedValues, i, Math.min(i + 0xff, count), compressedByteOffset);
+        assert runLen <= 0xff;
+        BytesRef first = packedValues.apply(i);
+        byte prefixByte = first.bytes[first.offset + compressedByteOffset];
+        out.writeByte(prefixByte);
+        out.writeByte((byte) runLen);
+        writeLeafBlockPackedValuesRange(out, commonPrefixLengths, i, i + runLen, packedValues);
+        i += runLen;
+        assert i <= count;
+      }
     }
   }
 
+  private void writeLeafBlockPackedValuesRange(IndexOutput out, int[] commonPrefixLengths, int start, int end, IntFunction<BytesRef> packedValues) throws IOException {
+    for (int i = start; i < end; ++i) {
+      BytesRef ref = packedValues.apply(i);
+      assert ref.length == packedBytesLength;
+
+      for(int dim=0;dim<numDims;dim++) {
+        int prefix = commonPrefixLengths[dim];
+        out.writeBytes(ref.bytes, ref.offset + dim*bytesPerDim + prefix, bytesPerDim-prefix);
+      }
+    }
+  }
+
+  private static int runLen(IntFunction<BytesRef> packedValues, int start, int end, int byteOffset) {
+    BytesRef first = packedValues.apply(start);
+    byte b = first.bytes[first.offset + byteOffset];
+    for (int i = start + 1; i < end; ++i) {
+      BytesRef ref = packedValues.apply(i);
+      byte b2 = ref.bytes[ref.offset + byteOffset];
+      assert Byte.toUnsignedInt(b2) >= Byte.toUnsignedInt(b);
+      if (b != b2) {
+        return i - start;
+      }
+    }
+    return end - start;
+  }
+
   protected void writeCommonPrefixes(IndexOutput out, int[] commonPrefixes, byte[] packedValue) throws IOException {
     for(int dim=0;dim<numDims;dim++) {
       out.writeVInt(commonPrefixes[dim]);
@@ -1058,6 +1132,11 @@ public class BKDWriter implements Closeable {
     if (nodeID >= leafNodeOffset) {
 
       // Leaf node: write block
+      // We can write the block in any order so by default we write it sorted by the dimension that has the
+      // least number of unique bytes at commonPrefixLengths[dim], which makes compression more efficient
+      int sortedDim = 0;
+      int sortedDimCardinality = Integer.MAX_VALUE;
+
       for (int dim=0;dim<numDims;dim++) {
         if (slices[dim].writer instanceof HeapPointWriter == false) {
           // Adversarial cases can cause this, e.g. very lopsided data, all equal points, such that we started
@@ -1081,9 +1160,29 @@ public class BKDWriter implements Closeable {
             break;
           }
         }
+
+        int prefix = commonPrefixLengths[dim];
+        if (prefix < bytesPerDim) {
+          int cardinality = 1;
+          byte previous = scratch1[offset + prefix];
+          for (long i = 1; i < source.count; ++i) {
+            heapSource.readPackedValue(Math.toIntExact(source.start + i), scratch2);
+            byte b = scratch2[offset + prefix];
+            assert Byte.toUnsignedInt(previous) <= Byte.toUnsignedInt(b);
+            if (b != previous) {
+              cardinality++;
+              previous = b;
+            }
+          }
+          assert cardinality <= 256;
+          if (cardinality < sortedDimCardinality) {
+            sortedDim = dim;
+            sortedDimCardinality = cardinality;
+          }
+        }
       }
 
-      PathSlice source = slices[0];
+      PathSlice source = slices[sortedDim];
 
       // We ensured that maxPointsSortInHeap was >= maxPointsInLeafNode, so we better be in heap at this point:
       HeapPointWriter heapSource = (HeapPointWriter) source.writer;
@@ -1105,15 +1204,21 @@ public class BKDWriter implements Closeable {
       writeCommonPrefixes(out, commonPrefixLengths, scratch1);
 
       // Write the full values:
-      byte[] lastPackedValue = new byte[bytesPerDim];
-      for (int i=0;i<count;i++) {
-        heapSource.getPackedValueSlice(Math.toIntExact(source.start + i), scratchBytesRef);
-        assert numDims != 1 || valueInOrder(i, lastPackedValue, scratchBytesRef.bytes, scratchBytesRef.offset);
-
-        // Make sure this value does in fact fall within this leaf cell:
-        assert valueInBounds(scratchBytesRef, minPackedValue, maxPackedValue);
-        writeLeafBlockPackedValue(out, commonPrefixLengths, scratchBytesRef.bytes, scratchBytesRef.offset);
-      }
+      IntFunction<BytesRef> packedValues = new IntFunction<BytesRef>() {
+        final BytesRef scratch = new BytesRef();
+
+        {
+          scratch.length = packedBytesLength;
+        }
+
+        @Override
+        public BytesRef apply(int i) {
+          heapSource.getPackedValueSlice(Math.toIntExact(source.start + i), scratch);
+          return scratch;
+        }
+      };
+      assert valuesInOrderAndBounds(count, minPackedValue, maxPackedValue, packedValues);
+      writeLeafBlockPackedValues(out, commonPrefixLengths, count, sortedDim, packedValues);
 
     } else {
       // Inner node: partition/recurse
@@ -1216,6 +1321,20 @@ public class BKDWriter implements Closeable {
   }
 
   // only called from assert
+  private boolean valuesInOrderAndBounds(int count, byte[] minPackedValue, byte[] maxPackedValue, IntFunction<BytesRef> values) throws IOException {
+    byte[] lastPackedValue = new byte[bytesPerDim];
+    for (int i=0;i<count;i++) {
+      BytesRef packedValue = values.apply(i);
+      assert packedValue.length == packedBytesLength;
+      assert numDims != 1 || valueInOrder(i, lastPackedValue, packedValue.bytes, packedValue.offset);
+
+      // Make sure this value does in fact fall within this leaf cell:
+      assert valueInBounds(packedValue, minPackedValue, maxPackedValue);
+    }
+    return true;
+  }
+
+  // only called from assert
   private boolean valueInOrder(long ord, byte[] lastPackedValue, byte[] packedValue, int packedValueOffset) {
     if (ord > 0 && StringHelper.compare(bytesPerDim, lastPackedValue, 0, packedValue, packedValueOffset) > 0) {
       throw new AssertionError("values out of order: last value=" + new BytesRef(lastPackedValue) + " current value=" + new BytesRef(packedValue, packedValueOffset, packedBytesLength) + " ord=" + ord);
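
For readers following the diff above, a brief recap plus a standalone sketch: the new writeLeafBlockPackedValues picks the dimension whose first non-prefix byte has the fewest distinct values (sortedDim), bumps that dimension's common prefix by one, and then run-length encodes that byte as (value, runLength) pairs, each run capped at 0xff so the length fits in a single unsigned byte (a leading -1 marks the all-values-equal case). The sketch below illustrates only the run-length step; the class and method names are invented for illustration and are not part of the patch.

  import java.io.ByteArrayOutputStream;

  // Illustrative sketch, not BKDWriter code: run-length encode one byte column of
  // values that are already sorted on that byte, the way writeLeafBlockPackedValues
  // handles the first non-prefix byte of the sorted dimension.
  public class RunLengthSketch {
    public static byte[] encodeColumn(byte[][] sortedValues, int byteOffset) {
      ByteArrayOutputStream out = new ByteArrayOutputStream();
      int i = 0;
      while (i < sortedValues.length) {
        byte b = sortedValues[i][byteOffset];
        // runs are capped at 0xff so the length always fits in a single byte
        int end = Math.min(i + 0xff, sortedValues.length);
        int runLen = 1;
        while (i + runLen < end && sortedValues[i + runLen][byteOffset] == b) {
          runLen++;
        }
        out.write(b);      // shared byte value for this run
        out.write(runLen); // number of consecutive values sharing it
        i += runLen;
      }
      return out.toByteArray();
    }
  }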

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/866398be/lucene/core/src/test/org/apache/lucene/util/bkd/TestBKD.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/util/bkd/TestBKD.java b/lucene/core/src/test/org/apache/lucene/util/bkd/TestBKD.java
index e8b88fc..9eb1fd3 100644
--- a/lucene/core/src/test/org/apache/lucene/util/bkd/TestBKD.java
+++ b/lucene/core/src/test/org/apache/lucene/util/bkd/TestBKD.java
@@ -507,6 +507,35 @@ public class TestBKD extends LuceneTestCase {
     verify(docValues, null, numDims, numBytesPerDim);
   }
 
+  // this should trigger run-length compression with lengths that are greater than 255
+  public void testOneDimTwoValues() throws Exception {
+    int numBytesPerDim = TestUtil.nextInt(random(), 2, 30);
+    int numDims = TestUtil.nextInt(random(), 1, 5);
+
+    int numDocs = atLeast(1000);
+    int theDim = random().nextInt(numDims);
+    byte[] value1 = new byte[numBytesPerDim];
+    random().nextBytes(value1);
+    byte[] value2 = new byte[numBytesPerDim];
+    random().nextBytes(value2);
+    byte[][][] docValues = new byte[numDocs][][];
+
+    for(int docID=0;docID<numDocs;docID++) {
+      byte[][] values = new byte[numDims][];
+      for(int dim=0;dim<numDims;dim++) {
+        if (dim == theDim) {
+          values[dim] = random().nextBoolean() ? value1 : value2;
+        } else {
+          values[dim] = new byte[numBytesPerDim];
+          random().nextBytes(values[dim]);
+        }
+      }
+      docValues[docID] = values;
+    }
+
+    verify(docValues, null, numDims, numBytesPerDim);
+  }
+
   public void testMultiValued() throws Exception {
     int numBytesPerDim = TestUtil.nextInt(random(), 2, 30);
     int numDims = TestUtil.nextInt(random(), 1, 5);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/866398be/lucene/test-framework/src/java/org/apache/lucene/index/BasePointsFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BasePointsFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BasePointsFormatTestCase.java
index 7c42d1c..5891df5 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BasePointsFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BasePointsFormatTestCase.java
@@ -327,6 +327,35 @@ public abstract class BasePointsFormatTestCase extends BaseIndexFileFormatTestCa
     verify(docValues, null, numDims, numBytesPerDim);
   }
 
+  // this should trigger run-length compression with lengths that are greater than 255
+  public void testOneDimTwoValues() throws Exception {
+    int numBytesPerDim = TestUtil.nextInt(random(), 2, PointValues.MAX_NUM_BYTES);
+    int numDims = TestUtil.nextInt(random(), 1, PointValues.MAX_DIMENSIONS);
+
+    int numDocs = atLeast(1000);
+    int theDim = random().nextInt(numDims);
+    byte[] value1 = new byte[numBytesPerDim];
+    random().nextBytes(value1);
+    byte[] value2 = new byte[numBytesPerDim];
+    random().nextBytes(value2);
+    byte[][][] docValues = new byte[numDocs][][];
+
+    for(int docID=0;docID<numDocs;docID++) {
+      byte[][] values = new byte[numDims][];
+      for(int dim=0;dim<numDims;dim++) {
+        if (dim == theDim) {
+          values[dim] = random().nextBoolean() ? value1 : value2;
+        } else {
+          values[dim] = new byte[numBytesPerDim];
+          random().nextBytes(values[dim]);
+        }
+      }
+      docValues[docID] = values;
+    }
+
+    verify(docValues, null, numDims, numBytesPerDim);
+  }
+
   // Tests on N-dimensional points where each dimension is a BigInteger
   public void testBigIntNDims() throws Exception {
 


[24/51] [abbrv] lucene-solr:apiv2: LUCENE-7382: Fix bug introduced by LUCENE-7355 that used the wrong default AttributeFactory for new Tokenizers

Posted by sa...@apache.org.
LUCENE-7382: Fix bug introduced by LUCENE-7355 that used the wrong default AttributeFactory for new Tokenizers


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/2585c9f3
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/2585c9f3
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/2585c9f3

Branch: refs/heads/apiv2
Commit: 2585c9f3ff750b8e551f261412625aef0e7d4a4b
Parents: 833c8ee
Author: Uwe Schindler <us...@apache.org>
Authored: Sat Jul 16 10:09:40 2016 +0200
Committer: Uwe Schindler <us...@apache.org>
Committed: Sat Jul 16 10:09:40 2016 +0200

----------------------------------------------------------------------
 lucene/CHANGES.txt                                            | 4 ++++
 lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java | 4 ++--
 2 files changed, 6 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2585c9f3/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 92ee7b9..6c62aab 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -65,6 +65,10 @@ Bug Fixes
 * LUCENE-7340: MemoryIndex.toString() could throw NPE; fixed. Renamed to toStringDebug().
   (Daniel Collins, David Smiley)
 
+* LUCENE-7382: Fix bug introduced by LUCENE-7355 that used the
+  wrong default AttributeFactory for new Tokenizers.
+  (Terry Smith, Uwe Schindler)
+
 Improvements
 
 * LUCENE-7323: Compound file writing now verifies the incoming

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2585c9f3/lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java b/lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java
index 0d60d24..aa4b42d 100644
--- a/lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java
+++ b/lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java
@@ -287,9 +287,9 @@ public abstract class Analyzer implements Closeable {
   /** Return the {@link AttributeFactory} to be used for
    *  {@link #tokenStream analysis} and
    *  {@link #normalize(String, String) normalization}. The default
-   *  implementation returns {@link AttributeFactory#DEFAULT_ATTRIBUTE_FACTORY}. */
+   *  implementation returns {@link TokenStream#DEFAULT_TOKEN_ATTRIBUTE_FACTORY}. */
   protected AttributeFactory attributeFactory() {
-    return AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY;
+    return TokenStream.DEFAULT_TOKEN_ATTRIBUTE_FACTORY;
   }
 
   /**
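
A hedged illustration of what this default affects: an Analyzer that still wants the generic factory can override attributeFactory(), the method touched above. The class below is hypothetical and only sketches that override against the 6.x API shown in the diff; most analyzers should simply inherit the corrected default.

  import org.apache.lucene.analysis.Analyzer;
  import org.apache.lucene.analysis.Tokenizer;
  import org.apache.lucene.analysis.core.WhitespaceTokenizer;
  import org.apache.lucene.util.AttributeFactory;

  // Hypothetical example: pin this Analyzer's Tokenizers to the generic factory
  // instead of TokenStream.DEFAULT_TOKEN_ATTRIBUTE_FACTORY.
  public class LegacyFactoryAnalyzer extends Analyzer {
    @Override
    protected AttributeFactory attributeFactory() {
      return AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY;
    }

    @Override
    protected TokenStreamComponents createComponents(String fieldName) {
      Tokenizer source = new WhitespaceTokenizer(attributeFactory());
      return new TokenStreamComponents(source);
    }
  }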


[38/51] [abbrv] lucene-solr:apiv2: LUCENE-7385: fix location of CHANGES.txt entry in master (branch_6x is good)

Posted by sa...@apache.org.
LUCENE-7385: fix location of CHANGES.txt entry in master (branch_6x is good)


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/5c4b7173
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/5c4b7173
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/5c4b7173

Branch: refs/heads/apiv2
Commit: 5c4b7173a8535b76a96a32bdba79d8b89be14dc7
Parents: efef37b
Author: David Smiley <ds...@apache.org>
Authored: Mon Jul 18 22:11:26 2016 -0400
Committer: David Smiley <ds...@apache.org>
Committed: Mon Jul 18 22:11:26 2016 -0400

----------------------------------------------------------------------
 lucene/CHANGES.txt | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5c4b7173/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 4ac3169..34e91b6 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -120,6 +120,8 @@ Improvements
 * LUCENE-7376: Add support for ToParentBlockJoinQuery to fast vector highlighter's
   FieldQuery. (Martijn van Groningen)
 
+* LUCENE-7385: Improve/fix assert messages in SpanScorer. (David Smiley)
+
 Optimizations
 
 * LUCENE-7330, LUCENE-7339: Speed up conjunction queries. (Adrien Grand)
@@ -131,8 +133,6 @@ Optimizations
 * LUCENE-7371: Point values are now better compressed using run-length
   encoding. (Adrien Grand)
 
-* LUCENE-7385: Improve/fix assert messages in SpanScorer. (David Smiley)
-
 Other
 
 * LUCENE-4787: Fixed some highlighting javadocs. (Michael Dodsworth via Adrien


[06/51] [abbrv] lucene-solr:apiv2: add points to back compat indices

Posted by sa...@apache.org.
add points to back compat indices


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/1e794e0e
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/1e794e0e
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/1e794e0e

Branch: refs/heads/apiv2
Commit: 1e794e0ee26d17bdd8669a77592bbafaf758af18
Parents: 5def78b
Author: Mike McCandless <mi...@apache.org>
Authored: Tue Jul 12 10:07:49 2016 -0400
Committer: Mike McCandless <mi...@apache.org>
Committed: Tue Jul 12 10:14:24 2016 -0400

----------------------------------------------------------------------
 .../index/TestBackwardsCompatibility.java       |  36 +++++++++++++++++++
 .../org/apache/lucene/index/index.6.0.0-cfs.zip | Bin 13744 -> 15807 bytes
 .../apache/lucene/index/index.6.0.0-nocfs.zip   | Bin 13749 -> 15806 bytes
 .../org/apache/lucene/index/index.6.0.1-cfs.zip | Bin 13734 -> 15820 bytes
 .../apache/lucene/index/index.6.0.1-nocfs.zip   | Bin 13735 -> 15823 bytes
 5 files changed, 36 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1e794e0e/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
index 11096e4..8226022 100644
--- a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
@@ -38,13 +38,18 @@ import java.util.regex.Pattern;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.document.BinaryDocValuesField;
+import org.apache.lucene.document.BinaryPoint;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.DoubleDocValuesField;
+import org.apache.lucene.document.DoublePoint;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.FloatDocValuesField;
+import org.apache.lucene.document.FloatPoint;
+import org.apache.lucene.document.IntPoint;
 import org.apache.lucene.document.LegacyIntField;
 import org.apache.lucene.document.LegacyLongField;
+import org.apache.lucene.document.LongPoint;
 import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.document.SortedDocValuesField;
 import org.apache.lucene.document.SortedNumericDocValuesField;
@@ -700,6 +705,8 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     final boolean is42Index = MultiFields.getMergedFieldInfos(reader).fieldInfo("dvSortedSet") != null;
     // true if this is a 4.9+ index
     final boolean is49Index = MultiFields.getMergedFieldInfos(reader).fieldInfo("dvSortedNumeric") != null;
+    // true if this index has points (>= 6.0)
+    final boolean hasPoints = MultiFields.getMergedFieldInfos(reader).fieldInfo("intPoint1d") != null;
 
     assert is40Index;
 
@@ -831,6 +838,23 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     hits = searcher.search(new TermQuery(new Term("utf8", "ab\ud917\udc17cd")), 1000).scoreDocs;
     assertEquals(34, hits.length);
 
+    if (hasPoints) {
+      doTestHits(searcher.search(IntPoint.newRangeQuery("intPoint1d", 0, 34), 1000).scoreDocs, 34, searcher.getIndexReader());
+      doTestHits(searcher.search(IntPoint.newRangeQuery("intPoint2d", new int[] {0, 0}, new int[] {34, 68}), 1000).scoreDocs, 34, searcher.getIndexReader());
+      doTestHits(searcher.search(FloatPoint.newRangeQuery("floatPoint1d", 0f, 34f), 1000).scoreDocs, 34, searcher.getIndexReader());
+      doTestHits(searcher.search(FloatPoint.newRangeQuery("floatPoint2d", new float[] {0f, 0f}, new float[] {34f, 68f}), 1000).scoreDocs, 34, searcher.getIndexReader());
+      doTestHits(searcher.search(LongPoint.newRangeQuery("longPoint1d", 0, 34), 1000).scoreDocs, 34, searcher.getIndexReader());
+      doTestHits(searcher.search(LongPoint.newRangeQuery("longPoint2d", new long[] {0, 0}, new long[] {34, 68}), 1000).scoreDocs, 34, searcher.getIndexReader());
+      doTestHits(searcher.search(DoublePoint.newRangeQuery("doublePoint1d", 0.0, 34.0), 1000).scoreDocs, 34, searcher.getIndexReader());
+      doTestHits(searcher.search(DoublePoint.newRangeQuery("doublePoint2d", new double[] {0.0, 0.0}, new double[] {34.0, 68.0}), 1000).scoreDocs, 34, searcher.getIndexReader());
+      
+      byte[] bytes1 = new byte[4];
+      byte[] bytes2 = new byte[] {0, 0, 0, (byte) 34};
+      doTestHits(searcher.search(BinaryPoint.newRangeQuery("binaryPoint1d", bytes1, bytes2), 1000).scoreDocs, 34, searcher.getIndexReader());
+      byte[] bytes3 = new byte[] {0, 0, 0, (byte) 68};
+      doTestHits(searcher.search(BinaryPoint.newRangeQuery("binaryPoint2d", new byte[][] {bytes1, bytes1}, new byte[][] {bytes2, bytes3}), 1000).scoreDocs, 34, searcher.getIndexReader());
+    }
+
     reader.close();
   }
 
@@ -983,6 +1007,18 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     doc.add(new NumericDocValuesField("dvShort", (short)id));
     doc.add(new SortedSetDocValuesField("dvSortedSet", ref));
     doc.add(new SortedNumericDocValuesField("dvSortedNumeric", id));
+
+    doc.add(new IntPoint("intPoint1d", id));
+    doc.add(new IntPoint("intPoint2d", id, 2*id));
+    doc.add(new FloatPoint("floatPoint1d", (float) id));
+    doc.add(new FloatPoint("floatPoint2d", (float) id, (float) 2*id));
+    doc.add(new LongPoint("longPoint1d", id));
+    doc.add(new LongPoint("longPoint2d", id, 2*id));
+    doc.add(new DoublePoint("doublePoint1d", (double) id));
+    doc.add(new DoublePoint("doublePoint2d", (double) id, (double) 2*id));
+    doc.add(new BinaryPoint("binaryPoint1d", bytes));
+    doc.add(new BinaryPoint("binaryPoint2d", bytes, bytes));
+    
     // a field with both offsets and term vectors for a cross-check
     FieldType customType3 = new FieldType(TextField.TYPE_STORED);
     customType3.setStoreTermVectors(true);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1e794e0e/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.0.0-cfs.zip
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.0.0-cfs.zip b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.0.0-cfs.zip
index 2993970..c8622df 100644
Binary files a/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.0.0-cfs.zip and b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.0.0-cfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1e794e0e/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.0.0-nocfs.zip
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.0.0-nocfs.zip b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.0.0-nocfs.zip
index 55b5cc1..3c245d1 100644
Binary files a/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.0.0-nocfs.zip and b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.0.0-nocfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1e794e0e/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.0.1-cfs.zip
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.0.1-cfs.zip b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.0.1-cfs.zip
index 4b8161f..f10f1a8 100644
Binary files a/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.0.1-cfs.zip and b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.0.1-cfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1e794e0e/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.0.1-nocfs.zip
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.0.1-nocfs.zip b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.0.1-nocfs.zip
index 051b0ad..d45b7fd 100644
Binary files a/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.0.1-nocfs.zip and b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.0.1-nocfs.zip differ


[15/51] [abbrv] lucene-solr:apiv2: LUCENE-7372: Factor out an org.apache.lucene.search.FilterWeight class.

Posted by sa...@apache.org.
LUCENE-7372: Factor out an org.apache.lucene.search.FilterWeight class.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/15e8719b
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/15e8719b
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/15e8719b

Branch: refs/heads/apiv2
Commit: 15e8719b8aa80b1e7e8deeba6bf8bec99f663ac8
Parents: 415d321
Author: Christine Poerschke <cp...@apache.org>
Authored: Wed Jul 13 16:04:24 2016 +0100
Committer: Christine Poerschke <cp...@apache.org>
Committed: Wed Jul 13 16:04:24 2016 +0100

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |  3 +
 .../org/apache/lucene/search/FilterWeight.java  | 73 ++++++++++++++++++++
 .../apache/lucene/search/TestFilterWeight.java  | 67 ++++++++++++++++++
 .../apache/lucene/search/TestLRUQueryCache.java | 17 +----
 .../join/GlobalOrdinalsWithScoreQuery.java      | 10 ++-
 .../search/join/ToChildBlockJoinQuery.java      | 19 ++---
 .../search/join/ToParentBlockJoinQuery.java     | 19 ++---
 .../apache/lucene/search/AssertingWeight.java   | 19 +----
 .../lucene/search/RandomApproximationQuery.java | 21 +-----
 .../search/TestBaseExplanationTestCase.java     | 14 +---
 .../apache/solr/search/ReRankQParserPlugin.java | 24 ++-----
 11 files changed, 170 insertions(+), 116 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/15e8719b/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 16d9d41..ec8ecf0 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -132,6 +132,9 @@ Other
 
 * LUCENE-7360: Explanation.toHtml() is deprecated. (Alan Woodward)
   
+* LUCENE-7372: Factor out an org.apache.lucene.search.FilterWeight class.
+  (Christine Poerschke, Adrien Grand, David Smiley)
+
 ======================= Lucene 6.1.0 =======================
 
 New Features

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/15e8719b/lucene/core/src/java/org/apache/lucene/search/FilterWeight.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/FilterWeight.java b/lucene/core/src/java/org/apache/lucene/search/FilterWeight.java
new file mode 100644
index 0000000..2053067
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/search/FilterWeight.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.search;
+
+import java.io.IOException;
+import java.util.Set;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.Term;
+
+/**
+ * A {@code FilterWeight} contains another {@code Weight} and implements
+ * all abstract methods by calling the contained weight's method.
+ *
+ * Note that {@code FilterWeight} does not override the non-abstract
+ * {@link Weight#bulkScorer(LeafReaderContext)} method and subclasses of
+ * {@code FilterWeight} must provide their bulkScorer implementation
+ * if required.
+ *
+ * @lucene.internal
+ */
+public abstract class FilterWeight extends Weight {
+
+  final protected Weight in;
+
+  /**
+   * Default constructor.
+   */
+  protected FilterWeight(Weight weight) {
+    this(weight.getQuery(), weight);
+  }
+
+  /**
+   * Alternative constructor.
+   * Use this variant only if the <code>weight</code> was not obtained
+   * via the {@link Query#createWeight(IndexSearcher, boolean, float)}
+   * method of the <code>query</code> object.
+   */
+  protected FilterWeight(Query query, Weight weight) {
+    super(query);
+    this.in = weight;
+  }
+
+  @Override
+  public void extractTerms(Set<Term> terms) {
+    in.extractTerms(terms);
+  }
+
+  @Override
+  public Explanation explain(LeafReaderContext context, int doc) throws IOException {
+    return in.explain(context, doc);
+  }
+
+  @Override
+  public Scorer scorer(LeafReaderContext context) throws IOException {
+    return in.scorer(context);
+  }
+
+}
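
A minimal sketch of the intended usage pattern, assuming only the API in this new file: a delegating Weight extends FilterWeight, forwards to the protected in field, and overrides just the methods whose behaviour differs (the hunks further down migrate WeightWrapper, the join weights, AssertingWeight and ReRankWeight this way). The class below is hypothetical, not part of the commit.

  import java.io.IOException;
  import org.apache.lucene.index.LeafReaderContext;
  import org.apache.lucene.search.FilterWeight;
  import org.apache.lucene.search.Scorer;
  import org.apache.lucene.search.Weight;

  // Hypothetical subclass for illustration: delegate everything except scorer().
  class CountingWeight extends FilterWeight {
    int scorerCalls;

    CountingWeight(Weight in) {
      super(in);
    }

    @Override
    public Scorer scorer(LeafReaderContext context) throws IOException {
      scorerCalls++;
      return in.scorer(context);
    }
  }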

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/15e8719b/lucene/core/src/test/org/apache/lucene/search/TestFilterWeight.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestFilterWeight.java b/lucene/core/src/test/org/apache/lucene/search/TestFilterWeight.java
new file mode 100644
index 0000000..cfa01bf
--- /dev/null
+++ b/lucene/core/src/test/org/apache/lucene/search/TestFilterWeight.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.search;
+
+import java.lang.reflect.Method;
+import java.lang.reflect.Modifier;
+
+import org.apache.lucene.util.LuceneTestCase;
+import org.junit.Test;
+
+public class TestFilterWeight extends LuceneTestCase {
+
+  @Test
+  public void testDeclaredMethodsOverridden() throws Exception {
+    final Class<?> subClass = FilterWeight.class;
+    implTestDeclaredMethodsOverridden(subClass.getSuperclass(), subClass);
+  }
+
+  private void implTestDeclaredMethodsOverridden(Class<?> superClass, Class<?> subClass) throws Exception {
+    for (final Method superClassMethod : superClass.getDeclaredMethods()) {
+      final int modifiers = superClassMethod.getModifiers();
+      if (Modifier.isFinal(modifiers)) continue;
+      if (Modifier.isStatic(modifiers)) continue;
+      if (superClassMethod.getName().equals("bulkScorer")) {
+        try {
+          final Method subClassMethod = subClass.getDeclaredMethod(
+              superClassMethod.getName(),
+              superClassMethod.getParameterTypes());
+          fail(subClass + " must not override\n'" + superClassMethod + "'"
+              + " but it does override\n'" + subClassMethod + "'");
+        } catch (NoSuchMethodException e) {
+          /* FilterWeight must not override the bulkScorer method
+           * since as of July 2016 not all deriving classes use the
+           * {code}return in.bulkScorer(content);{code}
+           * implementation that FilterWeight.bulkScorer would use.
+           */
+          continue;
+        }
+      }
+      try {
+        final Method subClassMethod = subClass.getDeclaredMethod(
+            superClassMethod.getName(),
+            superClassMethod.getParameterTypes());
+        assertEquals("getReturnType() difference",
+            superClassMethod.getReturnType(),
+            subClassMethod.getReturnType());
+      } catch (NoSuchMethodException e) {
+        fail(subClass + " needs to override '" + superClassMethod + "'");
+      }
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/15e8719b/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java b/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
index a2e4e1d..460f4a7 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
@@ -29,7 +29,6 @@ import java.util.HashSet;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -1088,30 +1087,18 @@ public class TestLRUQueryCache extends LuceneTestCase {
     }
   }
 
-  private static class WeightWrapper extends Weight {
+  private static class WeightWrapper extends FilterWeight {
 
-    private final Weight in;
     private final AtomicBoolean scorerCalled;
     private final AtomicBoolean bulkScorerCalled;
 
     protected WeightWrapper(Weight in, AtomicBoolean scorerCalled, AtomicBoolean bulkScorerCalled) {
-      super(in.getQuery());
-      this.in = in;
+      super(in);
       this.scorerCalled = scorerCalled;
       this.bulkScorerCalled = bulkScorerCalled;
     }
 
     @Override
-    public void extractTerms(Set<Term> terms) {
-      in.extractTerms(terms);
-    }
-
-    @Override
-    public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-      return in.explain(context, doc);
-    }
-
-    @Override
     public Scorer scorer(LeafReaderContext context) throws IOException {
       scorerCalled.set(true);
       return in.scorer(context);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/15e8719b/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreQuery.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreQuery.java
index 8781d1e..a192996 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreQuery.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreQuery.java
@@ -22,6 +22,7 @@ import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.MultiDocValues;
 import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.index.Term;
+import org.apache.lucene.search.FilterWeight;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Explanation;
 import org.apache.lucene.search.IndexSearcher;
@@ -102,13 +103,10 @@ final class GlobalOrdinalsWithScoreQuery extends Query {
         '}';
   }
 
-  final class W extends Weight {
-
-    private final Weight approximationWeight;
+  final class W extends FilterWeight {
 
     W(Query query, Weight approximationWeight) {
-      super(query);
-      this.approximationWeight = approximationWeight;
+      super(query, approximationWeight);
     }
 
     @Override
@@ -148,7 +146,7 @@ final class GlobalOrdinalsWithScoreQuery extends Query {
         return null;
       }
 
-      Scorer approximationScorer = approximationWeight.scorer(context);
+      Scorer approximationScorer = in.scorer(context);
       if (approximationScorer == null) {
         return null;
       } else if (globalOrds != null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/15e8719b/lucene/join/src/java/org/apache/lucene/search/join/ToChildBlockJoinQuery.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/ToChildBlockJoinQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/ToChildBlockJoinQuery.java
index b925b31..53f13b6 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/ToChildBlockJoinQuery.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/ToChildBlockJoinQuery.java
@@ -20,11 +20,9 @@ import java.io.IOException;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Locale;
-import java.util.Set;
-
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.Term;
+import org.apache.lucene.search.FilterWeight;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Explanation;
 import org.apache.lucene.search.IndexSearcher;
@@ -90,29 +88,22 @@ public class ToChildBlockJoinQuery extends Query {
     return parentQuery;
   }
 
-  private static class ToChildBlockJoinWeight extends Weight {
-    private final Weight parentWeight;
+  private static class ToChildBlockJoinWeight extends FilterWeight {
     private final BitSetProducer parentsFilter;
     private final boolean doScores;
 
     public ToChildBlockJoinWeight(Query joinQuery, Weight parentWeight, BitSetProducer parentsFilter, boolean doScores) {
-      super(joinQuery);
-      this.parentWeight = parentWeight;
+      super(joinQuery, parentWeight);
       this.parentsFilter = parentsFilter;
       this.doScores = doScores;
     }
 
-    @Override
-    public void extractTerms(Set<Term> terms) {
-      parentWeight.extractTerms(terms);
-    }
-
     // NOTE: acceptDocs applies (and is checked) only in the
     // child document space
     @Override
     public Scorer scorer(LeafReaderContext readerContext) throws IOException {
 
-      final Scorer parentScorer = parentWeight.scorer(readerContext);
+      final Scorer parentScorer = in.scorer(readerContext);
 
       if (parentScorer == null) {
         // No matches
@@ -138,7 +129,7 @@ public class ToChildBlockJoinQuery extends Query {
         return Explanation.match(
           scorer.score(), 
           String.format(Locale.ROOT, "Score based on parent document %d", parentDoc + context.docBase), 
-          parentWeight.explain(context, parentDoc)
+          in.explain(context, parentDoc)
         );
       }
       return Explanation.noMatch("Not a match");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/15e8719b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java
index d5745d9..18a5d20 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java
@@ -20,12 +20,10 @@ import java.io.IOException;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Locale;
-import java.util.Set;
-
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.Term;
+import org.apache.lucene.search.FilterWeight;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Explanation;
 import org.apache.lucene.search.IndexSearcher;
@@ -124,29 +122,22 @@ public class ToParentBlockJoinQuery extends Query {
     return childQuery;
   }
 
-  private static class BlockJoinWeight extends Weight {
-    private final Weight childWeight;
+  private static class BlockJoinWeight extends FilterWeight {
     private final BitSetProducer parentsFilter;
     private final ScoreMode scoreMode;
 
     public BlockJoinWeight(Query joinQuery, Weight childWeight, BitSetProducer parentsFilter, ScoreMode scoreMode) {
-      super(joinQuery);
-      this.childWeight = childWeight;
+      super(joinQuery, childWeight);
       this.parentsFilter = parentsFilter;
       this.scoreMode = scoreMode;
     }
 
-    @Override
-    public void extractTerms(Set<Term> terms) {
-      childWeight.extractTerms(terms);
-    }
-
     // NOTE: acceptDocs applies (and is checked) only in the
     // parent document space
     @Override
     public Scorer scorer(LeafReaderContext readerContext) throws IOException {
 
-      final Scorer childScorer = childWeight.scorer(readerContext);
+      final Scorer childScorer = in.scorer(readerContext);
       if (childScorer == null) {
         // No matches
         return null;
@@ -174,7 +165,7 @@ public class ToParentBlockJoinQuery extends Query {
     public Explanation explain(LeafReaderContext context, int doc) throws IOException {
       BlockJoinScorer scorer = (BlockJoinScorer) scorer(context);
       if (scorer != null && scorer.iterator().advance(doc) == doc) {
-        return scorer.explain(context, childWeight);
+        return scorer.explain(context, in);
       }
       return Explanation.noMatch("Not a match");
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/15e8719b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingWeight.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingWeight.java b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingWeight.java
index 9f24f22..75529df 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingWeight.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingWeight.java
@@ -18,35 +18,20 @@ package org.apache.lucene.search;
 
 import java.io.IOException;
 import java.util.Random;
-import java.util.Set;
-
 import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.Term;
 
-class AssertingWeight extends Weight {
+class AssertingWeight extends FilterWeight {
 
   final Random random;
-  final Weight in;
   final boolean needsScores;
 
   AssertingWeight(Random random, Weight in, boolean needsScores) {
-    super(in.getQuery());
+    super(in);
     this.random = random;
-    this.in = in;
     this.needsScores = needsScores;
   }
 
   @Override
-  public void extractTerms(Set<Term> terms) {
-    in.extractTerms(terms);
-  }
-
-  @Override
-  public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-    return in.explain(context, doc);
-  }
-
-  @Override
   public Scorer scorer(LeafReaderContext context) throws IOException {
     final Scorer inScorer = in.scorer(context);
     assert inScorer == null || inScorer.docID() == -1;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/15e8719b/lucene/test-framework/src/java/org/apache/lucene/search/RandomApproximationQuery.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/RandomApproximationQuery.java b/lucene/test-framework/src/java/org/apache/lucene/search/RandomApproximationQuery.java
index 2db46ff..0bf81e5 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/RandomApproximationQuery.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/RandomApproximationQuery.java
@@ -18,13 +18,10 @@ package org.apache.lucene.search;
 
 import java.io.IOException;
 import java.util.Random;
-import java.util.Set;
-
 import com.carrotsearch.randomizedtesting.generators.RandomInts;
 
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.Term;
 
 /**
  * A {@link Query} that adds random approximations to its scorers.
@@ -70,30 +67,18 @@ public class RandomApproximationQuery extends Query {
     return new RandomApproximationWeight(weight, new Random(random.nextLong()));
   }
 
-  private static class RandomApproximationWeight extends Weight {
+  private static class RandomApproximationWeight extends FilterWeight {
 
-    private final Weight weight;
     private final Random random;
 
     RandomApproximationWeight(Weight weight, Random random) {
-      super(weight.getQuery());
-      this.weight = weight;
+      super(weight);
       this.random = random;
     }
 
     @Override
-    public void extractTerms(Set<Term> terms) {
-      weight.extractTerms(terms);
-    }
-
-    @Override
-    public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-      return weight.explain(context, doc);
-    }
-
-    @Override
     public Scorer scorer(LeafReaderContext context) throws IOException {
-      final Scorer scorer = weight.scorer(context);
+      final Scorer scorer = in.scorer(context);
       if (scorer == null) {
         return null;
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/15e8719b/lucene/test-framework/src/test/org/apache/lucene/search/TestBaseExplanationTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/test/org/apache/lucene/search/TestBaseExplanationTestCase.java b/lucene/test-framework/src/test/org/apache/lucene/search/TestBaseExplanationTestCase.java
index 0c342d9..c421647 100644
--- a/lucene/test-framework/src/test/org/apache/lucene/search/TestBaseExplanationTestCase.java
+++ b/lucene/test-framework/src/test/org/apache/lucene/search/TestBaseExplanationTestCase.java
@@ -17,8 +17,6 @@
 package org.apache.lucene.search;
 
 import java.io.IOException;
-import java.util.Set;
-
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.Term;
 
@@ -78,11 +76,9 @@ public class TestBaseExplanationTestCase extends BaseExplanationTestCase {
     }
   }
   
-  public static final class BrokenExplainWeight extends Weight {
-    final Weight in;
+  public static final class BrokenExplainWeight extends FilterWeight {
     public BrokenExplainWeight(BrokenExplainTermQuery q, Weight in) {
-      super(q);
-      this.in = in;
+      super(q, in);
     }
     public BulkScorer bulkScorer(LeafReaderContext context) throws IOException {
       return in.bulkScorer(context);
@@ -104,11 +100,5 @@ public class TestBaseExplanationTestCase extends BaseExplanationTestCase {
       }
       return result;
     }
-    public void extractTerms(Set<Term> terms) {
-      in.extractTerms(terms);
-    }
-    public Scorer scorer(LeafReaderContext context) throws IOException {
-      return in.scorer(context);
-    }
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/15e8719b/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java
index 03a1d33..37919f4 100644
--- a/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java
@@ -20,14 +20,12 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.Comparator;
 import java.util.Map;
-import java.util.Set;
-
 import com.carrotsearch.hppc.IntFloatHashMap;
 import com.carrotsearch.hppc.IntIntHashMap;
 
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.Term;
+import org.apache.lucene.search.FilterWeight;
 import org.apache.lucene.search.Explanation;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.LeafCollector;
@@ -35,7 +33,6 @@ import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.QueryRescorer;
 import org.apache.lucene.search.ScoreDoc;
-import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.TopDocsCollector;
@@ -179,33 +176,20 @@ public class ReRankQParserPlugin extends QParserPlugin {
     }
   }
 
-  private class ReRankWeight extends Weight{
+  private class ReRankWeight extends FilterWeight {
     private Query reRankQuery;
     private IndexSearcher searcher;
-    private Weight mainWeight;
     private double reRankWeight;
 
     public ReRankWeight(Query mainQuery, Query reRankQuery, double reRankWeight, IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
-      super(mainQuery);
+      super(mainQuery, mainQuery.createWeight(searcher, needsScores, boost));
       this.reRankQuery = reRankQuery;
       this.searcher = searcher;
       this.reRankWeight = reRankWeight;
-      this.mainWeight = mainQuery.createWeight(searcher, needsScores, boost);
-    }
-
-    @Override
-    public void extractTerms(Set<Term> terms) {
-      this.mainWeight.extractTerms(terms);
-
-    }
-
-    @Override
-    public Scorer scorer(LeafReaderContext context) throws IOException {
-      return mainWeight.scorer(context);
     }
 
     public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-      Explanation mainExplain = mainWeight.explain(context, doc);
+      Explanation mainExplain = in.explain(context, doc);
       return new QueryRescorer(reRankQuery) {
         @Override
         protected float combine(float firstPassScore, boolean secondPassMatches, float secondPassScore) {


[12/51] [abbrv] lucene-solr:apiv2: LUCENE-7371: Fix CHANGES entry.

Posted by sa...@apache.org.
LUCENE-7371: Fix CHANGES entry.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/b54d4672
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/b54d4672
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/b54d4672

Branch: refs/heads/apiv2
Commit: b54d46722b36f107edd59a8d843b93f5727a9058
Parents: 866398b
Author: Adrien Grand <jp...@gmail.com>
Authored: Tue Jul 12 18:04:08 2016 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Tue Jul 12 18:04:08 2016 +0200

----------------------------------------------------------------------
 lucene/CHANGES.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b54d4672/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index c68d4df..16d9d41 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -117,7 +117,7 @@ Optimizations
 
 * LUCENE-7351: Doc id compression for points. (Adrien Grand)
 
-* LUCENE-7351: Point values are now better compressed using run-length
+* LUCENE-7371: Point values are now better compressed using run-length
   encoding. (Adrien Grand)
 
 Other


[31/51] [abbrv] lucene-solr:apiv2: Don't fail ASAP on Jenkins builds; run the whole build and fail at the end (Jenkins does this for us with reason "unstable" once the build is done). This allows the build to run completely even though some tests fail.

Posted by sa...@apache.org.
Don't fail ASAP on Jenkins builds; run the whole build and fail at the end (Jenkins does this for us with reason "unstable" once the build is done). This allows the build to run completely even though some tests fail.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/59218b90
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/59218b90
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/59218b90

Branch: refs/heads/apiv2
Commit: 59218b90e115d3baf00099c105b40b78564293b6
Parents: 621527d
Author: Uwe Schindler <us...@apache.org>
Authored: Sun Jul 17 13:02:03 2016 +0200
Committer: Uwe Schindler <us...@apache.org>
Committed: Sun Jul 17 13:02:03 2016 +0200

----------------------------------------------------------------------
 build.xml | 2 ++
 1 file changed, 2 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/59218b90/build.xml
----------------------------------------------------------------------
diff --git a/build.xml b/build.xml
index 53b278f..247ba60 100644
--- a/build.xml
+++ b/build.xml
@@ -762,6 +762,7 @@ Test args: [${args}]</echo>
   <target name="jenkins-hourly">
     <antcall>
       <param name="is.jenkins.build" value="true"/>
+      <param name="tests.haltonfailure" value="false"/>
       <target name="-jenkins-base"/>
     </antcall>
   </target>
@@ -769,6 +770,7 @@ Test args: [${args}]</echo>
   <target name="jenkins-nightly">
     <antcall>
       <param name="is.jenkins.build" value="true"/>
+      <param name="tests.haltonfailure" value="false"/>
       <param name="tests.nightly" value="true"/>
       <target name="-jenkins-base"/>
     </antcall>
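
For local reproduction the same switch can be passed on the Ant command line, e.g. "ant -Dtests.haltonfailure=false test" (an illustrative invocation; the property name is the one added above, and "test" is assumed to be the usual test target).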


[42/51] [abbrv] lucene-solr:apiv2: LUCENE-7384: Remove defunct ScoringWrapperSpans.

Posted by sa...@apache.org.
LUCENE-7384: Remove defunct ScoringWrapperSpans.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/abb81e4d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/abb81e4d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/abb81e4d

Branch: refs/heads/apiv2
Commit: abb81e4dedd05606f91be809d702be0ca8be1caf
Parents: b4c8f56
Author: David Smiley <ds...@apache.org>
Authored: Tue Jul 19 12:45:09 2016 -0400
Committer: David Smiley <ds...@apache.org>
Committed: Tue Jul 19 12:45:09 2016 -0400

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |  7 ++
 .../search/spans/ScoringWrapperSpans.java       | 95 --------------------
 .../lucene/search/spans/SpanNotQuery.java       |  2 +-
 .../apache/lucene/search/spans/SpanOrQuery.java |  2 +-
 4 files changed, 9 insertions(+), 97 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/abb81e4d/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 34e91b6..ec395a3 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -32,6 +32,10 @@ Other
 
 ======================= Lucene 6.2.0 =======================
 
+API Changes
+
+* ScoringWrapperSpans was removed since it had no purpose or effect as of Lucene 5.5.
+
 New Features
 
 * LUCENE-7302: IndexWriter methods that change the index now return a
@@ -148,6 +152,9 @@ Other
 * LUCENE-7372: Factor out an org.apache.lucene.search.FilterWeight class.
   (Christine Poerschke, Adrien Grand, David Smiley)
 
+* LUCENE-7384: Removed ScoringWrapperSpans. And tweaked SpanWeight.buildSimWeight() to
+  reuse the existing Similarity instead of creating a new one. (David Smiley)
+
 ======================= Lucene 6.1.0 =======================
 
 New Features

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/abb81e4d/lucene/core/src/java/org/apache/lucene/search/spans/ScoringWrapperSpans.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/spans/ScoringWrapperSpans.java b/lucene/core/src/java/org/apache/lucene/search/spans/ScoringWrapperSpans.java
deleted file mode 100644
index d38ae83..0000000
--- a/lucene/core/src/java/org/apache/lucene/search/spans/ScoringWrapperSpans.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.search.spans;
-
-
-import java.io.IOException;
-
-import org.apache.lucene.search.similarities.Similarity;
-import org.apache.lucene.search.TwoPhaseIterator;
-
-/**
- * A Spans that wraps another Spans with a different SimScorer
- */
-public class ScoringWrapperSpans extends Spans {
-
-  private final Spans in;
-
-  /**
-   * Creates a new ScoringWrapperSpans
-   * @param spans the scorer to wrap
-   * @param simScorer  the SimScorer to use for scoring
-   */
-  public ScoringWrapperSpans(Spans spans, Similarity.SimScorer simScorer) {
-    this.in = spans;
-  }
-
-  @Override
-  public int nextStartPosition() throws IOException {
-    return in.nextStartPosition();
-  }
-
-  @Override
-  public int startPosition() {
-    return in.startPosition();
-  }
-
-  @Override
-  public int endPosition() {
-    return in.endPosition();
-  }
-
-  @Override
-  public int width() {
-    return in.width();
-  }
-
-  @Override
-  public void collect(SpanCollector collector) throws IOException {
-    in.collect(collector);
-  }
-
-  @Override
-  public int docID() {
-    return in.docID();
-  }
-
-  @Override
-  public int nextDoc() throws IOException {
-    return in.nextDoc();
-  }
-
-  @Override
-  public int advance(int target) throws IOException {
-    return in.advance(target);
-  }
-
-  @Override
-  public long cost() {
-    return in.cost();
-  }
-
-  @Override
-  public TwoPhaseIterator asTwoPhaseIterator() {
-    return in.asTwoPhaseIterator();
-  }
-
-  @Override
-  public float positionsCost() {
-    return in.positionsCost();
-  }
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/abb81e4d/lucene/core/src/java/org/apache/lucene/search/spans/SpanNotQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/spans/SpanNotQuery.java b/lucene/core/src/java/org/apache/lucene/search/spans/SpanNotQuery.java
index 0984bd9..05d3f8e 100644
--- a/lucene/core/src/java/org/apache/lucene/search/spans/SpanNotQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/spans/SpanNotQuery.java
@@ -126,7 +126,7 @@ public final class SpanNotQuery extends SpanQuery {
 
       Spans excludeSpans = excludeWeight.getSpans(context, requiredPostings);
       if (excludeSpans == null) {
-        return new ScoringWrapperSpans(includeSpans, getSimScorer(context));
+        return includeSpans;
       }
 
       TwoPhaseIterator excludeTwoPhase = excludeSpans.asTwoPhaseIterator();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/abb81e4d/lucene/core/src/java/org/apache/lucene/search/spans/SpanOrQuery.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/spans/SpanOrQuery.java b/lucene/core/src/java/org/apache/lucene/search/spans/SpanOrQuery.java
index e273dd9..15abc7d 100644
--- a/lucene/core/src/java/org/apache/lucene/search/spans/SpanOrQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/spans/SpanOrQuery.java
@@ -161,7 +161,7 @@ public final class SpanOrQuery extends SpanQuery {
       if (subSpans.size() == 0) {
         return null;
       } else if (subSpans.size() == 1) {
-        return new ScoringWrapperSpans(subSpans.get(0), getSimScorer(context));
+        return subSpans.get(0);
       }
 
       DisiPriorityQueue byDocQueue = new DisiPriorityQueue(subSpans.size());


[14/51] [abbrv] lucene-solr:apiv2: SOLR-9298: add resources directory to solr/test-framework's pom.xml

Posted by sa...@apache.org.
SOLR-9298: add resources directory to solr/test-framework's pom.xml

This is needed to fix solr/contrib/analysis-extras mvn test failure (SSLTestConfig).

(Christine Poerschke, Steve Rowe)


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/415d3210
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/415d3210
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/415d3210

Branch: refs/heads/apiv2
Commit: 415d3210443800c1a7976c9536b78ffe4f2ba15e
Parents: eefdc62
Author: Christine Poerschke <cp...@apache.org>
Authored: Tue Jul 12 16:57:19 2016 +0100
Committer: Christine Poerschke <cp...@apache.org>
Committed: Wed Jul 13 12:50:26 2016 +0100

----------------------------------------------------------------------
 dev-tools/maven/solr/test-framework/pom.xml.template | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/415d3210/dev-tools/maven/solr/test-framework/pom.xml.template
----------------------------------------------------------------------
diff --git a/dev-tools/maven/solr/test-framework/pom.xml.template b/dev-tools/maven/solr/test-framework/pom.xml.template
index 805bffe..22f2fb0 100644
--- a/dev-tools/maven/solr/test-framework/pom.xml.template
+++ b/dev-tools/maven/solr/test-framework/pom.xml.template
@@ -58,10 +58,7 @@
     <testSourceDirectory>${module-path}/src/test</testSourceDirectory>
     <resources>
       <resource>
-        <directory>${module-path}</directory>
-        <excludes>
-          <exclude>**/*.java</exclude>
-        </excludes>
+        <directory>${module-path}/src/resources</directory>
       </resource>
     </resources>
     <plugins>


[13/51] [abbrv] lucene-solr:apiv2: SOLR-9242: Move license headers to the top + force refresh cluster property before reading the 'location' param

Posted by sa...@apache.org.
SOLR-9242: Move license headers to the top + force refresh cluster property before reading the 'location' param


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/eefdc62c
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/eefdc62c
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/eefdc62c

Branch: refs/heads/apiv2
Commit: eefdc62c997f7936b6db203111d8149dc934b243
Parents: b54d467
Author: Varun Thacker <va...@apache.org>
Authored: Wed Jul 13 14:42:19 2016 +0530
Committer: Varun Thacker <va...@apache.org>
Committed: Wed Jul 13 14:42:19 2016 +0530

----------------------------------------------------------------------
 .../apache/solr/core/backup/BackupManager.java  | 33 ++++++++++----------
 .../backup/repository/BackupRepository.java     |  3 +-
 .../org/apache/solr/handler/SnapShooter.java    |  3 +-
 .../solr/handler/admin/CollectionsHandler.java  |  6 ++++
 .../solr/handler/admin/CoreAdminOperation.java  |  4 +--
 .../AbstractCloudBackupRestoreTestCase.java     | 33 ++++++++++----------
 .../solr/cloud/TestHdfsCloudBackupRestore.java  | 33 ++++++++++----------
 .../cloud/TestLocalFSCloudBackupRestore.java    |  7 ++---
 .../solr/core/TestBackupRepositoryFactory.java  |  3 +-
 .../apache/solr/common/cloud/ZkStateReader.java |  8 +++++
 10 files changed, 70 insertions(+), 63 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eefdc62c/solr/core/src/java/org/apache/solr/core/backup/BackupManager.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/backup/BackupManager.java b/solr/core/src/java/org/apache/solr/core/backup/BackupManager.java
index 0575bff..51227e8 100644
--- a/solr/core/src/java/org/apache/solr/core/backup/BackupManager.java
+++ b/solr/core/src/java/org/apache/solr/core/backup/BackupManager.java
@@ -1,3 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.solr.core.backup;
 
 import java.io.IOException;
@@ -31,23 +47,6 @@ import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
 /**
  * This class implements functionality to create a backup with extension points provided to integrate with different
  * types of file-systems.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eefdc62c/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepository.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepository.java b/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepository.java
index 20d8628..8950ce7 100644
--- a/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepository.java
+++ b/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepository.java
@@ -1,5 +1,3 @@
-package org.apache.solr.core.backup.repository;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -16,6 +14,7 @@ package org.apache.solr.core.backup.repository;
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+package org.apache.solr.core.backup.repository;
 
 import java.io.Closeable;
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eefdc62c/solr/core/src/java/org/apache/solr/handler/SnapShooter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/SnapShooter.java b/solr/core/src/java/org/apache/solr/handler/SnapShooter.java
index 4b39097..5ac3243 100644
--- a/solr/core/src/java/org/apache/solr/handler/SnapShooter.java
+++ b/solr/core/src/java/org/apache/solr/handler/SnapShooter.java
@@ -28,6 +28,7 @@ import java.util.List;
 import java.util.Locale;
 import java.util.function.Consumer;
 
+import com.google.common.base.Preconditions;
 import org.apache.lucene.index.IndexCommit;
 import org.apache.lucene.store.Directory;
 import org.apache.solr.common.SolrException;
@@ -43,8 +44,6 @@ import org.apache.solr.util.RefCounted;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Preconditions;
-
 /**
  * <p> Provides functionality equivalent to the snapshooter script </p>
  * This is no longer used in standard replication.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eefdc62c/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
index 97fbd2d..874e68c 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
@@ -816,6 +816,9 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
 
         String location = repository.getBackupLocation(req.getParams().get(CoreAdminParams.BACKUP_LOCATION));
         if (location == null) {
+          //Refresh the cluster property file to make sure the value set for location is the latest
+          h.coreContainer.getZkController().getZkStateReader().forceUpdateClusterProperties();
+
           // Check if the location is specified in the cluster property.
           location = h.coreContainer.getZkController().getZkStateReader().getClusterProperty(CoreAdminParams.BACKUP_LOCATION, null);
           if (location == null) {
@@ -857,6 +860,9 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
 
         String location = repository.getBackupLocation(req.getParams().get(CoreAdminParams.BACKUP_LOCATION));
         if (location == null) {
+          //Refresh the cluster property file to make sure the value set for location is the latest
+          h.coreContainer.getZkController().getZkStateReader().forceUpdateClusterProperties();
+
           // Check if the location is specified in the cluster property.
           location = h.coreContainer.getZkController().getZkStateReader().getClusterProperty("location", null);
           if (location == null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eefdc62c/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java
index fa3bedd..33fe19a 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java
@@ -860,7 +860,7 @@ enum CoreAdminOperation {
       BackupRepository repository = callInfo.handler.coreContainer.newBackupRepository(Optional.ofNullable(repoName));
 
       String location = repository.getBackupLocation(params.get(CoreAdminParams.BACKUP_LOCATION));
-      if(location == null) {
+      if (location == null) {
         throw new SolrException(ErrorCode.BAD_REQUEST, "'location' is not specified as a query"
             + " parameter or as a default repository property");
       }
@@ -906,7 +906,7 @@ enum CoreAdminOperation {
       BackupRepository repository = callInfo.handler.coreContainer.newBackupRepository(Optional.ofNullable(repoName));
 
       String location = repository.getBackupLocation(params.get(CoreAdminParams.BACKUP_LOCATION));
-      if(location == null) {
+      if (location == null) {
         throw new SolrException(ErrorCode.BAD_REQUEST, "'location' is not specified as a query"
             + " parameter or as a default repository property");
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eefdc62c/solr/core/src/test/org/apache/solr/cloud/AbstractCloudBackupRestoreTestCase.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/AbstractCloudBackupRestoreTestCase.java b/solr/core/src/test/org/apache/solr/cloud/AbstractCloudBackupRestoreTestCase.java
index 96faf92..fd74eaf 100644
--- a/solr/core/src/test/org/apache/solr/cloud/AbstractCloudBackupRestoreTestCase.java
+++ b/solr/core/src/test/org/apache/solr/cloud/AbstractCloudBackupRestoreTestCase.java
@@ -1,3 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.solr.cloud;
 
 import java.io.IOException;
@@ -30,23 +46,6 @@ import org.slf4j.LoggerFactory;
 
 import static org.apache.solr.common.params.ShardParams._ROUTE_;
 
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
 /**
  * This class implements the logic required to test Solr cloud backup/restore capability.
  */

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eefdc62c/solr/core/src/test/org/apache/solr/cloud/TestHdfsCloudBackupRestore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestHdfsCloudBackupRestore.java b/solr/core/src/test/org/apache/solr/cloud/TestHdfsCloudBackupRestore.java
index a09fc2f..5fd7666 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestHdfsCloudBackupRestore.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestHdfsCloudBackupRestore.java
@@ -1,3 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.solr.cloud;
 
 import java.io.IOException;
@@ -19,23 +35,6 @@ import org.junit.BeforeClass;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
 /**
  * This class implements the tests for HDFS integration for Solr backup/restore capability.
  */

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eefdc62c/solr/core/src/test/org/apache/solr/cloud/TestLocalFSCloudBackupRestore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestLocalFSCloudBackupRestore.java b/solr/core/src/test/org/apache/solr/cloud/TestLocalFSCloudBackupRestore.java
index 6f3e2bc..db68913 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestLocalFSCloudBackupRestore.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestLocalFSCloudBackupRestore.java
@@ -1,7 +1,3 @@
-package org.apache.solr.cloud;
-
-import org.junit.BeforeClass;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -18,6 +14,9 @@ import org.junit.BeforeClass;
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+package org.apache.solr.cloud;
+
+import org.junit.BeforeClass;
 
 /**
  * This class implements the tests for local file-system integration for Solr backup/restore capability.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eefdc62c/solr/core/src/test/org/apache/solr/core/TestBackupRepositoryFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestBackupRepositoryFactory.java b/solr/core/src/test/org/apache/solr/core/TestBackupRepositoryFactory.java
index a03d4c4..060cca1 100644
--- a/solr/core/src/test/org/apache/solr/core/TestBackupRepositoryFactory.java
+++ b/solr/core/src/test/org/apache/solr/core/TestBackupRepositoryFactory.java
@@ -1,5 +1,3 @@
-package org.apache.solr.core;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -16,6 +14,7 @@ package org.apache.solr.core;
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+package org.apache.solr.core;
 
 import java.io.File;
 import java.util.HashMap;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/eefdc62c/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
index 3f422fa..227a8b3 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
@@ -845,6 +845,14 @@ public class ZkStateReader implements Closeable {
     loadClusterProperties();
   };
 
+  /**
+   * We should try keeping this to a minimum. Only in scenarios where the value being read is a user facing property
+   * should we force update to make sure we are reading the latest value.
+   */
+  public void forceUpdateClusterProperties() {
+    loadClusterProperties();
+  }
+
   @SuppressWarnings("unchecked")
   private void loadClusterProperties() {
     try {


[08/51] [abbrv] lucene-solr:apiv2: LUCENE-7355: Add Analyzer#normalize() and use it in query parsers.

Posted by sa...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/test-framework/src/java/org/apache/lucene/analysis/MockBytesAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/analysis/MockBytesAnalyzer.java b/lucene/test-framework/src/java/org/apache/lucene/analysis/MockBytesAnalyzer.java
index 01f3d4d..b8cfc5b 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/analysis/MockBytesAnalyzer.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/analysis/MockBytesAnalyzer.java
@@ -16,6 +16,8 @@
  */
 package org.apache.lucene.analysis;
 
+import org.apache.lucene.util.AttributeFactory;
+
 /**
  * Analyzer for testing that encodes terms as UTF-16 bytes.
  */
@@ -26,4 +28,9 @@ public final class MockBytesAnalyzer extends Analyzer {
         MockTokenizer.KEYWORD, false, MockTokenizer.DEFAULT_MAX_TOKEN_LENGTH);
     return new TokenStreamComponents(t);
   }
+
+  @Override
+  protected AttributeFactory attributeFactory() {
+    return MockUTF16TermAttributeImpl.UTF16_TERM_ATTRIBUTE_FACTORY;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/lucene/test-framework/src/java/org/apache/lucene/analysis/MockLowerCaseFilter.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/analysis/MockLowerCaseFilter.java b/lucene/test-framework/src/java/org/apache/lucene/analysis/MockLowerCaseFilter.java
new file mode 100644
index 0000000..b1aea3d
--- /dev/null
+++ b/lucene/test-framework/src/java/org/apache/lucene/analysis/MockLowerCaseFilter.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.analysis;
+
+import java.io.IOException;
+
+import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
+
+/** A lowercasing {@link TokenFilter}. */
+public final class MockLowerCaseFilter extends TokenFilter {
+  private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
+
+  /** Sole constructor. */
+  public MockLowerCaseFilter(TokenStream in) {
+    super(in);
+  }
+  
+  @Override
+  public final boolean incrementToken() throws IOException {
+    if (input.incrementToken()) {
+      CharacterUtils.toLowerCase(termAtt.buffer(), 0, termAtt.length());
+      return true;
+    } else
+      return false;
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e92a38af/solr/core/src/java/org/apache/solr/analysis/TokenizerChain.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/analysis/TokenizerChain.java b/solr/core/src/java/org/apache/solr/analysis/TokenizerChain.java
index c9f263d..a5afbec 100644
--- a/solr/core/src/java/org/apache/solr/analysis/TokenizerChain.java
+++ b/solr/core/src/java/org/apache/solr/analysis/TokenizerChain.java
@@ -18,6 +18,7 @@ package org.apache.solr.analysis;
 
 import org.apache.lucene.analysis.*;
 import org.apache.lucene.analysis.util.CharFilterFactory;
+import org.apache.lucene.analysis.util.MultiTermAwareComponent;
 import org.apache.lucene.analysis.util.TokenFilterFactory;
 import org.apache.lucene.analysis.util.TokenizerFactory;
 
@@ -84,8 +85,21 @@ public final class TokenizerChain extends SolrAnalyzer {
   }
 
   @Override
+  protected Reader initReaderForNormalization(String fieldName, Reader reader) {
+    if (charFilters != null && charFilters.length > 0) {
+      for (CharFilterFactory charFilter : charFilters) {
+        if (charFilter instanceof MultiTermAwareComponent) {
+          charFilter = (CharFilterFactory) ((MultiTermAwareComponent) charFilter).getMultiTermComponent();
+          reader = charFilter.create(reader);
+        }
+      }
+    }
+    return reader;
+  }
+
+  @Override
   protected TokenStreamComponents createComponents(String fieldName) {
-    Tokenizer tk = tokenizer.create();
+    Tokenizer tk = tokenizer.create(attributeFactory());
     TokenStream ts = tk;
     for (TokenFilterFactory filter : filters) {
       ts = filter.create(ts);
@@ -94,6 +108,18 @@ public final class TokenizerChain extends SolrAnalyzer {
   }
 
   @Override
+  protected TokenStream normalize(String fieldName, TokenStream in) {
+    TokenStream result = in;
+    for (TokenFilterFactory filter : filters) {
+      if (filter instanceof MultiTermAwareComponent) {
+        filter = (TokenFilterFactory) ((MultiTermAwareComponent) filter).getMultiTermComponent();
+        result = filter.create(in);
+      }
+    }
+    return result;
+  }
+
+  @Override
   public String toString() {
     StringBuilder sb = new StringBuilder("TokenizerChain(");
     for (CharFilterFactory filter: charFilters) {


[07/51] [abbrv] lucene-solr:apiv2: add points to back compat indices

Posted by sa...@apache.org.
add points to back compat indices


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/ced91403
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/ced91403
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/ced91403

Branch: refs/heads/apiv2
Commit: ced9140368df2f06a0e58f51b11db8aefd95db59
Parents: 1e794e0
Author: Mike McCandless <mi...@apache.org>
Authored: Tue Jul 12 10:22:03 2016 -0400
Committer: Mike McCandless <mi...@apache.org>
Committed: Tue Jul 12 10:23:12 2016 -0400

----------------------------------------------------------------------
 .../org/apache/lucene/index/index.6.1.0-cfs.zip | Bin 13769 -> 15803 bytes
 .../apache/lucene/index/index.6.1.0-nocfs.zip   | Bin 13769 -> 15829 bytes
 2 files changed, 0 insertions(+), 0 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ced91403/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.1.0-cfs.zip
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.1.0-cfs.zip b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.1.0-cfs.zip
index 88bc64c..ff1b952 100644
Binary files a/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.1.0-cfs.zip and b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.1.0-cfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ced91403/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.1.0-nocfs.zip
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.1.0-nocfs.zip b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.1.0-nocfs.zip
index e1dbf73..4e2d6a8 100644
Binary files a/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.1.0-nocfs.zip and b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.1.0-nocfs.zip differ


[16/51] [abbrv] lucene-solr:apiv2: SOLR-9300: fix replace expression in GetMavenDependenciesTask's dependencyToArtifactId (Christine Poerschke, Daniel Collins)

Posted by sa...@apache.org.
SOLR-9300: fix replace expression in GetMavenDependenciesTask's dependencyToArtifactId (Christine Poerschke, Daniel Collins)


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/1e92fc5f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/1e92fc5f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/1e92fc5f

Branch: refs/heads/apiv2
Commit: 1e92fc5f35aedd27b9f57e259e241e560d666515
Parents: 15e8719
Author: Christine Poerschke <cp...@apache.org>
Authored: Wed Jul 13 12:08:14 2016 +0100
Committer: Christine Poerschke <cp...@apache.org>
Committed: Wed Jul 13 17:12:35 2016 +0100

----------------------------------------------------------------------
 .../org/apache/lucene/dependencies/GetMavenDependenciesTask.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1e92fc5f/lucene/tools/src/java/org/apache/lucene/dependencies/GetMavenDependenciesTask.java
----------------------------------------------------------------------
diff --git a/lucene/tools/src/java/org/apache/lucene/dependencies/GetMavenDependenciesTask.java b/lucene/tools/src/java/org/apache/lucene/dependencies/GetMavenDependenciesTask.java
index 4e642ee..45a9d11 100644
--- a/lucene/tools/src/java/org/apache/lucene/dependencies/GetMavenDependenciesTask.java
+++ b/lucene/tools/src/java/org/apache/lucene/dependencies/GetMavenDependenciesTask.java
@@ -640,7 +640,7 @@ public class GetMavenDependenciesTask extends Task {
       // Pattern.compile("(lucene|solr)/build/(.*)/classes/java");
       String artifact = matcher.group(2);
       artifact = artifact.replace('/', '-');
-      artifact = artifact.replace("(?<!solr-)analysis-", "analyzers-");
+      artifact = artifact.replaceAll("(?<!solr-)analysis-", "analyzers-");
       if ("lucene".equals(matcher.group(1))) {
         artifactId.append("lucene-");
       }


[17/51] [abbrv] lucene-solr:apiv2: LUCENE-7013: add licence header position checker to -validate-source-patterns, and fix the violations it found

Posted by sa...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/test/org/apache/solr/cloud/SolrCLIZkUtilsTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/SolrCLIZkUtilsTest.java b/solr/core/src/test/org/apache/solr/cloud/SolrCLIZkUtilsTest.java
index 082b618..78be30b 100644
--- a/solr/core/src/test/org/apache/solr/cloud/SolrCLIZkUtilsTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/SolrCLIZkUtilsTest.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.solr.cloud;
 
 import java.io.ByteArrayOutputStream;
@@ -20,23 +37,6 @@ import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
 public class SolrCLIZkUtilsTest extends SolrCloudTestCase {
 
   @BeforeClass

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/test/org/apache/solr/cloud/TestLockTree.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestLockTree.java b/solr/core/src/test/org/apache/solr/cloud/TestLockTree.java
index 3d9c059..ec50327 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestLockTree.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestLockTree.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.solr.cloud;
 
 import java.lang.invoke.MethodHandles;
@@ -20,23 +37,6 @@ import static org.apache.solr.common.params.CollectionParams.CollectionAction.DE
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.MODIFYCOLLECTION;
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.SPLITSHARD;
 
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
 public class TestLockTree extends SolrTestCaseJ4 {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/test/org/apache/solr/cloud/TestOnReconnectListenerSupport.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestOnReconnectListenerSupport.java b/solr/core/src/test/org/apache/solr/cloud/TestOnReconnectListenerSupport.java
index 052cd1f..165b22a 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestOnReconnectListenerSupport.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestOnReconnectListenerSupport.java
@@ -1,5 +1,3 @@
-package org.apache.solr.cloud;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.cloud;
  * limitations under the License.
  */
 
+package org.apache.solr.cloud;
+
 import java.lang.invoke.MethodHandles;
 import java.util.Set;
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/test/org/apache/solr/cloud/TestSizeLimitedDistributedMap.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestSizeLimitedDistributedMap.java b/solr/core/src/test/org/apache/solr/cloud/TestSizeLimitedDistributedMap.java
index 2131f48..801403a 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestSizeLimitedDistributedMap.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestSizeLimitedDistributedMap.java
@@ -1,5 +1,3 @@
-package org.apache.solr.cloud;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.cloud;
  * limitations under the License.
  */
 
+package org.apache.solr.cloud;
+
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.cloud.SolrZkClient;
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/test/org/apache/solr/cloud/rule/ImplicitSnitchTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/rule/ImplicitSnitchTest.java b/solr/core/src/test/org/apache/solr/cloud/rule/ImplicitSnitchTest.java
index 5ae9710..424b6a5 100644
--- a/solr/core/src/test/org/apache/solr/cloud/rule/ImplicitSnitchTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/rule/ImplicitSnitchTest.java
@@ -1,20 +1,3 @@
-package org.apache.solr.cloud.rule;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import com.google.common.collect.Sets;
-import org.junit.Before;
-import org.junit.Test;
-import org.mockito.Mockito;
-
-import static org.hamcrest.core.Is.is;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.assertTrue;
-import static org.mockito.Matchers.anyString;
-import static org.mockito.Mockito.when;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -32,6 +15,23 @@ import static org.mockito.Mockito.when;
  * limitations under the License.
  */
 
+package org.apache.solr.cloud.rule;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import com.google.common.collect.Sets;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import static org.hamcrest.core.Is.is;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.when;
+
 public class ImplicitSnitchTest {
 
   private ImplicitSnitch snitch;
@@ -184,4 +184,4 @@ public class ImplicitSnitchTest {
     assertFalse(snitch.isKnownTag("ip_5"));
   }
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/test/org/apache/solr/core/BlobRepositoryCloudTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/BlobRepositoryCloudTest.java b/solr/core/src/test/org/apache/solr/core/BlobRepositoryCloudTest.java
index d477c3a..40f5792 100644
--- a/solr/core/src/test/org/apache/solr/core/BlobRepositoryCloudTest.java
+++ b/solr/core/src/test/org/apache/solr/core/BlobRepositoryCloudTest.java
@@ -1,5 +1,21 @@
-package org.apache.solr.core;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 
+package org.apache.solr.core;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
@@ -18,23 +34,6 @@ import org.apache.solr.handler.TestBlobHandler;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
 public class BlobRepositoryCloudTest extends SolrCloudTestCase {
 
   public static final Path TEST_PATH = getFile("solr/configsets").toPath();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java b/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java
index 42454d9..6da1367 100644
--- a/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java
+++ b/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.solr.core;
 
 import java.io.IOException;
@@ -22,24 +39,6 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
 public class BlobRepositoryMockingTest {
 
   private static final Charset UTF8 = Charset.forName("UTF-8");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/test/org/apache/solr/handler/BackupRestoreUtils.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/BackupRestoreUtils.java b/solr/core/src/test/org/apache/solr/handler/BackupRestoreUtils.java
index 6bc7d47..e2f4304 100644
--- a/solr/core/src/test/org/apache/solr/handler/BackupRestoreUtils.java
+++ b/solr/core/src/test/org/apache/solr/handler/BackupRestoreUtils.java
@@ -1,5 +1,3 @@
-package org.apache.solr.handler;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.handler;
  * limitations under the License.
  */
 
+package org.apache.solr.handler;
+
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/test/org/apache/solr/handler/TestHdfsBackupRestoreCore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/TestHdfsBackupRestoreCore.java b/solr/core/src/test/org/apache/solr/handler/TestHdfsBackupRestoreCore.java
index 887ebfe..a840428 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestHdfsBackupRestoreCore.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestHdfsBackupRestoreCore.java
@@ -1,5 +1,3 @@
-package org.apache.solr.handler;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.handler;
  * limitations under the License.
  */
 
+package org.apache.solr.handler;
+
 import java.io.IOException;
 import java.io.InputStream;
 import java.lang.invoke.MethodHandles;
@@ -248,4 +248,4 @@ public class TestHdfsBackupRestoreCore extends SolrCloudTestCase {
       IOUtils.closeQuietly(stream);
     }
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java b/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java
index 8223fe5..7c4e663 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.solr.handler.component;
 
 import org.apache.solr.common.params.ModifiableSolrParams;
@@ -20,23 +37,6 @@ import java.util.stream.Stream;
 
 import static org.junit.Assert.assertEquals;
 
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
 public class ResourceSharingTestComponent extends SearchComponent implements SolrCoreAware {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/test/org/apache/solr/request/TestFacetMethods.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/request/TestFacetMethods.java b/solr/core/src/test/org/apache/solr/request/TestFacetMethods.java
index 29c0ef2..1da41f5 100644
--- a/solr/core/src/test/org/apache/solr/request/TestFacetMethods.java
+++ b/solr/core/src/test/org/apache/solr/request/TestFacetMethods.java
@@ -1,5 +1,3 @@
-package org.apache.solr.request;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.request;
  * limitations under the License.
  */
 
+package org.apache.solr.request;
+
 import org.apache.solr.schema.BoolField;
 import org.apache.solr.schema.SchemaField;
 import org.apache.solr.schema.StrField;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/test/org/apache/solr/response/TestGraphMLResponseWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/response/TestGraphMLResponseWriter.java b/solr/core/src/test/org/apache/solr/response/TestGraphMLResponseWriter.java
index 283f64d..e974b42 100644
--- a/solr/core/src/test/org/apache/solr/response/TestGraphMLResponseWriter.java
+++ b/solr/core/src/test/org/apache/solr/response/TestGraphMLResponseWriter.java
@@ -1,5 +1,3 @@
-package org.apache.solr.response;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.response;
  * limitations under the License.
  */
 
+package org.apache.solr.response;
+
 import java.io.StringWriter;
 import java.util.Map;
 import java.util.HashMap;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/test/org/apache/solr/schema/BooleanFieldTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/BooleanFieldTest.java b/solr/core/src/test/org/apache/solr/schema/BooleanFieldTest.java
index 8f13204..d851d71 100644
--- a/solr/core/src/test/org/apache/solr/schema/BooleanFieldTest.java
+++ b/solr/core/src/test/org/apache/solr/schema/BooleanFieldTest.java
@@ -1,5 +1,3 @@
-package org.apache.solr.schema;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.schema;
  * limitations under the License.
  */
 
+package org.apache.solr.schema;
+
 import org.apache.solr.SolrTestCaseJ4;
 import org.junit.BeforeClass;
 import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaAPI.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaAPI.java b/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaAPI.java
index 3bd4dea..e426e3b 100644
--- a/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaAPI.java
+++ b/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaAPI.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.solr.schema;
 
 import java.io.IOException;
@@ -19,23 +36,6 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
 public class TestManagedSchemaAPI extends SolrCloudTestCase {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/test/org/apache/solr/search/TestGraphTermsQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestGraphTermsQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestGraphTermsQParserPlugin.java
index a4d0211..05ec982 100644
--- a/solr/core/src/test/org/apache/solr/search/TestGraphTermsQParserPlugin.java
+++ b/solr/core/src/test/org/apache/solr/search/TestGraphTermsQParserPlugin.java
@@ -1,5 +1,3 @@
-package org.apache.solr.search;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.search;
  * limitations under the License.
  */
 
+package org.apache.solr.search;
+
 import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.params.ModifiableSolrParams;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorFactoryTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorFactoryTest.java b/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorFactoryTest.java
index 27d8dca..05d112f 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorFactoryTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorFactoryTest.java
@@ -1,5 +1,3 @@
-package org.apache.solr.update.processor;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.update.processor;
  * limitations under the License.
  */
 
+package org.apache.solr.update.processor;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/ShortestPathStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/ShortestPathStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/ShortestPathStream.java
index 6d7b32a..768ce07 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/ShortestPathStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/ShortestPathStream.java
@@ -1,5 +1,3 @@
-package org.apache.solr.client.solrj.io.graph;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.client.solrj.io.graph;
  * limitations under the License.
  */
 
+package org.apache.solr.client.solrj.io.graph;
+
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -539,4 +539,4 @@ public class ShortestPathStream extends TupleStream implements Expressible {
   public StreamComparator getStreamSort() {
     return null;
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ScoreNodesStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ScoreNodesStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ScoreNodesStream.java
index 6c8247f..6a0cfc7 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ScoreNodesStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ScoreNodesStream.java
@@ -1,5 +1,3 @@
-package org.apache.solr.client.solrj.io.stream;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.client.solrj.io.stream;
  * limitations under the License.
  */
 
+package org.apache.solr.client.solrj.io.stream;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Iterator;
@@ -253,4 +253,4 @@ public class ScoreNodesStream extends TupleStream implements Expressible
     return 0;
   }
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java
index 8d3279a..30c6f59 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java
@@ -1,5 +1,3 @@
-package org.apache.solr.client.solrj.io.stream;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.client.solrj.io.stream;
  * limitations under the License.
  */
 
+package org.apache.solr.client.solrj.io.stream;
+
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterProperties.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterProperties.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterProperties.java
index 6645336..e6df845 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterProperties.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterProperties.java
@@ -1,5 +1,3 @@
-package org.apache.solr.common.cloud;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.common.cloud;
  * limitations under the License.
  */
 
+package org.apache.solr.common.cloud;
+
 import java.io.IOException;
 import java.util.Collections;
 import java.util.LinkedHashMap;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/solrj/src/java/org/apache/solr/common/cloud/CollectionStatePredicate.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/CollectionStatePredicate.java b/solr/solrj/src/java/org/apache/solr/common/cloud/CollectionStatePredicate.java
index 0b0a28e..37b00d7 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/CollectionStatePredicate.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/CollectionStatePredicate.java
@@ -1,5 +1,3 @@
-package org.apache.solr.common.cloud;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.common.cloud;
  * limitations under the License.
  */
 
+package org.apache.solr.common.cloud;
+
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/solrj/src/java/org/apache/solr/common/cloud/CollectionStateWatcher.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/CollectionStateWatcher.java b/solr/solrj/src/java/org/apache/solr/common/cloud/CollectionStateWatcher.java
index 0bcbe04..cd42ad6 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/CollectionStateWatcher.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/CollectionStateWatcher.java
@@ -1,5 +1,3 @@
-package org.apache.solr.common.cloud;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -15,7 +13,9 @@ package org.apache.solr.common.cloud;
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
-*/
+ */
+
+package org.apache.solr.common.cloud;
 
 import java.util.Set;
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/solrj/src/java/org/apache/solr/common/cloud/ZkMaintenanceUtils.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkMaintenanceUtils.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkMaintenanceUtils.java
index d508aaa..b7aa3d2 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkMaintenanceUtils.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkMaintenanceUtils.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.solr.common.cloud;
 
 import java.io.IOException;
@@ -17,23 +34,6 @@ import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
 /**
  * Class to hold  ZK upload/download/move common code. With the advent of the upconfig/downconfig/cp/ls/mv commands
  * in bin/solr it made sense to keep the individual transfer methods in a central place, so here it is.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientBuilderTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientBuilderTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientBuilderTest.java
index 5f1c5c2..4c38554 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientBuilderTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientBuilderTest.java
@@ -1,12 +1,3 @@
-package org.apache.solr.client.solrj.impl;
-
-import java.io.IOException;
-import java.util.ArrayList;
-
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.solr.client.solrj.impl.CloudSolrClient.Builder;
-import org.junit.Test;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -24,6 +15,15 @@ import org.junit.Test;
  * limitations under the License.
  */
 
+package org.apache.solr.client.solrj.impl;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.solr.client.solrj.impl.CloudSolrClient.Builder;
+import org.junit.Test;
+
 public class CloudSolrClientBuilderTest extends LuceneTestCase {
   private static final String ANY_CHROOT = "/ANY_CHROOT";
   private static final String ANY_ZK_HOST = "ANY_ZK_HOST";

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClientBuilderTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClientBuilderTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClientBuilderTest.java
index 3389b7f..5f986b7 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClientBuilderTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClientBuilderTest.java
@@ -1,9 +1,3 @@
-package org.apache.solr.client.solrj.impl;
-
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.solr.client.solrj.impl.ConcurrentUpdateSolrClient.Builder;
-import org.junit.Test;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -21,6 +15,12 @@ import org.junit.Test;
  * limitations under the License.
  */
 
+package org.apache.solr.client.solrj.impl;
+
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.solr.client.solrj.impl.ConcurrentUpdateSolrClient.Builder;
+import org.junit.Test;
+
 /**
  * Unit tests for {@link Builder}.
  */

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpSolrClientBuilderTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpSolrClientBuilderTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpSolrClientBuilderTest.java
index da56576..a42e820 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpSolrClientBuilderTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpSolrClientBuilderTest.java
@@ -1,14 +1,3 @@
-package org.apache.solr.client.solrj.impl;
-
-import java.io.IOException;
-
-import org.apache.http.client.HttpClient;
-import org.apache.http.impl.client.HttpClientBuilder;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.solr.client.solrj.ResponseParser;
-import org.apache.solr.client.solrj.impl.HttpSolrClient.Builder;
-import org.junit.Test;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -26,6 +15,17 @@ import org.junit.Test;
  * limitations under the License.
  */
 
+package org.apache.solr.client.solrj.impl;
+
+import java.io.IOException;
+
+import org.apache.http.client.HttpClient;
+import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.solr.client.solrj.ResponseParser;
+import org.apache.solr.client.solrj.impl.HttpSolrClient.Builder;
+import org.junit.Test;
+
 /**
  * Unit tests for {@link Builder}.
  */

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttpSolrClientBuilderTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttpSolrClientBuilderTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttpSolrClientBuilderTest.java
index 6a02da5..83870d0 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttpSolrClientBuilderTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttpSolrClientBuilderTest.java
@@ -1,12 +1,3 @@
-package org.apache.solr.client.solrj.impl;
-
-import org.apache.http.client.HttpClient;
-import org.apache.http.impl.client.HttpClientBuilder;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.solr.client.solrj.impl.LBHttpSolrClient.Builder;
-import org.apache.solr.client.solrj.ResponseParser;
-import org.junit.Test;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -24,6 +15,15 @@ import org.junit.Test;
  * limitations under the License.
  */
 
+package org.apache.solr.client.solrj.impl;
+
+import org.apache.http.client.HttpClient;
+import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.solr.client.solrj.impl.LBHttpSolrClient.Builder;
+import org.apache.solr.client.solrj.ResponseParser;
+import org.junit.Test;
+
 /**
  * Unit tests for {@link Builder}.
  */

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphExpressionTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphExpressionTest.java
index a141b73..dcd5ff4 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphExpressionTest.java
@@ -1,5 +1,3 @@
-package org.apache.solr.client.solrj.io.graph;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.client.solrj.io.graph;
  * limitations under the License.
  */
 
+package org.apache.solr.client.solrj.io.graph;
+
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.InputStream;
@@ -897,4 +897,4 @@ public class GraphExpressionTest extends SolrCloudTestCase {
     return true;
   }
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphTest.java
index b098be0..dea758b 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphTest.java
@@ -1,5 +1,3 @@
-package org.apache.solr.client.solrj.io.graph;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.client.solrj.io.graph;
  * limitations under the License.
  */
 
+package org.apache.solr.client.solrj.io.graph;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashSet;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51d4af68/solr/solrj/src/test/org/apache/solr/common/cloud/TestCollectionStateWatchers.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/common/cloud/TestCollectionStateWatchers.java b/solr/solrj/src/test/org/apache/solr/common/cloud/TestCollectionStateWatchers.java
index 2b2e181..d959aa8 100644
--- a/solr/solrj/src/test/org/apache/solr/common/cloud/TestCollectionStateWatchers.java
+++ b/solr/solrj/src/test/org/apache/solr/common/cloud/TestCollectionStateWatchers.java
@@ -1,5 +1,3 @@
-package org.apache.solr.common.cloud;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,6 +15,8 @@ package org.apache.solr.common.cloud;
  * limitations under the License.
  */
 
+package org.apache.solr.common.cloud;
+
 import java.lang.invoke.MethodHandles;
 import java.util.HashMap;
 import java.util.Set;