Posted to commits@lucene.apache.org by sa...@apache.org on 2016/11/02 23:58:58 UTC

[01/50] [abbrv] lucene-solr:apiv2: SOLR-2094: XPathEntityProcessor should reinitialize the XPathRecordReader instance if the 'forEach' or 'xpath' attributes are templates & it is not a root entity

Repository: lucene-solr
Updated Branches:
  refs/heads/apiv2 c482b339e -> 27baf3fb4


SOLR-2094: XPathEntityProcessor should reinitialize the XPathRecordReader instance if the 'forEach' or 'xpath' attributes are templates & it is not a root entity


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d6b6e747
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d6b6e747
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d6b6e747

Branch: refs/heads/apiv2
Commit: d6b6e74703d5f2d29c110d3a7d9491306af9be2c
Parents: d9c4846
Author: Noble Paul <no...@apache.org>
Authored: Thu Oct 27 14:52:24 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Thu Oct 27 14:52:24 2016 +0530

----------------------------------------------------------------------
 solr/CHANGES.txt                                |   3 +
 .../dataimport/XPathEntityProcessor.java        |  12 +-
 .../dataimport/MockStringDataSource.java        |  54 ++++++++
 .../solr/handler/dataimport/TestDocBuilder.java | 129 ++++++++++++++++---
 4 files changed, 181 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d6b6e747/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 7d9a4fa..ae1d709 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -234,6 +234,9 @@ Bug Fixes
   (Cao Manh Dat, Lance Norskog, Webster Homer, hossman, yonik)
 
 * SOLR-9692: blockUnknown property makes inter-node communication impossible (noble)
+
+* SOLR-2094: XPathEntityProcessor should reinitialize the XPathRecordReader instance if
+  the 'forEach' or 'xpath' attributes are templates & it is not a root entity (Cao Manh Dat, noble)
  
 Optimizations
 ----------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d6b6e747/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java
index 637e1ef..cc28dc4 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java
@@ -85,12 +85,14 @@ public class XPathEntityProcessor extends EntityProcessorBase {
   protected int blockingQueueSize = 1000;
 
   protected Thread publisherThread;
+
+  protected boolean reinitXPathReader = true;
   
   @Override
   @SuppressWarnings("unchecked")
   public void init(Context context) {
     super.init(context);
-    if (xpathReader == null)
+    if (reinitXPathReader)
       initXpathReader(context.getVariableResolver());
     pk = context.getEntityAttribute("pk");
     dataSource = context.getDataSource();
@@ -99,6 +101,7 @@ public class XPathEntityProcessor extends EntityProcessorBase {
   }
 
   private void initXpathReader(VariableResolver resolver) {
+    reinitXPathReader = false;
     useSolrAddXml = Boolean.parseBoolean(context
             .getEntityAttribute(USE_SOLR_ADD_SCHEMA));
     streamRows = Boolean.parseBoolean(context
@@ -147,11 +150,12 @@ public class XPathEntityProcessor extends EntityProcessorBase {
       xpathReader.addField("name", "/add/doc/field/@name", true);
       xpathReader.addField("value", "/add/doc/field", true);
     } else {
-      String forEachXpath = context.getEntityAttribute(FOR_EACH);
+      String forEachXpath = context.getResolvedEntityAttribute(FOR_EACH);
       if (forEachXpath == null)
         throw new DataImportHandlerException(SEVERE,
                 "Entity : " + context.getEntityAttribute("name")
                         + " must have a 'forEach' attribute");
+      if (!forEachXpath.equals(context.getEntityAttribute(FOR_EACH))) reinitXPathReader = true;
 
       try {
         xpathReader = new XPathRecordReader(forEachXpath);
@@ -164,6 +168,10 @@ public class XPathEntityProcessor extends EntityProcessorBase {
           }
           String xpath = field.get(XPATH);
           xpath = context.replaceTokens(xpath);
+          // !xpath.equals(field.get(XPATH)) means the field xpath contains a
+          // template; in that case ensure that the XPathRecordReader is
+          // reinitialized for each xml document
+          if (!xpath.equals(field.get(XPATH)) && !context.isRootEntity()) reinitXPathReader = true;
           xpathReader.addField(field.get(DataImporter.COLUMN),
                   xpath,
                   Boolean.parseBoolean(field.get(DataImporter.MULTI_VALUED)),

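The heart of the fix: XPathRecordReader compiles its paths at construction
time, so if 'forEach' or a field 'xpath' contains a ${...} template, a reader
built for one resolved value cannot be reused for the next row. A minimal
sketch of the detection idea (illustrative only, hypothetical method name):

    // If variable resolution changed the attribute value, it contained a
    // ${...} template, so the compiled XPathRecordReader is row-specific
    // and must be rebuilt on the next init() of a non-root entity.
    static boolean containsResolvedTemplate(String rawAttr, String resolvedAttr) {
      return !resolvedAttr.equals(rawAttr);
    }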
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d6b6e747/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockStringDataSource.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockStringDataSource.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockStringDataSource.java
new file mode 100644
index 0000000..7c9a6d1
--- /dev/null
+++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/MockStringDataSource.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.handler.dataimport;
+
+
+import java.io.Reader;
+import java.io.StringReader;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+public class MockStringDataSource extends DataSource<Reader> {
+
+  private static Map<String, String> cache = new HashMap<>();
+
+  public static void setData(String query,
+                                 String data) {
+    cache.put(query, data);
+  }
+
+  public static void clearCache() {
+    cache.clear();
+  }
+  @Override
+  public void init(Context context, Properties initProps) {
+
+  }
+
+  @Override
+  public Reader getData(String query) {
+    return new StringReader(cache.get(query));
+  }
+
+  @Override
+  public void close() {
+    cache.clear();
+
+  }
+}

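A hedged usage sketch for the new mock (payload illustrative): tests prime it
per query string, and XPathEntityProcessor passes the entity's 'url' attribute
value to getData(query).

    // Register an XML payload under the key that the entity's 'url'
    // attribute will later be resolved to.
    MockStringDataSource.setData("companies.xml", "<companies>...</companies>");
    Reader reader = new MockStringDataSource().getData("companies.xml");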
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d6b6e747/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDocBuilder.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDocBuilder.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDocBuilder.java
index 527dad0..39dd891 100644
--- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDocBuilder.java
+++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDocBuilder.java
@@ -39,9 +39,10 @@ public class TestDocBuilder extends AbstractDataImportHandlerTestCase {
   @After
   public void tearDown() throws Exception {
     MockDataSource.clearCache();
+    MockStringDataSource.clearCache();
     super.tearDown();
   }
-  
+
   @Test
   public void loadClass() throws Exception {
     @SuppressWarnings("unchecked")
@@ -180,6 +181,52 @@ public class TestDocBuilder extends AbstractDataImportHandlerTestCase {
     assertEquals(3, di.getDocBuilder().importStatistics.rowsCount.get());
   }
 
+  @Test
+  public void templateXPath() {
+    DataImporter di = new DataImporter();
+    di.loadAndInit(dc_variableXpath);
+    DIHConfiguration cfg = di.getConfig();
+
+    RequestInfo rp = new RequestInfo(null, createMap("command", "full-import"), null);
+    List<Map<String, Object>> l = new ArrayList<>();
+    l.add(createMap("id", 1, "name", "iphone", "manufacturer", "Apple"));
+    l.add(createMap("id", 2, "name", "ipad", "manufacturer", "Apple"));
+    l.add(createMap("id", 3, "name", "pixel", "manufacturer", "Google"));
+
+    MockDataSource.setIterator("select * from x", l.iterator());
+
+    List<Map<String,Object>> nestedData = new ArrayList<>();
+    nestedData.add(createMap("founded", "Cupertino, California, U.S", "year", "1976", "year2", "1976"));
+    nestedData.add(createMap("founded", "Cupertino, California, U.S", "year", "1976", "year2", "1976"));
+    nestedData.add(createMap("founded", "Menlo Park, California, U.S", "year", "1998", "year2", "1998"));
+
+    MockStringDataSource.setData("companies.xml", xml_attrVariableXpath);
+    MockStringDataSource.setData("companies2.xml", xml_variableXpath);
+    MockStringDataSource.setData("companies3.xml", xml_variableForEach);
+
+    SolrWriterImpl swi = new SolrWriterImpl();
+    di.runCmd(rp, swi);
+    assertEquals(Boolean.TRUE, swi.deleteAllCalled);
+    assertEquals(Boolean.TRUE, swi.commitCalled);
+    assertEquals(Boolean.TRUE, swi.finishCalled);
+    assertEquals(3, swi.docs.size());
+    for (int i = 0; i < l.size(); i++) {
+      SolrInputDocument doc = swi.docs.get(i);
+
+      Map<String, Object> map = l.get(i);
+      for (Map.Entry<String, Object> entry : map.entrySet()) {
+        assertEquals(entry.getValue(), doc.getFieldValue(entry.getKey()));
+      }
+
+      map = nestedData.get(i);
+      for (Map.Entry<String, Object> entry : map.entrySet()) {
+        assertEquals(entry.getValue(), doc.getFieldValue(entry.getKey()));
+      }
+    }
+    assertEquals(1, di.getDocBuilder().importStatistics.queryCount.get());
+    assertEquals(3, di.getDocBuilder().importStatistics.docCount.get());
+  }
+
   static class SolrWriterImpl extends SolrWriter {
     List<SolrInputDocument> docs = new ArrayList<>();
 
@@ -215,21 +262,73 @@ public class TestDocBuilder extends AbstractDataImportHandlerTestCase {
   }
 
   public static final String dc_singleEntity = "<dataConfig>\n"
-          + "<dataSource  type=\"MockDataSource\"/>\n"
-          + "    <document name=\"X\" >\n"
-          + "        <entity name=\"x\" query=\"select * from x\">\n"
-          + "          <field column=\"id\"/>\n"
-          + "          <field column=\"desc\"/>\n"
-          + "          <field column=\"desc\" name=\"desc_s\" />" + "        </entity>\n"
-          + "    </document>\n" + "</dataConfig>";
+      + "<dataSource  type=\"MockDataSource\"/>\n"
+      + "    <document name=\"X\" >\n"
+      + "        <entity name=\"x\" query=\"select * from x\">\n"
+      + "          <field column=\"id\"/>\n"
+      + "          <field column=\"desc\"/>\n"
+      + "          <field column=\"desc\" name=\"desc_s\" />" + "        </entity>\n"
+      + "    </document>\n" + "</dataConfig>";
 
   public static final String dc_deltaConfig = "<dataConfig>\n"
-          + "<dataSource  type=\"MockDataSource\"/>\n"          
-          + "    <document name=\"X\" >\n"
-          + "        <entity name=\"x\" query=\"select * from x\" deltaQuery=\"select id from x\">\n"
-          + "          <field column=\"id\"/>\n"
-          + "          <field column=\"desc\"/>\n"
-          + "          <field column=\"desc\" name=\"desc_s\" />" + "        </entity>\n"
-          + "    </document>\n" + "</dataConfig>";
+      + "<dataSource  type=\"MockDataSource\"/>\n"
+      + "    <document name=\"X\" >\n"
+      + "        <entity name=\"x\" query=\"select * from x\" deltaQuery=\"select id from x\">\n"
+      + "          <field column=\"id\"/>\n"
+      + "          <field column=\"desc\"/>\n"
+      + "          <field column=\"desc\" name=\"desc_s\" />" + "        </entity>\n"
+      + "    </document>\n" + "</dataConfig>";
+
+  public static final String dc_variableXpath = "<dataConfig>\n"
+      + "<dataSource type=\"MockDataSource\"/>\n"
+      + "<dataSource name=\"xml\" type=\"MockStringDataSource\"/>\n"
+      + "    <document name=\"X\" >\n"
+      + "        <entity name=\"x\" query=\"select * from x\">\n"
+      + "          <field column=\"id\"/>\n"
+      + "          <field column=\"name\"/>\n"
+      + "          <field column=\"manufacturer\"/>"
+      + "          <entity name=\"c1\" url=\"companies.xml\" dataSource=\"xml\" forEach=\"/companies/company\" processor=\"XPathEntityProcessor\">"
+      + "            <field column=\"year\" xpath=\"/companies/company/year[@name='p_${x.manufacturer}_s']\" />"
+      + "          </entity>"
+      + "          <entity name=\"c2\" url=\"companies2.xml\" dataSource=\"xml\" forEach=\"/companies/company\" processor=\"XPathEntityProcessor\">"
+      + "            <field column=\"founded\" xpath=\"/companies/company/p_${x.manufacturer}_s/founded\" />"
+      + "          </entity>"
+      + "          <entity name=\"c3\" url=\"companies3.xml\" dataSource=\"xml\" forEach=\"/companies/${x.manufacturer}\" processor=\"XPathEntityProcessor\">"
+      + "            <field column=\"year2\" xpath=\"/companies/${x.manufacturer}/year\" />"
+      + "          </entity>"
+      + "        </entity>\n"
+      + "    </document>\n" + "</dataConfig>";
+
+
+  public static final String xml_variableForEach = "<companies>\n" +
+      "\t<Apple>\n" +
+      "\t\t<year>1976</year>\n" +
+      "\t</Apple>\n" +
+      "\t<Google>\n" +
+      "\t\t<year>1998</year>\n" +
+      "\t</Google>\n" +
+      "</companies>";
+
+  public static final String xml_variableXpath = "<companies>\n" +
+      "\t<company>\n" +
+      "\t\t<p_Apple_s>\n" +
+      "\t\t\t<founded>Cupertino, California, U.S</founded>\n" +
+      "\t\t</p_Apple_s>\t\t\n" +
+      "\t</company>\n" +
+      "\t<company>\n" +
+      "\t\t<p_Google_s>\n" +
+      "\t\t\t<founded>Menlo Park, California, U.S</founded>\n" +
+      "\t\t</p_Google_s>\n" +
+      "\t</company>\n" +
+      "</companies>";
+
+  public static final String xml_attrVariableXpath = "<companies>\n" +
+      "\t<company>\n" +
+      "\t\t<year name='p_Apple_s'>1976</year>\n" +
+      "\t</company>\n" +
+      "\t<company>\n" +
+      "\t\t<year name='p_Google_s'>1998</year>\t\t\n" +
+      "\t</company>\n" +
+      "</companies>";
 
 }

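To make the templated attributes in dc_variableXpath concrete: for the row
where ${x.manufacturer} resolves to Apple, entity c3's reader must be compiled
against the resolved paths, which is what the per-row reinitialization above
guarantees. A sketch using the 3-argument addField seen in this commit
(resolved values illustrative):

    // forEach="/companies/${x.manufacturer}" resolved for manufacturer=Apple:
    XPathRecordReader reader = new XPathRecordReader("/companies/Apple");
    // xpath="/companies/${x.manufacturer}/year" resolved likewise:
    reader.addField("year2", "/companies/Apple/year", false);
    // The next row (manufacturer=Google) resolves differently, hence a fresh
    // reader is required for each row of the parent entity.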

[13/50] [abbrv] lucene-solr:apiv2: SOLR-9132: Fix precommit

Posted by sa...@apache.org.
SOLR-9132: Fix precommit


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/b6e0ab01
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/b6e0ab01
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/b6e0ab01

Branch: refs/heads/apiv2
Commit: b6e0ab01743df112dd7ad49135bd33769b7773b7
Parents: 1f06411
Author: Alan Woodward <ro...@apache.org>
Authored: Fri Oct 28 13:29:13 2016 +0100
Committer: Alan Woodward <ro...@apache.org>
Committed: Fri Oct 28 14:07:51 2016 +0100

----------------------------------------------------------------------
 .../test/org/apache/solr/cloud/CreateCollectionCleanupTest.java  | 1 -
 solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java     | 4 ++--
 2 files changed, 2 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b6e0ab01/solr/core/src/test/org/apache/solr/cloud/CreateCollectionCleanupTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/CreateCollectionCleanupTest.java b/solr/core/src/test/org/apache/solr/cloud/CreateCollectionCleanupTest.java
index df7a2e2..869f66c 100644
--- a/solr/core/src/test/org/apache/solr/cloud/CreateCollectionCleanupTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/CreateCollectionCleanupTest.java
@@ -17,7 +17,6 @@
 
 package org.apache.solr.cloud;
 
-import java.util.ArrayList;
 import java.util.Properties;
 
 import org.apache.solr.client.solrj.impl.CloudSolrClient;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b6e0ab01/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java b/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java
index 0aecdf9..7e30d5c 100644
--- a/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java
@@ -38,7 +38,7 @@ import org.slf4j.LoggerFactory;
 @Slow
 public class RecoveryZkTest extends SolrCloudTestCase {
 
-  private static Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   @BeforeClass
   public static void setupCluster() throws Exception {
@@ -140,7 +140,7 @@ public class RecoveryZkTest extends SolrCloudTestCase {
     }
     for (int j = 1; j < replicas.size(); j++) {
       if (numCounts[j] != numCounts[j - 1])
-        fail("Mismatch in counts between replicas");  // nocommit improve this!
+        fail("Mismatch in counts between replicas");  // TODO improve this!
       if (numCounts[j] == 0 && expectDocs)
         fail("Expected docs on shard " + shard.getName() + " but found none");
     }


[39/50] [abbrv] lucene-solr:apiv2: SOLR-8542: Adds Solr Learning to Rank (LTR) plugin for reranking results with machine learning models. (Michael Nilsson, Diego Ceccarelli, Joshua Pantony, Jon Dorando, Naveen Santhapuri, Alessandro Benedetti, David Groh

Posted by sa...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/rest/ManagedModelStore.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/rest/ManagedModelStore.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/rest/ManagedModelStore.java
new file mode 100644
index 0000000..97aaa40
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/rest/ManagedModelStore.java
@@ -0,0 +1,319 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.store.rest;
+
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.core.SolrCore;
+import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.ltr.feature.Feature;
+import org.apache.solr.ltr.model.LTRScoringModel;
+import org.apache.solr.ltr.model.ModelException;
+import org.apache.solr.ltr.norm.IdentityNormalizer;
+import org.apache.solr.ltr.norm.Normalizer;
+import org.apache.solr.ltr.store.FeatureStore;
+import org.apache.solr.ltr.store.ModelStore;
+import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.rest.BaseSolrResource;
+import org.apache.solr.rest.ManagedResource;
+import org.apache.solr.rest.ManagedResourceObserver;
+import org.apache.solr.rest.ManagedResourceStorage;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Managed resource for storing a model
+ */
+public class ManagedModelStore extends ManagedResource implements ManagedResource.ChildResourceSupport {
+
+  public static void registerManagedModelStore(SolrResourceLoader solrResourceLoader,
+      ManagedResourceObserver managedResourceObserver) {
+    solrResourceLoader.getManagedResourceRegistry().registerManagedResource(
+        REST_END_POINT,
+        ManagedModelStore.class,
+        managedResourceObserver);
+  }
+
+  public static ManagedModelStore getManagedModelStore(SolrCore core) {
+    return (ManagedModelStore) core.getRestManager()
+        .getManagedResource(REST_END_POINT);
+  }
+
+  /** the model store rest endpoint **/
+  public static final String REST_END_POINT = "/schema/model-store";
+  // TODO: reduce from public to package visibility (once tests no longer need public access)
+
+  /**
+   * Managed model store: the name of the attribute containing all the models of
+   * a model store
+   **/
+  private static final String MODELS_JSON_FIELD = "models";
+
+  /** name of the attribute containing a class **/
+  static final String CLASS_KEY = "class";
+  /** name of the attribute containing the features **/
+  static final String FEATURES_KEY = "features";
+  /** name of the attribute containing a name **/
+  static final String NAME_KEY = "name";
+  /** name of the attribute containing a normalizer **/
+  static final String NORM_KEY = "norm";
+  /** name of the attribute containing parameters **/
+  static final String PARAMS_KEY = "params";
+  /** name of the attribute containing a store **/
+  static final String STORE_KEY = "store";
+
+  private final ModelStore store;
+  private ManagedFeatureStore managedFeatureStore;
+
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  public ManagedModelStore(String resourceId, SolrResourceLoader loader,
+      ManagedResourceStorage.StorageIO storageIO) throws SolrException {
+    super(resourceId, loader, storageIO);
+    store = new ModelStore();
+  }
+
+  public void setManagedFeatureStore(ManagedFeatureStore managedFeatureStore) {
+    log.info("INIT model store");
+    this.managedFeatureStore = managedFeatureStore;
+  }
+
+  public ManagedFeatureStore getManagedFeatureStore() {
+    return managedFeatureStore;
+  }
+
+  private Object managedData;
+
+  @SuppressWarnings("unchecked")
+  @Override
+  protected void onManagedDataLoadedFromStorage(NamedList<?> managedInitArgs,
+      Object managedData) throws SolrException {
+    store.clear();
+    // The managed models on disk or in ZooKeeper will be loaded lazily,
+    // since we need to set the managed features first (unfortunately managed
+    // resources do not decouple the creation of a managed resource from the
+    // reading of its data from storage)
+    this.managedData = managedData;
+
+  }
+
+  public void loadStoredModels() {
+    log.info("------ managed models ~ loading ------");
+
+    if ((managedData != null) && (managedData instanceof List)) {
+      final List<Map<String,Object>> up = (List<Map<String,Object>>) managedData;
+      for (final Map<String,Object> u : up) {
+        try {
+          final LTRScoringModel algo = fromLTRScoringModelMap(solrResourceLoader, u, managedFeatureStore);
+          addModel(algo);
+        } catch (final ModelException e) {
+          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
+        }
+      }
+    }
+  }
+
+  public synchronized void addModel(LTRScoringModel ltrScoringModel) throws ModelException {
+    try {
+      log.info("adding model {}", ltrScoringModel.getName());
+      store.addModel(ltrScoringModel);
+    } catch (final ModelException e) {
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
+    }
+  }
+
+  @SuppressWarnings("unchecked")
+  @Override
+  protected Object applyUpdatesToManagedData(Object updates) {
+    if (updates instanceof List) {
+      final List<Map<String,Object>> up = (List<Map<String,Object>>) updates;
+      for (final Map<String,Object> u : up) {
+        try {
+          final LTRScoringModel algo = fromLTRScoringModelMap(solrResourceLoader, u, managedFeatureStore);
+          addModel(algo);
+        } catch (final ModelException e) {
+          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
+        }
+      }
+    }
+
+    if (updates instanceof Map) {
+      final Map<String,Object> map = (Map<String,Object>) updates;
+      try {
+        final LTRScoringModel algo = fromLTRScoringModelMap(solrResourceLoader, map, managedFeatureStore);
+        addModel(algo);
+      } catch (final ModelException e) {
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
+      }
+    }
+
+    return modelsAsManagedResources(store.getModels());
+  }
+
+  @Override
+  public synchronized void doDeleteChild(BaseSolrResource endpoint, String childId) {
+    if (childId.equals("*")) {
+      store.clear();
+    } else {
+      store.delete(childId);
+    }
+    storeManagedData(applyUpdatesToManagedData(null));
+  }
+
+  /**
+   * Called to retrieve a named part (the given childId) of the resource at the
+   * given endpoint. Note: since we have a unique child managed store we ignore
+   * the childId.
+   */
+  @Override
+  public void doGet(BaseSolrResource endpoint, String childId) {
+
+    final SolrQueryResponse response = endpoint.getSolrResponse();
+    response.add(MODELS_JSON_FIELD,
+        modelsAsManagedResources(store.getModels()));
+  }
+
+  public LTRScoringModel getModel(String modelName) {
+    // this function replicates getModelStore().getModel(modelName), but
+    // it simplifies testing (we can avoid also having to mock a ModelStore).
+    return store.getModel(modelName);
+  }
+
+  @Override
+  public String toString() {
+    return "ManagedModelStore [store=" + store + ", featureStores="
+        + managedFeatureStore + "]";
+  }
+
+  /**
+   * Returns the available models as a list of Map objects. After an update the
+   * managed resource needs to return the resources in this format so that they
+   * can be stored as JSON somewhere (ZooKeeper, disk, ...).
+   *
+   * @return the available models as a list of Maps objects
+   */
+  private static List<Object> modelsAsManagedResources(List<LTRScoringModel> models) {
+    final List<Object> list = new ArrayList<>(models.size());
+    for (final LTRScoringModel model : models) {
+      list.add(toLTRScoringModelMap(model));
+    }
+    return list;
+  }
+
+  @SuppressWarnings("unchecked")
+  public static LTRScoringModel fromLTRScoringModelMap(SolrResourceLoader solrResourceLoader,
+      Map<String,Object> modelMap, ManagedFeatureStore managedFeatureStore) {
+
+    final FeatureStore featureStore =
+        managedFeatureStore.getFeatureStore((String) modelMap.get(STORE_KEY));
+
+    final List<Feature> features = new ArrayList<>();
+    final List<Normalizer> norms = new ArrayList<>();
+
+    final List<Object> featureList = (List<Object>) modelMap.get(FEATURES_KEY);
+    if (featureList != null) {
+      for (final Object feature : featureList) {
+        final Map<String,Object> featureMap = (Map<String,Object>) feature;
+        features.add(lookupFeatureFromFeatureMap(featureMap, featureStore));
+        norms.add(createNormalizerFromFeatureMap(solrResourceLoader, featureMap));
+      }
+    }
+
+    return LTRScoringModel.getInstance(solrResourceLoader,
+        (String) modelMap.get(CLASS_KEY), // modelClassName
+        (String) modelMap.get(NAME_KEY), // modelName
+        features,
+        norms,
+        featureStore.getName(),
+        featureStore.getFeatures(),
+        (Map<String,Object>) modelMap.get(PARAMS_KEY));
+  }
+
+  private static LinkedHashMap<String,Object> toLTRScoringModelMap(LTRScoringModel model) {
+    final LinkedHashMap<String,Object> modelMap = new LinkedHashMap<>(5, 1.0f);
+
+    modelMap.put(NAME_KEY, model.getName());
+    modelMap.put(CLASS_KEY, model.getClass().getCanonicalName());
+    modelMap.put(STORE_KEY, model.getFeatureStoreName());
+
+    final List<Map<String,Object>> features = new ArrayList<>();
+    final List<Feature> featuresList = model.getFeatures();
+    final List<Normalizer> normsList = model.getNorms();
+    for (int ii=0; ii<featuresList.size(); ++ii) {
+      features.add(toFeatureMap(featuresList.get(ii), normsList.get(ii)));
+    }
+    modelMap.put(FEATURES_KEY, features);
+
+    modelMap.put(PARAMS_KEY, model.getParams());
+
+    return modelMap;
+  }
+
+  private static Feature lookupFeatureFromFeatureMap(Map<String,Object> featureMap,
+      FeatureStore featureStore) {
+    final String featureName = (String)featureMap.get(NAME_KEY);
+    return (featureName == null ? null
+        : featureStore.get(featureName));
+  }
+
+  @SuppressWarnings("unchecked")
+  private static Normalizer createNormalizerFromFeatureMap(SolrResourceLoader solrResourceLoader,
+      Map<String,Object> featureMap) {
+    final Map<String,Object> normMap = (Map<String,Object>)featureMap.get(NORM_KEY);
+    return  (normMap == null ? IdentityNormalizer.INSTANCE
+        : fromNormalizerMap(solrResourceLoader, normMap));
+  }
+
+  private static LinkedHashMap<String,Object> toFeatureMap(Feature feature, Normalizer norm) {
+    final LinkedHashMap<String,Object> map = new LinkedHashMap<String,Object>(2, 1.0f);
+    map.put(NAME_KEY,  feature.getName());
+    map.put(NORM_KEY, toNormalizerMap(norm));
+    return map;
+  }
+
+  private static Normalizer fromNormalizerMap(SolrResourceLoader solrResourceLoader,
+      Map<String,Object> normMap) {
+    final String className = (String) normMap.get(CLASS_KEY);
+
+    @SuppressWarnings("unchecked")
+    final Map<String,Object> params = (Map<String,Object>) normMap.get(PARAMS_KEY);
+
+    return Normalizer.getInstance(solrResourceLoader, className, params);
+  }
+
+  private static LinkedHashMap<String,Object> toNormalizerMap(Normalizer norm) {
+    final LinkedHashMap<String,Object> normalizer = new LinkedHashMap<>(2, 1.0f);
+
+    normalizer.put(CLASS_KEY, norm.getClass().getCanonicalName());
+
+    final LinkedHashMap<String,Object> params = norm.paramsToMap();
+    if (params != null) {
+      normalizer.put(PARAMS_KEY, params);
+    }
+
+    return normalizer;
+  }
+
+}

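The update path above accepts either a single map or a list of maps; each map
mirrors the *_KEY constants defined at the top of the class. A hedged sketch
of the shape that fromLTRScoringModelMap consumes (model name and store name
are illustrative):

    Map<String,Object> modelMap = new LinkedHashMap<>();
    modelMap.put("name", "myModel");                                 // NAME_KEY
    modelMap.put("class", "org.apache.solr.ltr.model.LinearModel");  // CLASS_KEY
    modelMap.put("store", "myFeatureStore");                         // STORE_KEY
    modelMap.put("features", new ArrayList<Map<String,Object>>());   // FEATURES_KEY
    modelMap.put("params", new LinkedHashMap<String,Object>());      // PARAMS_KEY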
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/rest/package-info.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/rest/package-info.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/rest/package-info.java
new file mode 100644
index 0000000..fbf7029
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/rest/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Contains the {@link org.apache.solr.rest.ManagedResource} that encapsulate
+ * the feature and the model stores.
+ */
+package org.apache.solr.ltr.store.rest;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/response/transform/LTRFeatureLoggerTransformerFactory.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/response/transform/LTRFeatureLoggerTransformerFactory.java b/solr/contrib/ltr/src/java/org/apache/solr/response/transform/LTRFeatureLoggerTransformerFactory.java
new file mode 100644
index 0000000..d144292
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/response/transform/LTRFeatureLoggerTransformerFactory.java
@@ -0,0 +1,254 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.response.transform;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.Explanation;
+import org.apache.solr.common.SolrDocument;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.ltr.FeatureLogger;
+import org.apache.solr.ltr.LTRRescorer;
+import org.apache.solr.ltr.LTRScoringQuery;
+import org.apache.solr.ltr.LTRThreadModule;
+import org.apache.solr.ltr.SolrQueryRequestContextUtils;
+import org.apache.solr.ltr.feature.Feature;
+import org.apache.solr.ltr.model.LTRScoringModel;
+import org.apache.solr.ltr.norm.Normalizer;
+import org.apache.solr.ltr.store.FeatureStore;
+import org.apache.solr.ltr.store.rest.ManagedFeatureStore;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.response.ResultContext;
+import org.apache.solr.search.LTRQParserPlugin;
+import org.apache.solr.search.SolrIndexSearcher;
+import org.apache.solr.util.SolrPluginUtils;
+
+/**
+ * This transformer generates and appends to the response the features
+ * declared in the feature store of the current model. The class is useful
+ * if you are not interested in the reranking itself (e.g., when
+ * bootstrapping a machine learning framework).
+ */
+public class LTRFeatureLoggerTransformerFactory extends TransformerFactory {
+
+  // used inside fl to specify the output format (csv/json) of the extracted features
+  private static final String FV_RESPONSE_WRITER = "fvwt";
+
+  // used inside fl to specify the format (dense|sparse) of the extracted features
+  private static final String FV_FORMAT = "format";
+
+  // used inside fl to specify the feature store to use for the feature extraction
+  private static final String FV_STORE = "store";
+
+  private static String DEFAULT_LOGGING_MODEL_NAME = "logging-model";
+
+  private String loggingModelName = DEFAULT_LOGGING_MODEL_NAME;
+  private String defaultFvStore;
+  private String defaultFvwt;
+  private String defaultFvFormat;
+
+  private LTRThreadModule threadManager = null;
+
+  public void setLoggingModelName(String loggingModelName) {
+    this.loggingModelName = loggingModelName;
+  }
+
+  public void setStore(String defaultFvStore) {
+    this.defaultFvStore = defaultFvStore;
+  }
+
+  public void setFvwt(String defaultFvwt) {
+    this.defaultFvwt = defaultFvwt;
+  }
+
+  public void setFormat(String defaultFvFormat) {
+    this.defaultFvFormat = defaultFvFormat;
+  }
+
+  @Override
+  public void init(@SuppressWarnings("rawtypes") NamedList args) {
+    super.init(args);
+    threadManager = LTRThreadModule.getInstance(args);
+    SolrPluginUtils.invokeSetters(this, args);
+  }
+
+  @Override
+  public DocTransformer create(String name, SolrParams params,
+      SolrQueryRequest req) {
+
+    // Hint to enable feature vector cache since we are requesting features
+    SolrQueryRequestContextUtils.setIsExtractingFeatures(req);
+
+    // Communicate which feature store we are requesting features for
+    SolrQueryRequestContextUtils.setFvStoreName(req, params.get(FV_STORE, defaultFvStore));
+
+    // Create and supply the feature logger to be used
+    SolrQueryRequestContextUtils.setFeatureLogger(req,
+        FeatureLogger.createFeatureLogger(
+            params.get(FV_RESPONSE_WRITER, defaultFvwt),
+            params.get(FV_FORMAT, defaultFvFormat)));
+
+    return new FeatureTransformer(name, params, req);
+  }
+
+  class FeatureTransformer extends DocTransformer {
+
+    final private String name;
+    final private SolrParams params;
+    final private SolrQueryRequest req;
+
+    private List<LeafReaderContext> leafContexts;
+    private SolrIndexSearcher searcher;
+    private LTRScoringQuery scoringQuery;
+    private LTRScoringQuery.ModelWeight modelWeight;
+    private FeatureLogger<?> featureLogger;
+    private boolean docsWereNotReranked;
+
+    /**
+     * @param name
+     *          Name of the field to be added in a document representing the
+     *          feature vectors
+     */
+    public FeatureTransformer(String name, SolrParams params,
+        SolrQueryRequest req) {
+      this.name = name;
+      this.params = params;
+      this.req = req;
+    }
+
+    @Override
+    public String getName() {
+      return name;
+    }
+
+    @Override
+    public void setContext(ResultContext context) {
+      super.setContext(context);
+      if (context == null) {
+        return;
+      }
+      if (context.getRequest() == null) {
+        return;
+      }
+
+      searcher = context.getSearcher();
+      if (searcher == null) {
+        throw new SolrException(
+            SolrException.ErrorCode.BAD_REQUEST,
+            "searcher is null");
+      }
+      leafContexts = searcher.getTopReaderContext().leaves();
+
+      // Setup LTRScoringQuery
+      scoringQuery = SolrQueryRequestContextUtils.getScoringQuery(req);
+      docsWereNotReranked = (scoringQuery == null);
+      String featureStoreName = SolrQueryRequestContextUtils.getFvStoreName(req);
+      if (docsWereNotReranked || (featureStoreName != null && (!featureStoreName.equals(scoringQuery.getScoringModel().getFeatureStoreName())))) {
+        // if store is set in the transformer we should overwrite the logger
+
+        final ManagedFeatureStore fr = ManagedFeatureStore.getManagedFeatureStore(req.getCore());
+
+        final FeatureStore store = fr.getFeatureStore(featureStoreName);
+        featureStoreName = store.getName(); // if featureStoreName was null before, this picks up the store's actual name
+
+        try {
+          final LoggingModel lm = new LoggingModel(loggingModelName,
+              featureStoreName, store.getFeatures());
+
+          scoringQuery = new LTRScoringQuery(lm,
+              LTRQParserPlugin.extractEFIParams(params),
+              true,
+              threadManager); // request feature weights to be created for all features
+
+          // Local transformer efi if provided
+          scoringQuery.setOriginalQuery(context.getQuery());
+
+        }catch (final Exception e) {
+          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+              "retrieving the feature store "+featureStoreName, e);
+        }
+      }
+
+      if (scoringQuery.getFeatureLogger() == null){
+        scoringQuery.setFeatureLogger( SolrQueryRequestContextUtils.getFeatureLogger(req) );
+      }
+      scoringQuery.setRequest(req);
+
+      featureLogger = scoringQuery.getFeatureLogger();
+
+      try {
+        modelWeight = scoringQuery.createWeight(searcher, true, 1f);
+      } catch (final IOException e) {
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e.getMessage(), e);
+      }
+      if (modelWeight == null) {
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+            "error logging the features, model weight is null");
+      }
+    }
+
+    @Override
+    public void transform(SolrDocument doc, int docid, float score)
+        throws IOException {
+      Object fv = featureLogger.getFeatureVector(docid, scoringQuery, searcher);
+      if (fv == null) { // FV for this document was not in the cache
+        fv = featureLogger.makeFeatureVector(
+            LTRRescorer.extractFeaturesInfo(
+                modelWeight,
+                docid,
+                (docsWereNotReranked ? new Float(score) : null),
+                leafContexts));
+      }
+
+      doc.addField(name, fv);
+    }
+
+  }
+
+  private static class LoggingModel extends LTRScoringModel {
+
+    public LoggingModel(String name, String featureStoreName, List<Feature> allFeatures){
+      this(name, Collections.emptyList(), Collections.emptyList(),
+          featureStoreName, allFeatures, Collections.emptyMap());
+    }
+
+    protected LoggingModel(String name, List<Feature> features,
+        List<Normalizer> norms, String featureStoreName,
+        List<Feature> allFeatures, Map<String,Object> params) {
+      super(name, features, norms, featureStoreName, allFeatures, params);
+    }
+
+    @Override
+    public float score(float[] modelFeatureValuesNormalized) {
+      return 0;
+    }
+
+    @Override
+    public Explanation explain(LeafReaderContext context, int doc, float finalScore, List<Explanation> featureExplanations) {
+      return Explanation.match(finalScore, toString()
+          + " logging model, used only for logging the features");
+    }
+
+  }
+
+}

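The three local params read in create() map directly onto request syntax.
Assuming the factory is registered in solrconfig.xml under the transformer
name "features" (an assumption, not shown in this commit), a request could
look like:

    q=test&rq={!ltr model=myModel efi.user_query=test}&fl=id,score,[features store=myFeatureStore format=dense]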
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/response/transform/package-info.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/response/transform/package-info.java b/solr/contrib/ltr/src/java/org/apache/solr/response/transform/package-info.java
new file mode 100644
index 0000000..bab3ebf
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/response/transform/package-info.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * APIs and implementations of {@link org.apache.solr.response.transform.DocTransformer} for modifying documents in Solr request responses
+ */
+package org.apache.solr.response.transform;
+
+

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/search/LTRQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/search/LTRQParserPlugin.java b/solr/contrib/ltr/src/java/org/apache/solr/search/LTRQParserPlugin.java
new file mode 100644
index 0000000..40cbaa9
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/search/LTRQParserPlugin.java
@@ -0,0 +1,233 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.search;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+import org.apache.lucene.analysis.util.ResourceLoader;
+import org.apache.lucene.analysis.util.ResourceLoaderAware;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.ltr.LTRRescorer;
+import org.apache.solr.ltr.LTRScoringQuery;
+import org.apache.solr.ltr.LTRThreadModule;
+import org.apache.solr.ltr.SolrQueryRequestContextUtils;
+import org.apache.solr.ltr.model.LTRScoringModel;
+import org.apache.solr.ltr.store.rest.ManagedFeatureStore;
+import org.apache.solr.ltr.store.rest.ManagedModelStore;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.rest.ManagedResource;
+import org.apache.solr.rest.ManagedResourceObserver;
+import org.apache.solr.util.SolrPluginUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Plugs a rerank model into Solr.
+ *
+ * Learning to Rank Query Parser Syntax: rq={!ltr model=6029760550880411648 reRankDocs=300
+ * efi.myCompanyQueryIntent=0.98}
+ *
+ */
+public class LTRQParserPlugin extends QParserPlugin implements ResourceLoaderAware, ManagedResourceObserver {
+  public static final String NAME = "ltr";
+  private static Query defaultQuery = new MatchAllDocsQuery();
+
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  // params for setting custom external info that features can use, like query
+  // intent
+  static final String EXTERNAL_FEATURE_INFO = "efi.";
+
+  private ManagedFeatureStore fr = null;
+  private ManagedModelStore mr = null;
+
+  private LTRThreadModule threadManager = null;
+
+  /** query parser plugin: the name of the attribute for setting the model **/
+  public static final String MODEL = "model";
+
+  /** query parser plugin: default number of documents to rerank **/
+  public static final int DEFAULT_RERANK_DOCS = 200;
+
+  /**
+   * query parser plugin: the param that selects the number of documents
+   * to rerank
+   **/
+  public static final String RERANK_DOCS = "reRankDocs";
+
+  @Override
+  public void init(@SuppressWarnings("rawtypes") NamedList args) {
+    super.init(args);
+    threadManager = LTRThreadModule.getInstance(args);
+    SolrPluginUtils.invokeSetters(this, args);
+  }
+
+  @Override
+  public QParser createParser(String qstr, SolrParams localParams,
+      SolrParams params, SolrQueryRequest req) {
+    return new LTRQParser(qstr, localParams, params, req);
+  }
+
+  /**
+   * Given a set of local SolrParams, extract all of the efi.key=value params into a map
+   * @param localParams Local request parameters that might contain efi params
+   * @return Map of efi params, where the key is the name of the efi param, and the
+   *  value is the value of the efi param
+   */
+  public static Map<String,String[]> extractEFIParams(SolrParams localParams) {
+    final Map<String,String[]> externalFeatureInfo = new HashMap<>();
+    for (final Iterator<String> it = localParams.getParameterNamesIterator(); it
+        .hasNext();) {
+      final String name = it.next();
+      if (name.startsWith(EXTERNAL_FEATURE_INFO)) {
+        externalFeatureInfo.put(
+            name.substring(EXTERNAL_FEATURE_INFO.length()),
+            new String[] {localParams.get(name)});
+      }
+    }
+    return externalFeatureInfo;
+  }
+
+
+  @Override
+  public void inform(ResourceLoader loader) throws IOException {
+    final SolrResourceLoader solrResourceLoader = (SolrResourceLoader) loader;
+    ManagedFeatureStore.registerManagedFeatureStore(solrResourceLoader, this);
+    ManagedModelStore.registerManagedModelStore(solrResourceLoader, this);
+  }
+
+  @Override
+  public void onManagedResourceInitialized(NamedList<?> args, ManagedResource res) throws SolrException {
+    if (res instanceof ManagedFeatureStore) {
+      fr = (ManagedFeatureStore)res;
+    }
+    if (res instanceof ManagedModelStore){
+      mr = (ManagedModelStore)res;
+    }
+    if (mr != null && fr != null){
+      mr.setManagedFeatureStore(fr);
+      // now we can safely load the models
+      mr.loadStoredModels();
+
+    }
+  }
+
+  public class LTRQParser extends QParser {
+
+    public LTRQParser(String qstr, SolrParams localParams, SolrParams params,
+        SolrQueryRequest req) {
+      super(qstr, localParams, params, req);
+    }
+
+    @Override
+    public Query parse() throws SyntaxError {
+      // ReRanking Model
+      final String modelName = localParams.get(LTRQParserPlugin.MODEL);
+      if ((modelName == null) || modelName.isEmpty()) {
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+            "Must provide model in the request");
+      }
+
+      final LTRScoringModel ltrScoringModel = mr.getModel(modelName);
+      if (ltrScoringModel == null) {
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+            "cannot find " + LTRQParserPlugin.MODEL + " " + modelName);
+      }
+
+      final String modelFeatureStoreName = ltrScoringModel.getFeatureStoreName();
+      final boolean extractFeatures = SolrQueryRequestContextUtils.isExtractingFeatures(req);
+      final String fvStoreName = SolrQueryRequestContextUtils.getFvStoreName(req);
+      // Check if features are requested and if the model feature store and feature-transform feature store are the same
+      final boolean featuresRequestedFromSameStore = (modelFeatureStoreName.equals(fvStoreName) || fvStoreName == null) ? extractFeatures:false;
+
+      final LTRScoringQuery scoringQuery = new LTRScoringQuery(ltrScoringModel,
+          extractEFIParams(localParams),
+          featuresRequestedFromSameStore, threadManager);
+
+      // Enable the feature vector caching if we are extracting features, and the features
+      // we requested are the same ones we are reranking with
+      if (featuresRequestedFromSameStore) {
+        scoringQuery.setFeatureLogger( SolrQueryRequestContextUtils.getFeatureLogger(req) );
+      }
+      SolrQueryRequestContextUtils.setScoringQuery(req, scoringQuery);
+
+      int reRankDocs = localParams.getInt(RERANK_DOCS, DEFAULT_RERANK_DOCS);
+      reRankDocs = Math.max(1, reRankDocs);
+
+      // External features
+      scoringQuery.setRequest(req);
+
+      return new LTRQuery(scoringQuery, reRankDocs);
+    }
+  }
+
+  /**
+   * A learning to rank Query; it encapsulates a learning to rank model and
+   * delegates the rescoring of the documents to it.
+   **/
+  public class LTRQuery extends AbstractReRankQuery {
+    private final LTRScoringQuery scoringQuery;
+
+    public LTRQuery(LTRScoringQuery scoringQuery, int reRankDocs) {
+      super(defaultQuery, reRankDocs, new LTRRescorer(scoringQuery));
+      this.scoringQuery = scoringQuery;
+    }
+
+    @Override
+    public int hashCode() {
+      return 31 * classHash() + (mainQuery.hashCode() + scoringQuery.hashCode() + reRankDocs);
+    }
+
+    @Override
+    public boolean equals(Object o) {
+      return sameClassAs(o) &&  equalsTo(getClass().cast(o));
+    }
+
+    private boolean equalsTo(LTRQuery other) {
+      return (mainQuery.equals(other.mainQuery)
+          && scoringQuery.equals(other.scoringQuery) && (reRankDocs == other.reRankDocs));
+    }
+
+    @Override
+    public RankQuery wrap(Query _mainQuery) {
+      super.wrap(_mainQuery);
+      scoringQuery.setOriginalQuery(_mainQuery);
+      return this;
+    }
+
+    @Override
+    public String toString(String field) {
+      return "{!ltr mainQuery='" + mainQuery.toString() + "' scoringQuery='"
+          + scoringQuery.toString() + "' reRankDocs=" + reRankDocs + "}";
+    }
+
+    @Override
+    protected Query rewrite(Query rewrittenMainQuery) throws IOException {
+      return new LTRQuery(scoringQuery, reRankDocs).wrap(rewrittenMainQuery);
+    }
+  }
+
+}

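A small sketch of extractEFIParams in use (parameter values illustrative):
the "efi." prefix is stripped and each value is wrapped in a one-element
array, while non-efi params are ignored.

    import org.apache.solr.common.params.ModifiableSolrParams;

    ModifiableSolrParams localParams = new ModifiableSolrParams();
    localParams.add("model", "myModel");          // ignored: no "efi." prefix
    localParams.add("efi.user_query", "hello");
    Map<String,String[]> efi = LTRQParserPlugin.extractEFIParams(localParams);
    // efi now contains a single entry: "user_query" -> ["hello"]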
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/search/package-info.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/search/package-info.java b/solr/contrib/ltr/src/java/org/apache/solr/search/package-info.java
new file mode 100644
index 0000000..2286a93
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/search/package-info.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * APIs and classes for {@linkplain org.apache.solr.search.QParserPlugin parsing} and {@linkplain org.apache.solr.search.SolrIndexSearcher processing} search requests
+ */
+package org.apache.solr.search;
+
+

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/overview.html
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/overview.html b/solr/contrib/ltr/src/java/overview.html
new file mode 100644
index 0000000..ccae361
--- /dev/null
+++ b/solr/contrib/ltr/src/java/overview.html
@@ -0,0 +1,91 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<html>
+<body>
+Apache Solr Search Server: Learning to Rank Contrib
+
+<p>
+This module contains the logic <strong>to plug machine-learned ranking models into Solr</strong>.
+</p>
+<p>
+In information retrieval systems, Learning to Rank is used to re-rank the top X
+retrieved documents using trained machine learning models. The hope is
+that sophisticated models can make more nuanced ranking decisions than standard ranking
+functions like TF-IDF or BM25.
+</p>
+<p>
+This module allows plugging a reranking component directly into Solr, enabling users
+to easily build their own learning to rank systems and access the rich
+matching features readily available in Solr. It also provides tools to perform
+feature engineering and feature extraction.
+</p>
+<h2> Code structure </h2>
+<p>
+A Learning to Rank model is plugged into the ranking through the {@link org.apache.solr.search.LTRQParserPlugin},
+a {@link org.apache.solr.search.QParserPlugin}. The plugin reads
+from the request the model (an instance of {@link org.apache.solr.ltr.model.LTRScoringModel})
+to use for the request, plus other
+parameters. It then generates a {@link org.apache.solr.search.LTRQParserPlugin.LTRQuery LTRQuery}:
+a particular {@link org.apache.solr.search.RankQuery}
+that encapsulates the given model and uses it to
+rescore and rerank the documents (by using an {@link org.apache.solr.ltr.LTRRescorer}).
+</p>
+<p>
+A model is applied to each document through a {@link org.apache.solr.ltr.LTRScoringQuery}, a
+subclass of {@link org.apache.lucene.search.Query}. Like a normal query,
+it produces a new score
+for each reranked document.
+</p>
+<p>
+A {@link org.apache.solr.ltr.LTRScoringQuery} is created by providing an instance of
+{@link org.apache.solr.ltr.model.LTRScoringModel}. An instance of
+{@link org.apache.solr.ltr.model.LTRScoringModel}
+defines how to combine the features in order to create a new
+score for a document. A new learning to rank model is plugged
+into the framework by extending {@link org.apache.solr.ltr.model.LTRScoringModel}
+(see for example {@link org.apache.solr.ltr.model.MultipleAdditiveTreesModel} and {@link org.apache.solr.ltr.model.LinearModel}).
+</p>
+<p>
+The {@link org.apache.solr.ltr.LTRScoringQuery} will take care of computing the values of
+all the features (see {@link org.apache.solr.ltr.feature.Feature}) and then will delegate the final score
+generation to the {@link org.apache.solr.ltr.model.LTRScoringModel}, by calling the method
+{@link org.apache.solr.ltr.model.LTRScoringModel#score(float[] modelFeatureValuesNormalized)}.
+</p>
+<p>
+A {@link org.apache.solr.ltr.feature.Feature} will produce a particular value for each document, so
+it is modeled as a {@link org.apache.lucene.search.Query}. The package
+{@link org.apache.solr.ltr.feature} contains several examples
+of features. One benefit of extending the Query object is that we can reuse
+Query as a feature; see for example {@link org.apache.solr.ltr.feature.SolrFeature}.
+Features for a document can also be returned in the response by
+using the FeatureTransformer (a {@link org.apache.solr.response.transform.DocTransformer DocTransformer})
+provided by {@link org.apache.solr.response.transform.LTRFeatureLoggerTransformerFactory}.
+</p>
+<p>
+{@link org.apache.solr.ltr.store} contains the logic to store the features and the models.
+Models are registered into a unique {@link org.apache.solr.ltr.store.ModelStore ModelStore},
+and each model specifies a particular {@link org.apache.solr.ltr.store.FeatureStore FeatureStore} that
+will contain a particular subset of features.
+</p>
+<p>
+Features and models can be managed through a REST API, provided by the
+{@link org.apache.solr.rest.ManagedResource Managed Resources}
+{@link org.apache.solr.ltr.store.rest.ManagedFeatureStore ManagedFeatureStore}
+and {@link org.apache.solr.ltr.store.rest.ManagedModelStore ManagedModelStore}.
+</p>
+</body>
+</html>

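For illustration, here is a minimal sketch of a custom model that sums its
feature values. It follows the extension point described in the overview above,
LTRScoringModel#score(float[] modelFeatureValuesNormalized); the constructor
and explain(...) signatures below are assumptions made for the sketch, not
taken from this patch.

import java.util.List;
import java.util.Map;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Explanation;
import org.apache.solr.ltr.feature.Feature;
import org.apache.solr.ltr.model.LTRScoringModel;
import org.apache.solr.ltr.norm.Normalizer;

// Sketch of a model plugged in by extending LTRScoringModel.
public class SumModel extends LTRScoringModel {

  // Assumed constructor signature, mirrored onto the superclass.
  public SumModel(String name, List<Feature> features, List<Normalizer> norms,
      String featureStoreName, List<Feature> allFeatures, Map<String,Object> params) {
    super(name, features, norms, featureStoreName, allFeatures, params);
  }

  @Override
  public float score(float[] modelFeatureValuesNormalized) {
    // Combine the normalized feature values into one document score.
    float total = 0f;
    for (float value : modelFeatureValuesNormalized) {
      total += value;
    }
    return total;
  }

  @Override
  public Explanation explain(LeafReaderContext context, int doc,
      float finalScore, List<Explanation> featureExplanations) {
    // Assumed signature: report the final score with per-feature details.
    return Explanation.match(finalScore, "sum of feature values", featureExplanations);
  }
}
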
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/featureExamples/comp_features.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/featureExamples/comp_features.json b/solr/contrib/ltr/src/test-files/featureExamples/comp_features.json
new file mode 100644
index 0000000..8d75739
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/featureExamples/comp_features.json
@@ -0,0 +1,37 @@
+[
+{ "name":"origScore",
+  "class":"org.apache.solr.ltr.feature.OriginalScoreFeature",
+  "params":{},
+  "store": "feature-store-6"
+},
+{
+  "name": "descriptionTermFreq",
+  "class": "org.apache.solr.ltr.feature.SolrFeature",
+  "params": { "q" : "{!func}termfreq(description,${user_text})" },
+  "store": "feature-store-6"
+},
+{
+  "name": "popularity",
+  "class": "org.apache.solr.ltr.feature.SolrFeature",
+  "params": { "q" : "{!func}normHits"},
+  "store": "feature-store-6"
+},
+{
+  "name": "isPopular",
+  "class": "org.apache.solr.ltr.feature.SolrFeature",
+  "params": {"fq" : ["{!field f=popularity}201"] },
+  "store": "feature-store-6"
+},
+{
+  "name": "queryPartialMatch2",
+  "class": "org.apache.solr.ltr.feature.SolrFeature",
+  "params": {"q": "{!dismax qf=description mm=2}${user_text}" },
+  "store": "feature-store-6"
+},
+{
+  "name": "queryPartialMatch2.1",
+  "class": "org.apache.solr.ltr.feature.SolrFeature",
+  "params": {"q": "{!dismax qf=description mm=2}${user_text}" },
+  "store": "feature-store-6"
+}
+]
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/featureExamples/external_features.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/featureExamples/external_features.json b/solr/contrib/ltr/src/test-files/featureExamples/external_features.json
new file mode 100644
index 0000000..6c0cfa6
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/featureExamples/external_features.json
@@ -0,0 +1,51 @@
+[ {
+    "name" : "matchedTitle",
+    "class" : "org.apache.solr.ltr.feature.SolrFeature",
+    "params" : {
+        "q" : "{!terms f=title}${user_query}"
+    }
+}, {
+    "name" : "confidence",
+    "class" : "org.apache.solr.ltr.feature.ValueFeature",
+    "store": "fstore2",
+    "params" : {
+        "value" : "${myconf}"
+    }
+}, {
+    "name":"originalScore",
+    "class":"org.apache.solr.ltr.feature.OriginalScoreFeature",
+    "store": "fstore2",
+    "params":{}
+}, {
+    "name" : "occurrences",
+    "class" : "org.apache.solr.ltr.feature.ValueFeature",
+    "store": "fstore3",
+    "params" : {
+        "value" : "${myOcc}",
+        "required" : false
+    }
+}, {
+    "name":"originalScore",
+    "class":"org.apache.solr.ltr.feature.OriginalScoreFeature",
+    "store": "fstore3",
+    "params":{}
+}, {
+    "name" : "popularity",
+    "class" : "org.apache.solr.ltr.feature.ValueFeature",
+    "store": "fstore4",
+    "params" : {
+        "value" : "${myPop}",
+        "required" : true
+    }
+}, {
+    "name":"originalScore",
+    "class":"org.apache.solr.ltr.feature.OriginalScoreFeature",
+    "store": "fstore4",
+    "params":{}
+}, {
+    "name" : "titlePhraseMatch",
+    "class" : "org.apache.solr.ltr.feature.SolrFeature",
+    "params" : {
+        "q" : "{!field f=title}${user_query}"
+    }
+} ]

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/featureExamples/external_features_for_sparse_processing.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/featureExamples/external_features_for_sparse_processing.json b/solr/contrib/ltr/src/test-files/featureExamples/external_features_for_sparse_processing.json
new file mode 100644
index 0000000..52bab27
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/featureExamples/external_features_for_sparse_processing.json
@@ -0,0 +1,18 @@
+[{
+  "name" : "user_device_smartphone",
+  "class":"org.apache.solr.ltr.feature.ValueFeature",
+  "params" : {
+    "value": "${user_device_smartphone}"
+  }
+},
+  {
+    "name" : "user_device_tablet",
+    "class":"org.apache.solr.ltr.feature.ValueFeature",
+    "params" : {
+      "value": "${user_device_tablet}"
+    }
+  }
+]

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/featureExamples/features-linear-efi.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/featureExamples/features-linear-efi.json b/solr/contrib/ltr/src/test-files/featureExamples/features-linear-efi.json
new file mode 100644
index 0000000..e05542a
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/featureExamples/features-linear-efi.json
@@ -0,0 +1,17 @@
+[
+  {
+    "name": "sampleConstant",
+    "class": "org.apache.solr.ltr.feature.ValueFeature",
+    "params": {
+      "value": 5
+    }
+  },
+  {
+    "name" : "search_number_of_nights",
+    "class":"org.apache.solr.ltr.feature.ValueFeature",
+    "params" : {
+      "value": "${search_number_of_nights}"
+    }
+  }
+
+]

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/featureExamples/features-linear.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/featureExamples/features-linear.json b/solr/contrib/ltr/src/test-files/featureExamples/features-linear.json
new file mode 100644
index 0000000..8cc2996
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/featureExamples/features-linear.json
@@ -0,0 +1,51 @@
+[
+  {
+     "name": "title",
+     "class": "org.apache.solr.ltr.feature.ValueFeature",
+     "params": {
+         "value": 1
+     }
+  },
+  {
+    "name": "description",
+    "class": "org.apache.solr.ltr.feature.ValueFeature",
+    "params": {
+       "value": 2
+    }
+  },
+  {
+    "name": "keywords",
+    "class": "org.apache.solr.ltr.feature.ValueFeature",
+    "params": {
+        "value": 2
+    }
+  },
+ {
+     "name": "popularity",
+     "class": "org.apache.solr.ltr.feature.ValueFeature",
+     "params": {
+         "value": 3
+     }
+ },
+ {
+     "name": "text",
+     "class": "org.apache.solr.ltr.feature.ValueFeature",
+     "params": {
+         "value": 4
+     }
+ },
+ {
+   "name": "queryIntentPerson",
+   "class": "org.apache.solr.ltr.feature.ValueFeature",
+   "params": {
+       "value": 5
+   }
+ },
+ {
+   "name": "queryIntentCompany",
+   "class": "org.apache.solr.ltr.feature.ValueFeature",
+   "params": {
+       "value": 5
+   }
+ }
+]

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/featureExamples/features-store-test-model.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/featureExamples/features-store-test-model.json b/solr/contrib/ltr/src/test-files/featureExamples/features-store-test-model.json
new file mode 100644
index 0000000..69aad84
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/featureExamples/features-store-test-model.json
@@ -0,0 +1,51 @@
+[
+    {
+        "name": "constant1",
+        "class": "org.apache.solr.ltr.feature.ValueFeature",
+     "store":"test",
+        "params": {
+            "value": 1
+        }
+    },
+    {
+        "name": "constant2",
+        "class": "org.apache.solr.ltr.feature.ValueFeature",
+     "store":"test",
+        "params": {
+            "value": 2
+        }
+    },
+    {
+        "name": "constant3",
+        "class": "org.apache.solr.ltr.feature.ValueFeature",
+     "store":"test",
+        "params": {
+            "value": 3
+        }
+    },
+    {
+        "name": "constant4",
+        "class": "org.apache.solr.ltr.feature.ValueFeature",
+     "store":"test",
+        "params": {
+            "value": 4
+        }
+    },
+    {
+        "name": "constant5",
+        "class": "org.apache.solr.ltr.feature.ValueFeature",
+     "store":"test",
+        "params": {
+            "value": 5
+        }
+    },
+     {
+        "name": "pop",
+        "class": "org.apache.solr.ltr.feature.FieldValueFeature",
+     "store":"test",
+        "params": {
+            "field": "popularity"
+        }
+    }
+
+]

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/featureExamples/fq_features.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/featureExamples/fq_features.json b/solr/contrib/ltr/src/test-files/featureExamples/fq_features.json
new file mode 100644
index 0000000..13968f9
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/featureExamples/fq_features.json
@@ -0,0 +1,16 @@
+[
+    {
+        "name": "matchedTitle",
+        "class": "org.apache.solr.ltr.feature.SolrFeature",
+        "params": {
+            "q": "{!terms f=title}${user_query}"
+        }
+    },
+    {
+        "name": "popularity",
+        "class": "org.apache.solr.ltr.feature.ValueFeature",
+        "params": {
+            "value": 3
+        }
+    }
+]

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/featureExamples/multipleadditivetreesmodel_features.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/featureExamples/multipleadditivetreesmodel_features.json b/solr/contrib/ltr/src/test-files/featureExamples/multipleadditivetreesmodel_features.json
new file mode 100644
index 0000000..92f3861
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/featureExamples/multipleadditivetreesmodel_features.json
@@ -0,0 +1,16 @@
+[
+    {
+        "name": "matchedTitle",
+        "class": "org.apache.solr.ltr.feature.SolrFeature",
+        "params": {
+            "q": "{!terms f=title}${user_query}"
+        }
+    },
+    {
+        "name": "constantScoreToForceMultipleAdditiveTreesScoreAllDocs",
+        "class": "org.apache.solr.ltr.feature.ValueFeature",
+        "params": {
+            "value": 1
+        }
+    }
+]

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/log4j.properties
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/log4j.properties b/solr/contrib/ltr/src/test-files/log4j.properties
new file mode 100644
index 0000000..d86c698
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/log4j.properties
@@ -0,0 +1,32 @@
+#  Logging level
+log4j.rootLogger=INFO, CONSOLE
+
+log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender
+log4j.appender.CONSOLE.Target=System.err
+log4j.appender.CONSOLE.layout=org.apache.log4j.EnhancedPatternLayout
+log4j.appender.CONSOLE.layout.ConversionPattern=%-4r %-5p (%t) [%X{node_name} %X{collection} %X{shard} %X{replica} %X{core}] %c{1.} %m%n
+log4j.logger.org.apache.zookeeper=WARN
+log4j.logger.org.apache.hadoop=WARN
+log4j.logger.org.apache.directory=WARN
+log4j.logger.org.apache.solr.hadoop=INFO
+log4j.logger.org.apache.solr.client.solrj.embedded.JettySolrRunner=DEBUG
+
+#log4j.logger.org.apache.solr.update.processor.LogUpdateProcessor=DEBUG
+#log4j.logger.org.apache.solr.update.processor.DistributedUpdateProcessor=DEBUG
+#log4j.logger.org.apache.solr.update.PeerSync=DEBUG
+#log4j.logger.org.apache.solr.core.CoreContainer=DEBUG
+#log4j.logger.org.apache.solr.cloud.RecoveryStrategy=DEBUG
+#log4j.logger.org.apache.solr.cloud.SyncStrategy=DEBUG
+#log4j.logger.org.apache.solr.handler.admin.CoreAdminHandler=DEBUG
+#log4j.logger.org.apache.solr.cloud.ZkController=DEBUG
+#log4j.logger.org.apache.solr.update.DefaultSolrCoreState=DEBUG
+#log4j.logger.org.apache.solr.common.cloud.ConnectionManager=DEBUG
+#log4j.logger.org.apache.solr.update.UpdateLog=DEBUG
+#log4j.logger.org.apache.solr.cloud.ChaosMonkey=DEBUG
+#log4j.logger.org.apache.solr.update.TransactionLog=DEBUG
+#log4j.logger.org.apache.solr.handler.ReplicationHandler=DEBUG
+#log4j.logger.org.apache.solr.handler.IndexFetcher=DEBUG
+
+#log4j.logger.org.apache.solr.common.cloud.ClusterStateUtil=DEBUG
+#log4j.logger.org.apache.solr.cloud.OverseerAutoReplicaFailoverThread=DEBUG

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/modelExamples/external_model.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/external_model.json b/solr/contrib/ltr/src/test-files/modelExamples/external_model.json
new file mode 100644
index 0000000..04ab229
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/external_model.json
@@ -0,0 +1,12 @@
+{
+    "class":"org.apache.solr.ltr.model.LinearModel",
+    "name":"externalmodel",
+    "features":[
+        { "name": "matchedTitle"}
+    ],
+    "params":{
+        "weights": {
+            "matchedTitle": 0.999
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/modelExamples/external_model_store.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/external_model_store.json b/solr/contrib/ltr/src/test-files/modelExamples/external_model_store.json
new file mode 100644
index 0000000..f8e6648
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/external_model_store.json
@@ -0,0 +1,13 @@
+{
+    "class":"org.apache.solr.ltr.model.LinearModel",
+    "name":"externalmodelstore",
+    "store": "fstore2",
+    "features":[
+        { "name": "confidence"}
+    ],
+    "params":{
+        "weights": {
+            "confidence": 0.999
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/modelExamples/fq-model.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/fq-model.json b/solr/contrib/ltr/src/test-files/modelExamples/fq-model.json
new file mode 100644
index 0000000..b5d631f
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/fq-model.json
@@ -0,0 +1,20 @@
+{
+	"class":"org.apache.solr.ltr.model.LinearModel",
+	"name":"fqmodel",
+	"features":[
+        {
+            "name":"matchedTitle",
+            "norm": {
+                "class":"org.apache.solr.ltr.norm.MinMaxNormalizer",
+                "params":{ "min":"0.0f", "max":"10.0f" }
+            }
+        },
+        { "name":"popularity"}
+	],
+	"params":{
+	  "weights": {
+	    "matchedTitle": 0.5,
+	    "popularity": 0.5
+	  }
+	}
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/modelExamples/linear-model-efi.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/linear-model-efi.json b/solr/contrib/ltr/src/test-files/modelExamples/linear-model-efi.json
new file mode 100644
index 0000000..018466e
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/linear-model-efi.json
@@ -0,0 +1,14 @@
+{
+  "class":"org.apache.solr.ltr.model.LinearModel",
+  "name":"linear-efi",
+  "features":[
+    {"name":"sampleConstant"},
+    {"name":"search_number_of_nights"}
+  ],
+  "params":{
+    "weights":{
+      "sampleConstant":1.0,
+      "search_number_of_nights":2.0
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/modelExamples/linear-model.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/linear-model.json b/solr/contrib/ltr/src/test-files/modelExamples/linear-model.json
new file mode 100644
index 0000000..6b46dca
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/linear-model.json
@@ -0,0 +1,30 @@
+{
+	"class":"org.apache.solr.ltr.model.LinearModel",
+	"name":"6029760550880411648",
+	"features":[
+        {"name":"title"},
+        {"name":"description"},
+        {"name":"keywords"},
+        {
+            "name":"popularity",
+            "norm": {
+                "class":"org.apache.solr.ltr.norm.MinMaxNormalizer",
+                "params":{ "min":"0.0f", "max":"10.0f" }
+            }
+        },
+        {"name":"text"},
+        {"name":"queryIntentPerson"},
+        {"name":"queryIntentCompany"}
+	],
+	"params":{
+	  "weights": {
+	    "title": 0.0000000000,
+	    "description": 0.1000000000,
+	    "keywords": 0.2000000000,
+	    "popularity": 0.3000000000,
+	    "text": 0.4000000000,
+	    "queryIntentPerson":0.1231231,
+	    "queryIntentCompany":0.12121211
+	  }
+	}
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel.json b/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel.json
new file mode 100644
index 0000000..37551a0
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel.json
@@ -0,0 +1,38 @@
+{
+    "class":"org.apache.solr.ltr.model.MultipleAdditiveTreesModel",
+    "name":"multipleadditivetreesmodel",
+    "features":[
+        { "name": "matchedTitle"},
+        { "name": "constantScoreToForceMultipleAdditiveTreesScoreAllDocs"}
+    ],
+    "params":{
+        "trees": [
+            {
+                "weight" : "1f",
+                "root": {
+                    "feature": "matchedTitle",
+                    "threshold": "0.5f",
+                    "left" : {
+                        "value" : "-100"
+                    },
+                    "right": {
+                        "feature" : "this_feature_doesnt_exist",
+                        "threshold": "10.0f",
+                        "left" : {
+                            "value" : "50"
+                        },
+                        "right" : {
+                            "value" : "75"
+                        }
+                    }
+                }
+            },
+            {
+                "weight" : "2f",
+                "root": {
+                    "value" : "-10"
+                }
+            }
+        ]
+    }
+}

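Reading the model above by hand (a sketch, assuming the usual multiple
additive trees convention that a node branches left when
featureValue <= threshold, and that an unfired feature such as
"this_feature_doesnt_exist" contributes 0):

public class TreeScoreSketch {
  public static void main(String[] args) {
    float matchedTitle = 1.0f; // the {!terms} feature fired
    float missing = 0.0f;      // unknown feature assumed to default to 0
    // First tree: weight 1, branch on matchedTitle, then on the missing feature.
    float tree1 = (matchedTitle <= 0.5f) ? -100f
                : (missing <= 10.0f) ? 50f : 75f;
    // Second tree: weight 2, a single leaf.
    float tree2 = -10f;
    System.out.println(1f * tree1 + 2f * tree2); // weighted sum: 30.0
  }
}
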
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_external_binary_features.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_external_binary_features.json b/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_external_binary_features.json
new file mode 100644
index 0000000..cb8996e
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_external_binary_features.json
@@ -0,0 +1,38 @@
+{
+  "class":"org.apache.solr.ltr.model.MultipleAdditiveTreesModel",
+  "name":"external_model_binary_feature",
+  "features":[
+    { "name": "user_device_smartphone"},
+    { "name": "user_device_tablet"}
+  ],
+  "params":{
+    "trees": [
+      {
+        "weight" : "1f",
+        "root": {
+          "feature": "user_device_smartphone",
+          "threshold": "0.5f",
+          "left" : {
+            "value" : "0"
+          },
+          "right" : {
+            "value" : "50"
+          }
+
+        }},
+      {
+        "weight" : "1f",
+        "root": {
+          "feature": "user_device_tablet",
+          "threshold": "0.5f",
+          "left" : {
+            "value" : "0"
+          },
+          "right" : {
+            "value" : "65"
+          }
+
+        }}
+    ]
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_feature.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_feature.json b/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_feature.json
new file mode 100644
index 0000000..2919f07
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_feature.json
@@ -0,0 +1,24 @@
+{
+    "class":"org.apache.solr.ltr.model.MultipleAdditiveTreesModel",
+    "name":"multipleadditivetreesmodel_no_feature",
+    "features":[
+        { "name": "matchedTitle"},
+        { "name": "constantScoreToForceMultipleAdditiveTreesScoreAllDocs"}
+    ],
+    "params":{
+        "trees": [
+            {
+                "weight" : "1f",
+                "root": {
+                    "threshold": "0.5f",
+                    "left" : {
+                        "value" : "-100"
+                    },
+                    "right": {
+                        "value" : "75"
+                    }
+                }
+            }
+        ]
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_features.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_features.json b/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_features.json
new file mode 100644
index 0000000..ec4c37f
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_features.json
@@ -0,0 +1,14 @@
+{
+    "class":"org.apache.solr.ltr.model.MultipleAdditiveTreesModel",
+    "name":"multipleadditivetreesmodel_no_features",
+    "params":{
+        "trees": [
+            {
+                "weight" : "2f",
+                "root": {
+                    "value" : "-10"
+                }
+            }
+        ]
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_left.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_left.json b/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_left.json
new file mode 100644
index 0000000..653d2ff
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_left.json
@@ -0,0 +1,22 @@
+{
+    "class":"org.apache.solr.ltr.model.MultipleAdditiveTreesModel",
+    "name":"multipleadditivetreesmodel_no_left",
+    "features":[
+        { "name": "matchedTitle"},
+        { "name": "constantScoreToForceMultipleAdditiveTreesScoreAllDocs"}
+    ],
+    "params":{
+        "trees": [
+            {
+                "weight" : "1f",
+                "root": {
+                    "feature": "matchedTitle",
+                    "threshold": "0.5f",
+                    "right": {
+                        "value" : "75"
+                    }
+                }
+            }
+        ]
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_params.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_params.json b/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_params.json
new file mode 100644
index 0000000..4d50c4e
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_params.json
@@ -0,0 +1,8 @@
+{
+    "class":"org.apache.solr.ltr.model.MultipleAdditiveTreesModel",
+    "name":"multipleadditivetreesmodel_no_params",
+    "features":[
+        { "name": "matchedTitle"},
+        { "name": "constantScoreToForceMultipleAdditiveTreesScoreAllDocs"}
+    ]
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_right.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_right.json b/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_right.json
new file mode 100644
index 0000000..acd2d83
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_right.json
@@ -0,0 +1,22 @@
+{
+    "class":"org.apache.solr.ltr.model.MultipleAdditiveTreesModel",
+    "name":"multipleadditivetreesmodel_no_right",
+    "features":[
+        { "name": "matchedTitle"},
+        { "name": "constantScoreToForceMultipleAdditiveTreesScoreAllDocs"}
+    ],
+    "params":{
+        "trees": [
+            {
+                "weight" : "1f",
+                "root": {
+                    "feature": "matchedTitle",
+                    "threshold": "0.5f",
+                    "left" : {
+                        "value" : "-100"
+                    }
+                }
+            }
+        ]
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_threshold.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_threshold.json b/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_threshold.json
new file mode 100644
index 0000000..d0fc381
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_threshold.json
@@ -0,0 +1,24 @@
+{
+    "class":"org.apache.solr.ltr.model.MultipleAdditiveTreesModel",
+    "name":"multipleadditivetreesmodel_no_threshold",
+    "features":[
+        { "name": "matchedTitle"},
+        { "name": "constantScoreToForceMultipleAdditiveTreesScoreAllDocs"}
+    ],
+    "params":{
+        "trees": [
+            {
+                "weight" : "1f",
+                "root": {
+                    "feature": "matchedTitle",
+                    "left" : {
+                        "value" : "-100"
+                    },
+                    "right": {
+                        "value" : "75"
+                    }
+                }
+            }
+        ]
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_tree.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_tree.json b/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_tree.json
new file mode 100644
index 0000000..507def3
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_tree.json
@@ -0,0 +1,15 @@
+{
+    "class":"org.apache.solr.ltr.model.MultipleAdditiveTreesModel",
+    "name":"multipleadditivetreesmodel_no_tree",
+    "features":[
+        { "name": "matchedTitle"},
+        { "name": "constantScoreToForceMultipleAdditiveTreesScoreAllDocs"}
+    ],
+    "params":{
+        "trees": [
+            {
+                "weight" : "2f"
+            }
+        ]
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_trees.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_trees.json b/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_trees.json
new file mode 100644
index 0000000..bb360dd
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_trees.json
@@ -0,0 +1,10 @@
+{
+    "class":"org.apache.solr.ltr.model.MultipleAdditiveTreesModel",
+    "name":"multipleadditivetreesmodel_no_trees",
+    "features":[
+        { "name": "matchedTitle"},
+        { "name": "constantScoreToForceMultipleAdditiveTreesScoreAllDocs"}
+    ],
+    "params":{
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_weight.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_weight.json b/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_weight.json
new file mode 100644
index 0000000..9048e6c
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/multipleadditivetreesmodel_no_weight.json
@@ -0,0 +1,24 @@
+{
+    "class":"org.apache.solr.ltr.model.MultipleAdditiveTreesModel",
+    "name":"multipleadditivetreesmodel_no_weight",
+    "features":[
+        { "name": "matchedTitle"},
+        { "name": "constantScoreToForceMultipleAdditiveTreesScoreAllDocs"}
+    ],
+    "params":{
+        "trees": [
+            {
+                "root": {
+                    "feature": "matchedTitle",
+                    "threshold": "0.5f",
+                    "left" : {
+                        "value" : "-100"
+                    },
+                    "right": {
+                        "value" : "75"
+                    }
+                }
+            }
+        ]
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/solr/collection1/conf/schema.xml
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/solr/collection1/conf/schema.xml b/solr/contrib/ltr/src/test-files/solr/collection1/conf/schema.xml
new file mode 100644
index 0000000..15cf140
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/solr/collection1/conf/schema.xml
@@ -0,0 +1,88 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<schema name="example" version="1.5">
+  <fields>
+    <field name="id" type="string" indexed="true" stored="true" required="true" multiValued="false" />
+    <field name="title" type="text_general" indexed="true" stored="true"/>
+    <field name="description" type="text_general" indexed="true" stored="true"/>
+    <field name="keywords" type="text_general" indexed="true" stored="true" multiValued="true"/>
+    <field name="popularity" type="int" indexed="true" stored="true" />
+    <field name="normHits" type="float" indexed="true" stored="true" />
+    <field name="text" type="text_general" indexed="true" stored="false" multiValued="true"/>
+    <field name="_version_" type="long" indexed="true" stored="true"/>
+
+    <dynamicField name="*_s"  type="string"  indexed="true"  stored="true" />
+    <dynamicField name="*_t"  type="text_general"    indexed="true"  stored="true"/>
+  </fields>
+
+  <uniqueKey>id</uniqueKey>
+
+  <copyField source="title" dest="text"/>
+  <copyField source="description" dest="text"/>
+
+  <types>
+    <fieldType name="string" class="solr.StrField" sortMissingLast="true" />
+    <fieldType name="boolean" class="solr.BoolField" sortMissingLast="true"/>
+    <fieldType name="int" class="solr.TrieIntField" precisionStep="0" positionIncrementGap="0"/>
+    <fieldType name="float" class="solr.TrieFloatField" precisionStep="0" positionIncrementGap="0"/>
+    <fieldType name="long" class="solr.TrieLongField" precisionStep="0" positionIncrementGap="0"/>
+    <fieldType name="double" class="solr.TrieDoubleField" precisionStep="0" positionIncrementGap="0"/>
+    <fieldType name="date" class="solr.TrieDateField" precisionStep="0" positionIncrementGap="0"/>
+    <fieldtype name="binary" class="solr.BinaryField"/>
+
+    <fieldType name="text_ws" class="solr.TextField" positionIncrementGap="100">
+      <analyzer>
+        <tokenizer class="solr.WhitespaceTokenizerFactory"/>
+      </analyzer>
+    </fieldType>
+
+    <fieldType name="text_general" class="solr.TextField" positionIncrementGap="100">
+      <analyzer type="index">
+        <tokenizer class="solr.StandardTokenizerFactory"/>
+        <filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt"  />
+        <filter class="solr.LowerCaseFilterFactory"/>
+      </analyzer>
+      <analyzer type="query">
+        <tokenizer class="solr.StandardTokenizerFactory"/>
+        <filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt"  />
+        <filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
+        <filter class="solr.LowerCaseFilterFactory"/>
+      </analyzer>
+    </fieldType>
+
+    <fieldType name="text_lc" class="solr.TextField" positionIncrementGap="100">
+      <analyzer>
+        <tokenizer class="solr.KeywordTokenizerFactory"/>
+        <filter class="solr.LowerCaseFilterFactory" />
+      </analyzer>
+    </fieldType>
+  </types>
+
+  <!-- Similarity is the scoring routine for each document vs. a query.
+       A custom Similarity or SimilarityFactory may be specified here, but
+       the default is fine for most applications.
+       For more info: http://wiki.apache.org/solr/SchemaXml#Similarity
+    -->
+  <!--
+     <similarity class="com.example.solr.CustomSimilarityFactory">
+       <str name="paramkey">param value</str>
+     </similarity>
+    -->
+
+</schema>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/solr/collection1/conf/solrconfig-ltr.xml
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/solr/collection1/conf/solrconfig-ltr.xml b/solr/contrib/ltr/src/test-files/solr/collection1/conf/solrconfig-ltr.xml
new file mode 100644
index 0000000..1a18471
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/solr/collection1/conf/solrconfig-ltr.xml
@@ -0,0 +1,65 @@
+<?xml version="1.0" ?>
+<!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor
+ license agreements. See the NOTICE file distributed with this work for additional
+ information regarding copyright ownership. The ASF licenses this file to
+ You under the Apache License, Version 2.0 (the "License"); you may not use
+ this file except in compliance with the License. You may obtain a copy of
+ the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required
+ by applicable law or agreed to in writing, software distributed under the
+ License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
+ OF ANY KIND, either express or implied. See the License for the specific
+ language governing permissions and limitations under the License. -->
+
+<config>
+    <luceneMatchVersion>6.0.0</luceneMatchVersion>
+ <dataDir>${solr.data.dir:}</dataDir>
+ <directoryFactory name="DirectoryFactory"
+  class="${solr.directoryFactory:solr.RAMDirectoryFactory}" />
+
+ <schemaFactory class="ClassicIndexSchemaFactory" />
+
+
+ <!-- Query parser used to rerank top docs with a provided model -->
+ <queryParser name="ltr"
+  class="org.apache.solr.search.LTRQParserPlugin" />
+
+ <query>
+  <filterCache class="solr.FastLRUCache" size="4096"
+   initialSize="2048" autowarmCount="0" />
+  <cache name="QUERY_DOC_FV" class="solr.search.LRUCache" size="4096"
+   initialSize="2048" autowarmCount="4096" regenerator="solr.search.NoOpRegenerator" />
+ </query>
+
+ <!-- add a transformer that will encode the document features in the response.
+  For each document the transformer will add the features as an extra field
+  in the response. The name of the field will be the name of the transformer
+  enclosed in brackets (in this case [fv]). In order to get the feature
+  vector you will have to specify that you want the field (e.g., fl="*,[fv]") -->
+ <transformer name="fv"
+  class="org.apache.solr.response.transform.LTRFeatureLoggerTransformerFactory" />
+
+ <updateHandler class="solr.DirectUpdateHandler2">
+  <autoCommit>
+   <maxTime>15000</maxTime>
+   <openSearcher>false</openSearcher>
+  </autoCommit>
+  <autoSoftCommit>
+   <maxTime>1000</maxTime>
+  </autoSoftCommit>
+  <updateLog>
+   <str name="dir">${solr.data.dir:}</str>
+  </updateLog>
+ </updateHandler>
+
+ <requestHandler name="/update" class="solr.UpdateRequestHandler" />
+ <!-- Query request handler managing models and features -->
+ <requestHandler name="/query" class="solr.SearchHandler">
+  <lst name="defaults">
+   <str name="echoParams">explicit</str>
+   <str name="wt">json</str>
+   <str name="indent">true</str>
+   <str name="df">id</str>
+  </lst>
+ </requestHandler>
+
+</config>

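With a config like the one above in place, a request exercises the ltr query
parser through the rq parameter and asks for the field produced by the fv
transformer. A sketch using SolrJ; the model name and the efi parameter are
placeholders, not names from this patch:

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;

public class LtrQueryExample {
  public static void main(String[] args) throws Exception {
    try (SolrClient client =
        new HttpSolrClient.Builder("http://localhost:8983/solr/collection1").build()) {
      SolrQuery query = new SolrQuery("test");
      // Rerank the top 100 results with a (placeholder) model named myModel.
      query.add("rq", "{!ltr model=myModel reRankDocs=100 efi.user_query=test}");
      // Request the feature vector emitted by the [fv] transformer.
      query.setFields("id", "score", "[fv]");
      QueryResponse rsp = client.query(query);
      System.out.println(rsp.getResults());
    }
  }
}
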
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/solr/collection1/conf/solrconfig-ltr_Th10_10.xml
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/solr/collection1/conf/solrconfig-ltr_Th10_10.xml b/solr/contrib/ltr/src/test-files/solr/collection1/conf/solrconfig-ltr_Th10_10.xml
new file mode 100644
index 0000000..fd0940a
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/solr/collection1/conf/solrconfig-ltr_Th10_10.xml
@@ -0,0 +1,69 @@
+<?xml version="1.0" ?>
+<!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor
+ license agreements. See the NOTICE file distributed with this work for additional
+ information regarding copyright ownership. The ASF licenses this file to
+ You under the Apache License, Version 2.0 (the "License"); you may not use
+ this file except in compliance with the License. You may obtain a copy of
+ the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required
+ by applicable law or agreed to in writing, software distributed under the
+ License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
+ OF ANY KIND, either express or implied. See the License for the specific
+ language governing permissions and limitations under the License. -->
+
+<config>
+    <luceneMatchVersion>6.0.0</luceneMatchVersion>
+ <dataDir>${solr.data.dir:}</dataDir>
+ <directoryFactory name="DirectoryFactory"
+  class="${solr.directoryFactory:solr.RAMDirectoryFactory}" />
+
+ <schemaFactory class="ClassicIndexSchemaFactory" />
+
+
+ <!-- Query parser used to rerank top docs with a provided model -->
+ <queryParser name="ltr" class="org.apache.solr.search.LTRQParserPlugin" >
+  <int name="threadModule.totalPoolThreads">10</int> <!-- Maximum threads to use for all queries -->
+  <int name="threadModule.numThreadsPerRequest">10</int> <!-- Maximum threads to use for a single query-->
+ </queryParser>
+
+
+
+ <query>
+  <filterCache class="solr.FastLRUCache" size="4096"
+   initialSize="2048" autowarmCount="0" />
+  <cache name="QUERY_DOC_FV" class="solr.search.LRUCache" size="4096"
+   initialSize="2048" autowarmCount="4096" regenerator="solr.search.NoOpRegenerator" />
+ </query>
+
+ <!-- add a transformer that will encode the document features in the response.
+  For each document the transformer will add the features as an extra field
+  in the response. The name of the field will be the name of the transformer
+  enclosed in brackets (in this case [fv]). In order to get the feature
+  vector you will have to specify that you want the field (e.g., fl="*,[fv]") -->
+ <transformer name="fv"
+  class="org.apache.solr.response.transform.LTRFeatureLoggerTransformerFactory" />
+
+ <updateHandler class="solr.DirectUpdateHandler2">
+  <autoCommit>
+   <maxTime>15000</maxTime>
+   <openSearcher>false</openSearcher>
+  </autoCommit>
+  <autoSoftCommit>
+   <maxTime>1000</maxTime>
+  </autoSoftCommit>
+  <updateLog>
+   <str name="dir">${solr.data.dir:}</str>
+  </updateLog>
+ </updateHandler>
+
+ <requestHandler name="/update" class="solr.UpdateRequestHandler" />
+ <!-- Query request handler managing models and features -->
+ <requestHandler name="/query" class="solr.SearchHandler">
+  <lst name="defaults">
+   <str name="echoParams">explicit</str>
+   <str name="wt">json</str>
+   <str name="indent">true</str>
+   <str name="df">id</str>
+  </lst>
+ </requestHandler>
+
+</config>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/solr/collection1/conf/solrconfig-multiseg.xml
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/solr/collection1/conf/solrconfig-multiseg.xml b/solr/contrib/ltr/src/test-files/solr/collection1/conf/solrconfig-multiseg.xml
new file mode 100644
index 0000000..a36c1df
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/solr/collection1/conf/solrconfig-multiseg.xml
@@ -0,0 +1,62 @@
+<?xml version="1.0" ?>
+<!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor 
+ license agreements. See the NOTICE file distributed with this work for additional 
+ information regarding copyright ownership. The ASF licenses this file to 
+ You under the Apache License, Version 2.0 (the "License"); you may not use 
+ this file except in compliance with the License. You may obtain a copy of 
+ the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required 
+ by applicable law or agreed to in writing, software distributed under the 
+ License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS 
+ OF ANY KIND, either express or implied. See the License for the specific 
+ language governing permissions and limitations under the License. -->
+
+<config>
+ <luceneMatchVersion>6.0.0</luceneMatchVersion>
+ <dataDir>${solr.data.dir:}</dataDir>
+ <directoryFactory name="DirectoryFactory"
+  class="${solr.directoryFactory:solr.RAMDirectoryFactory}" />
+
+ <schemaFactory class="ClassicIndexSchemaFactory" />
+
+
+ <!-- Query parser used to rerank top docs with a provided model -->
+ <queryParser name="ltr" class="org.apache.solr.search.LTRQParserPlugin" />
+
+ <maxBufferedDocs>1</maxBufferedDocs>
+ <mergePolicyFactory class="org.apache.solr.index.TieredMergePolicyFactory">
+  <int name="maxMergeAtOnce">10</int>
+  <int name="segmentsPerTier">1000</int>
+ </mergePolicyFactory>
+ <!-- add a transformer that will encode the document features in the response.
+  For each document the transformer will add the features as an extra field
+  in the response. The name of the field will be the name of the transformer
+  enclosed in brackets (in this case [features]). In order to get the feature
+  vector you will have to specify that you want the field (e.g., fl="*,[features]") -->
+ <transformer name="features"
+  class="org.apache.solr.response.transform.LTRFeatureLoggerTransformerFactory" />
+
+ <updateHandler class="solr.DirectUpdateHandler2">
+  <autoCommit>
+   <maxTime>15000</maxTime>
+   <openSearcher>false</openSearcher>
+  </autoCommit>
+  <autoSoftCommit>
+   <maxTime>1000</maxTime>
+  </autoSoftCommit>
+  <updateLog>
+   <str name="dir">${solr.data.dir:}</str>
+  </updateLog>
+ </updateHandler>
+
+ <requestHandler name="/update" class="solr.UpdateRequestHandler" />
+ <!-- Query request handler managing models and features -->
+ <requestHandler name="/query" class="solr.SearchHandler">
+  <lst name="defaults">
+   <str name="echoParams">explicit</str>
+   <str name="wt">json</str>
+   <str name="indent">true</str>
+   <str name="df">id</str>
+  </lst>
+ </requestHandler>
+
+</config>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/solr/collection1/conf/stopwords.txt
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/solr/collection1/conf/stopwords.txt b/solr/contrib/ltr/src/test-files/solr/collection1/conf/stopwords.txt
new file mode 100644
index 0000000..eabae3b
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/solr/collection1/conf/stopwords.txt
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+a

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/solr/collection1/conf/synonyms.txt
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/solr/collection1/conf/synonyms.txt b/solr/contrib/ltr/src/test-files/solr/collection1/conf/synonyms.txt
new file mode 100644
index 0000000..0ef0e8d
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/solr/collection1/conf/synonyms.txt
@@ -0,0 +1,28 @@
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#-----------------------------------------------------------------------
+#some test synonym mappings unlikely to appear in real input text
+aaafoo => aaabar
+bbbfoo => bbbfoo bbbbar
+cccfoo => cccbar cccbaz
+fooaaa,baraaa,bazaaa
+
+# Some synonym groups specific to this example
+GB,gib,gigabyte,gigabytes
+MB,mib,megabyte,megabytes
+Television, Televisions, TV, TVs
+#notice we use "gib" instead of "GiB" so any WordDelimiterFilter coming
+#after us won't split it into two words.
+
+# Synonym mappings can be used for spelling correction too
+pixima => pixma


[27/50] [abbrv] lucene-solr:apiv2: SOLR-9442: Adds Array of NamedValuePair (json.nl=arrnvp) style to JSONResponseWriter. (Jonny Marks, Christine Poerschke)

Posted by sa...@apache.org.
SOLR-9442: Adds Array of NamedValuePair (json.nl=arrnvp) style to JSONResponseWriter. (Jonny Marks, Christine Poerschke)


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/87c6ec4c
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/87c6ec4c
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/87c6ec4c

Branch: refs/heads/apiv2
Commit: 87c6ec4cb0a91e1952e4dff31d6e1f92ed0806bf
Parents: 72bdbd2
Author: Christine Poerschke <cp...@apache.org>
Authored: Mon Oct 31 11:17:47 2016 +0000
Committer: Christine Poerschke <cp...@apache.org>
Committed: Mon Oct 31 11:19:39 2016 +0000

----------------------------------------------------------------------
 solr/CHANGES.txt                                |   3 +
 .../solr/response/JSONResponseWriter.java       | 173 ++++++++++++++++++-
 .../apache/solr/response/JSONWriterTest.java    | 102 ++++++++++-
 3 files changed, 274 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/87c6ec4c/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 2f46416..09bf007 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -86,6 +86,9 @@ New Features
   Example:  { type:terms, field:category, filter:"user:yonik" } 
   (yonik)
 
+* SOLR-9442: Adds Array of NamedValuePair (json.nl=arrnvp) style to JSONResponseWriter.
+  (Jonny Marks, Christine Poerschke)
+
 Optimizations
 ----------------------
 * SOLR-9704: Facet Module / JSON Facet API: Optimize blockChildren facets that have

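To make the new style concrete (mirroring the Javadoc examples in the writer
below), here is a NamedList and the JSON it renders to under json.nl=arrnvp:

import org.apache.solr.common.util.NamedList;

public class ArrNvpExample {
  public static void main(String[] args) {
    NamedList<Object> nl = new NamedList<>();
    nl.add("a", 1);
    nl.add("bar", "foo");
    nl.add(null, 3.4f);
    // Requested with wt=json&json.nl=arrnvp, the response writer renders:
    // [{"name":"a","int":1},{"name":"bar","str":"foo"},{"float":3.4}]
    System.out.println(nl);
  }
}
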
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/87c6ec4c/solr/core/src/java/org/apache/solr/response/JSONResponseWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/response/JSONResponseWriter.java b/solr/core/src/java/org/apache/solr/response/JSONResponseWriter.java
index cd6648b..ad128d2 100644
--- a/solr/core/src/java/org/apache/solr/response/JSONResponseWriter.java
+++ b/solr/core/src/java/org/apache/solr/response/JSONResponseWriter.java
@@ -26,6 +26,7 @@ import java.util.Set;
 
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.params.CommonParams;
+import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.request.SolrQueryRequest;
@@ -50,7 +51,19 @@ public class JSONResponseWriter implements QueryResponseWriter {
 
   @Override
   public void write(Writer writer, SolrQueryRequest req, SolrQueryResponse rsp) throws IOException {
-    JSONWriter w = new JSONWriter(writer, req, rsp);
+    final SolrParams params = req.getParams();
+    final String wrapperFunction = params.get(JSONWriter.JSON_WRAPPER_FUNCTION);
+    final String namedListStyle = params.get(JSONWriter.JSON_NL_STYLE, JSONWriter.JSON_NL_FLAT).intern();
+
+    final JSONWriter w;
+    if (namedListStyle.equals(JSONWriter.JSON_NL_ARROFNVP)) {
+      w = new ArrayOfNamedValuePairJSONWriter(
+          writer, req, rsp, wrapperFunction, namedListStyle);
+    } else {
+      w = new JSONWriter(
+          writer, req, rsp, wrapperFunction, namedListStyle);
+    }
+
     try {
       w.writeResponse();
     } finally {
@@ -66,13 +79,14 @@ public class JSONResponseWriter implements QueryResponseWriter {
 
 class JSONWriter extends TextResponseWriter {
   protected String wrapperFunction;
-  final private String namedListStyle;
+  final protected String namedListStyle;
 
   static final String JSON_NL_STYLE="json.nl";
   static final String JSON_NL_MAP="map";
   static final String JSON_NL_FLAT="flat";
   static final String JSON_NL_ARROFARR="arrarr";
   static final String JSON_NL_ARROFMAP="arrmap";
+  static final String JSON_NL_ARROFNVP="arrnvp";
   static final String JSON_WRAPPER_FUNCTION="json.wrf";
 
   public JSONWriter(Writer writer, SolrQueryRequest req, SolrQueryResponse rsp) {
@@ -306,6 +320,9 @@ class JSONWriter extends TextResponseWriter {
       writeNamedListAsArrArr(name,val);
     } else if (namedListStyle==JSON_NL_ARROFMAP) {
       writeNamedListAsArrMap(name,val);
+    } else if (namedListStyle==JSON_NL_ARROFNVP) {
+      throw new UnsupportedOperationException(namedListStyle
+          + " namedListStyle must only be used with "+ArrayOfNamedValuePairJSONWriter.class.getSimpleName());
     }
   }
 
@@ -588,6 +605,158 @@ class JSONWriter extends TextResponseWriter {
 
 }
 
+/**
+ * Writes NamedLists directly as an array of NamedValuePair JSON objects...
+ * NamedList("a"=1,"b"=2,null=3) => [{"name":"a","int":1},{"name":"b","int":2},{"int":3}]
+ * NamedList("a"=1,"bar"="foo",null=3.4f) => [{"name":"a","int":1},{"name":"bar","str":"foo"},{"float":3.4}]
+ */
+class ArrayOfNamedValuePairJSONWriter extends JSONWriter {
+  private boolean writeTypeAsKey = false;
+
+  public ArrayOfNamedValuePairJSONWriter(Writer writer, SolrQueryRequest req, SolrQueryResponse rsp,
+                                         String wrapperFunction, String namedListStyle) {
+    super(writer, req, rsp, wrapperFunction, namedListStyle);
+    if (namedListStyle != JSON_NL_ARROFNVP) {
+      throw new UnsupportedOperationException(ArrayOfNamedValuePairJSONWriter.class.getSimpleName()+" must only be used with "
+          + JSON_NL_ARROFNVP + " style");
+    }
+  }
+
+  @Override
+  public void writeNamedList(String name, NamedList val) throws IOException {
+
+    if (val instanceof SimpleOrderedMap) {
+      super.writeNamedList(name, val);
+      return;
+    }
+
+    final int sz = val.size();
+    indent();
+
+    writeArrayOpener(sz);
+    incLevel();
+
+    boolean first = true;
+    for (int i=0; i<sz; i++) {
+      if (first) {
+        first = false;
+      } else {
+        writeArraySeparator();
+      }
+
+      indent();
+
+      final String elementName = val.getName(i);
+      final Object elementVal = val.getVal(i);
+
+      /*
+       * JSONWriter's writeNamedListAsArrMap turns NamedList("bar"="foo") into [{"bar":"foo"}]
+       * but we here wish to turn it into [ {"name":"bar","str":"foo"} ] instead.
+       *
+       * So first we write the <code>{"name":"bar",</code> portion ...
+       */
+      writeMapOpener(-1);
+      if (elementName != null) {
+        writeKey("name", false);
+        writeVal("name", elementName);
+        writeMapSeparator();
+      }
+
+      /*
+       * ... and then we write the <code>"str":"foo"}</code> portion.
+       */
+      writeTypeAsKey = true;
+      writeVal(null, elementVal); // passing null since writeVal doesn't actually use name (and we already wrote elementName above)
+      if (writeTypeAsKey) {
+        throw new RuntimeException("writeTypeAsKey should have been reset to false by writeVal('"+elementName+"','"+elementVal+"')");
+      }
+      writeMapCloser();
+    }
+
+    decLevel();
+    writeArrayCloser();
+  }
+
+  private void ifNeededWriteTypeAsKey(String type) throws IOException {
+    if (writeTypeAsKey) {
+      writeTypeAsKey = false;
+      writeKey(type, false);
+    }
+  }
+
+  @Override
+  public void writeInt(String name, String val) throws IOException {
+    ifNeededWriteTypeAsKey("int");
+    super.writeInt(name, val);
+  }
+
+  @Override
+  public void writeLong(String name, String val) throws IOException {
+    ifNeededWriteTypeAsKey("long");
+    super.writeLong(name, val);
+  }
+
+  @Override
+  public void writeFloat(String name, String val) throws IOException {
+    ifNeededWriteTypeAsKey("float");
+    super.writeFloat(name, val);
+  }
+
+  @Override
+  public void writeDouble(String name, String val) throws IOException {
+    ifNeededWriteTypeAsKey("double");
+    super.writeDouble(name, val);
+  }
+
+  @Override
+  public void writeBool(String name, String val) throws IOException {
+    ifNeededWriteTypeAsKey("bool");
+    super.writeBool(name, val);
+  }
+
+  @Override
+  public void writeDate(String name, String val) throws IOException {
+    ifNeededWriteTypeAsKey("date");
+    super.writeDate(name, val);
+  }
+
+  @Override
+  public void writeStr(String name, String val, boolean needsEscaping) throws IOException {
+    ifNeededWriteTypeAsKey("str");
+    super.writeStr(name, val, needsEscaping);
+  }
+
+  @Override
+  public void writeSolrDocument(String name, SolrDocument doc, ReturnFields returnFields, int idx) throws IOException {
+    ifNeededWriteTypeAsKey("doc");
+    super.writeSolrDocument(name, doc, returnFields, idx);
+  }
+
+  @Override
+  public void writeStartDocumentList(String name, long start, int size, long numFound, Float maxScore) throws IOException {
+    ifNeededWriteTypeAsKey("doclist");
+    super.writeStartDocumentList(name, start, size, numFound, maxScore);
+  }
+
+  @Override
+  public void writeMap(String name, Map val, boolean excludeOuter, boolean isFirstVal) throws IOException {
+    ifNeededWriteTypeAsKey("map");
+    super.writeMap(name, val, excludeOuter, isFirstVal);
+  }
+
+  @Override
+  public void writeArray(String name, Iterator val) throws IOException {
+    ifNeededWriteTypeAsKey("array");
+    super.writeArray(name, val);
+  }
+
+  @Override
+  public void writeNull(String name) throws IOException {
+    ifNeededWriteTypeAsKey("null");
+    super.writeNull(name);
+  }
+}
+
 abstract class NaNFloatWriter extends JSONWriter {
   
   abstract protected String getNaN();

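A note on the dispatch in write() above: the style string is intern()ed once, which
is why the later comparisons such as namedListStyle==JSON_NL_ARROFNVP can safely use
reference equality against the string constants. A minimal self-contained sketch of
the same idiom (hypothetical class and constant names, not Solr API):

    // Sketch only: '==' dispatch is safe here because every style string
    // is canonicalized via intern() before comparison.
    public final class StyleDispatch {
      static final String STYLE_FLAT = "flat";  // string literals are interned
      static final String STYLE_MAP  = "map";

      static String pick(String requested) {
        final String style = (requested == null ? STYLE_FLAT : requested).intern();
        if (style == STYLE_MAP) {  // reference equality, same as equals() here
          return "map writer";
        }
        return "flat writer";
      }

      public static void main(String[] args) {
        System.out.println(pick(new String("map")));  // prints: map writer
      }
    }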
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/87c6ec4c/solr/core/src/test/org/apache/solr/response/JSONWriterTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/response/JSONWriterTest.java b/solr/core/src/test/org/apache/solr/response/JSONWriterTest.java
index ad390cb..a61cff3 100644
--- a/solr/core/src/test/org/apache/solr/response/JSONWriterTest.java
+++ b/solr/core/src/test/org/apache/solr/response/JSONWriterTest.java
@@ -18,7 +18,11 @@ package org.apache.solr.response;
 
 import java.io.IOException;
 import java.io.StringWriter;
+import java.lang.reflect.Method;
+import java.lang.reflect.Modifier;
 import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.List;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
@@ -72,7 +76,8 @@ public class JSONWriterTest extends SolrTestCaseJ4 {
 
   @Test
   public void testJSON() throws IOException {
-    SolrQueryRequest req = req("wt","json","json.nl","arrarr");
+    final String namedListStyle = (random().nextBoolean() ? JSONWriter.JSON_NL_ARROFARR : JSONWriter.JSON_NL_ARROFNVP);
+    SolrQueryRequest req = req("wt","json","json.nl",namedListStyle);
     SolrQueryResponse rsp = new SolrQueryResponse();
     JSONResponseWriter w = new JSONResponseWriter();
 
@@ -87,7 +92,18 @@ public class JSONWriterTest extends SolrTestCaseJ4 {
     rsp.add("bytes", "abc".getBytes(StandardCharsets.UTF_8));
 
     w.write(buf, req, rsp);
-    jsonEq("{\"nl\":[[\"data1\",\"he\\u2028llo\\u2029!\"],[null,42]],\"byte\":-3,\"short\":-4,\"bytes\":\"YWJj\"}", buf.toString());
+
+    final String expectedNLjson;
+    if (namedListStyle == JSONWriter.JSON_NL_ARROFARR) {
+      expectedNLjson = "\"nl\":[[\"data1\",\"he\\u2028llo\\u2029!\"],[null,42]]";
+    } else if (namedListStyle == JSONWriter.JSON_NL_ARROFNVP) {
+      expectedNLjson = "\"nl\":[{\"name\":\"data1\",\"str\":\"he\\u2028llo\\u2029!\"},{\"int\":42}]";
+    } else {
+      expectedNLjson = null;
+      fail("unexpected namedListStyle="+namedListStyle);
+    }
+
+    jsonEq("{"+expectedNLjson+",\"byte\":-3,\"short\":-4,\"bytes\":\"YWJj\"}", buf.toString());
     req.close();
   }
 
@@ -130,6 +146,87 @@ public class JSONWriterTest extends SolrTestCaseJ4 {
 
     req.close();
   }
+
+  @Test
+  public void testArrnvpWriterOverridesAllWrites() {
+    // List rather than Set because two not-overridden methods could share name but not signature
+    final List<String> methodsExpectedNotOverriden = new ArrayList<>(14);
+    methodsExpectedNotOverriden.add("writeResponse");
+    methodsExpectedNotOverriden.add("writeKey");
+    methodsExpectedNotOverriden.add("writeNamedListAsMapMangled");
+    methodsExpectedNotOverriden.add("writeNamedListAsMapWithDups");
+    methodsExpectedNotOverriden.add("writeNamedListAsArrMap");
+    methodsExpectedNotOverriden.add("writeNamedListAsArrArr");
+    methodsExpectedNotOverriden.add("writeNamedListAsFlat");
+    methodsExpectedNotOverriden.add("writeEndDocumentList");
+    methodsExpectedNotOverriden.add("writeMapOpener");
+    methodsExpectedNotOverriden.add("writeMapSeparator");
+    methodsExpectedNotOverriden.add("writeMapCloser");
+    methodsExpectedNotOverriden.add("writeArrayOpener");
+    methodsExpectedNotOverriden.add("writeArraySeparator");
+    methodsExpectedNotOverriden.add("writeArrayCloser");
+
+    final Class<?> subClass = ArrayOfNamedValuePairJSONWriter.class;
+    final Class<?> superClass = subClass.getSuperclass();
+
+    for (final Method superClassMethod : superClass.getDeclaredMethods()) {
+      final String methodName = superClassMethod.getName();
+      if (!methodName.startsWith("write")) continue;
+
+      final int modifiers = superClassMethod.getModifiers();
+      if (Modifier.isFinal(modifiers)) continue;
+      if (Modifier.isStatic(modifiers)) continue;
+      if (Modifier.isPrivate(modifiers)) continue;
+
+      final boolean expectOverriden = !methodsExpectedNotOverriden.contains(methodName);
+
+      try {
+        final Method subClassMethod = subClass.getDeclaredMethod(
+            superClassMethod.getName(),
+            superClassMethod.getParameterTypes());
+
+        if (expectOverriden) {
+          assertEquals("getReturnType() difference",
+              superClassMethod.getReturnType(),
+              subClassMethod.getReturnType());
+        } else {
+          fail(subClass + " must not override '" + superClassMethod + "'");
+        }
+      } catch (NoSuchMethodException e) {
+        if (expectOverriden) {
+          fail(subClass + " needs to override '" + superClassMethod + "'");
+        } else {
+          assertTrue(methodName+" not found in remaining "+methodsExpectedNotOverriden, methodsExpectedNotOverriden.remove(methodName));
+        }
+      }
+    }
+
+    assertTrue("methods expected to be not overridden but not found: "+methodsExpectedNotOverriden,
+        methodsExpectedNotOverriden.isEmpty());
+  }
+
+  @Test
+  public void testArrnvpWriterLacksMethodsOfItsOwn() {
+    final Class<?> subClass = ArrayOfNamedValuePairJSONWriter.class;
+    final Class<?> superClass = subClass.getSuperclass();
+    // ArrayOfNamedValuePairJSONWriter is a simple sub-class
+    // which should have (almost) no methods of its own
+    for (final Method subClassMethod : subClass.getDeclaredMethods()) {
+      // the only method of its own should be this private helper
+      if (subClassMethod.getName().equals("ifNeededWriteTypeAsKey")) continue;
+      try {
+        final Method superClassMethod = superClass.getDeclaredMethod(
+            subClassMethod.getName(),
+            subClassMethod.getParameterTypes());
+
+          assertEquals("getReturnType() difference",
+              subClassMethod.getReturnType(),
+              superClassMethod.getReturnType());
+      } catch (NoSuchMethodException e) {
+          fail(subClass + " should not have '" + subClassMethod + "' method of its own");
+      }
+    }
+  }
   
   @Test
   public void testConstantsUnchanged() {
@@ -138,6 +235,7 @@ public class JSONWriterTest extends SolrTestCaseJ4 {
     assertEquals("flat", JSONWriter.JSON_NL_FLAT);
     assertEquals("arrarr", JSONWriter.JSON_NL_ARROFARR);
     assertEquals("arrmap", JSONWriter.JSON_NL_ARROFMAP);
+    assertEquals("arrnvp", JSONWriter.JSON_NL_ARROFNVP);
     assertEquals("json.wrf", JSONWriter.JSON_WRAPPER_FUNCTION);
   }
 

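To make the new arrnvp style concrete: each NamedList entry becomes a small JSON
object whose first key is "name" (omitted when the entry has no name) and whose
second key is the value's type, exactly as the class javadoc above describes. A
self-contained sketch of that mapping using only java.util (the encode() helper
and typeKey() are hypothetical, not part of the commit):

    import java.util.AbstractMap.SimpleEntry;
    import java.util.Arrays;
    import java.util.List;
    import java.util.Map.Entry;

    public class ArrNvpSketch {
      // crude type-to-key mapping, mirroring the writeInt/writeStr/... overrides
      static String typeKey(Object v) {
        if (v instanceof Integer) return "int";
        if (v instanceof Long) return "long";
        if (v instanceof Float || v instanceof Double) return "float";
        return "str";
      }

      static String encode(List<Entry<String,Object>> nl) {
        StringBuilder sb = new StringBuilder("[");
        for (int i = 0; i < nl.size(); i++) {
          if (i > 0) sb.append(',');
          sb.append('{');
          if (nl.get(i).getKey() != null) {
            sb.append("\"name\":\"").append(nl.get(i).getKey()).append("\",");
          }
          Object v = nl.get(i).getValue();
          boolean quote = "str".equals(typeKey(v));
          sb.append('"').append(typeKey(v)).append("\":")
            .append(quote ? "\"" + v + "\"" : String.valueOf(v));
          sb.append('}');
        }
        return sb.append(']').toString();
      }

      public static void main(String[] args) {
        List<Entry<String,Object>> nl = Arrays.asList(
            new SimpleEntry<String,Object>("a", 1),
            new SimpleEntry<String,Object>("bar", "foo"),
            new SimpleEntry<String,Object>(null, 3));
        // prints: [{"name":"a","int":1},{"name":"bar","str":"foo"},{"int":3}]
        System.out.println(encode(nl));
      }
    }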

[28/50] [abbrv] lucene-solr:apiv2: Up test timeout to match all others in TestCollectionStateWatchers

Posted by sa...@apache.org.
Up test timeout to match all others in TestCollectionStateWatchers


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c4446582
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c4446582
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c4446582

Branch: refs/heads/apiv2
Commit: c4446582862204e10c2b4171bce4e5364cc94753
Parents: 87c6ec4
Author: Alan Woodward <ro...@apache.org>
Authored: Mon Oct 31 12:49:10 2016 +0000
Committer: Alan Woodward <ro...@apache.org>
Committed: Mon Oct 31 12:49:42 2016 +0000

----------------------------------------------------------------------
 .../org/apache/solr/common/cloud/TestCollectionStateWatchers.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c4446582/solr/solrj/src/test/org/apache/solr/common/cloud/TestCollectionStateWatchers.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/common/cloud/TestCollectionStateWatchers.java b/solr/solrj/src/test/org/apache/solr/common/cloud/TestCollectionStateWatchers.java
index d8a1f0f..eb3ae7f 100644
--- a/solr/solrj/src/test/org/apache/solr/common/cloud/TestCollectionStateWatchers.java
+++ b/solr/solrj/src/test/org/apache/solr/common/cloud/TestCollectionStateWatchers.java
@@ -282,7 +282,7 @@ public class TestCollectionStateWatchers extends SolrCloudTestCase {
 
     final CloudSolrClient client = cluster.getSolrClient();
 
-    Future<Boolean> future = waitInBackground("stateformat1", 10, TimeUnit.SECONDS,
+    Future<Boolean> future = waitInBackground("stateformat1", MAX_WAIT_TIMEOUT, TimeUnit.SECONDS,
         (n, c) -> DocCollection.isFullyActive(n, c, 1, 1));
 
     CollectionAdminRequest.createCollection("stateformat1", "config", 1, 1).setStateFormat(1)


[22/50] [abbrv] lucene-solr:apiv2: LUCENE-7135: work around security manager when checking for 32/64 bit JVM

Posted by sa...@apache.org.
LUCENE-7135: work around security manager when checking for 32/64 bit JVM


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/813b6855
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/813b6855
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/813b6855

Branch: refs/heads/apiv2
Commit: 813b6855656ecd50a7a28376822bd7b65154cee8
Parents: dbc2bc7
Author: Mike McCandless <mi...@apache.org>
Authored: Sun Oct 30 20:04:37 2016 -0400
Committer: Mike McCandless <mi...@apache.org>
Committed: Sun Oct 30 20:04:37 2016 -0400

----------------------------------------------------------------------
 lucene/CHANGES.txt                                |  4 ++++
 .../java/org/apache/lucene/util/Constants.java    | 18 ++++++++++--------
 2 files changed, 14 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/813b6855/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 5a6601b..385a9ae 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -110,6 +110,10 @@ Bug Fixes
 * LUCENE-7429: AnalyzerWrapper can now modify the normalization chain too and
   DelegatingAnalyzerWrapper does the right thing automatically. (Adrien Grand)
 
+* Lucene's check for 32 or 64 bit JVM now works around security
+  manager blocking access to some properties (Aaron Madlon-Kay via
+  Mike McCandless)
+
 Improvements
 
 * LUCENE-7439: FuzzyQuery now matches all terms within the specified

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/813b6855/lucene/core/src/java/org/apache/lucene/util/Constants.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/util/Constants.java b/lucene/core/src/java/org/apache/lucene/util/Constants.java
index 7df0efc..e6a9609 100644
--- a/lucene/core/src/java/org/apache/lucene/util/Constants.java
+++ b/lucene/core/src/java/org/apache/lucene/util/Constants.java
@@ -68,15 +68,17 @@ public final class Constants {
       JVM_MINOR_VERSION = 0;
     }
     boolean is64Bit = false;
-    final String x = System.getProperty("sun.arch.data.model");
-    if (x != null) {
-      is64Bit = x.contains("64");
-    } else {
-      if (OS_ARCH != null && OS_ARCH.contains("64")) {
-        is64Bit = true;
-      } else {
-        is64Bit = false;
+    String datamodel = null;
+    try {
+      datamodel = System.getProperty("sun.arch.data.model");
+      if (datamodel != null) {
+        is64Bit = datamodel.contains("64");
       }
+    } catch (SecurityException ex) {}
+    if (datamodel == null && OS_ARCH != null && OS_ARCH.contains("64")) {
+      is64Bit = true;
+    } else {
+      is64Bit = false;
     }
     JRE_IS_64BIT = is64Bit;
   }
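
A caveat about the hunk as it reads above: when sun.arch.data.model is readable and
non-null, the combined condition `datamodel == null && ...` is false, so the trailing
else resets is64Bit to false and discards the value just parsed from the property. A
hedged sketch of the presumably intended control flow, where OS_ARCH (the field from
the surrounding Constants initializer) is consulted only when the property is
unavailable (sketch only, not the committed code):

    boolean is64Bit = false;
    String datamodel = null;
    try {
      datamodel = System.getProperty("sun.arch.data.model");
    } catch (SecurityException ex) {
      // security manager denied the read; fall through with datamodel == null
    }
    if (datamodel != null) {
      is64Bit = datamodel.contains("64");
    } else if (OS_ARCH != null && OS_ARCH.contains("64")) {
      is64Bit = true;
    }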


[37/50] [abbrv] lucene-solr:apiv2: SOLR-8542: Adds Solr Learning to Rank (LTR) plugin for reranking results with machine learning models. (Michael Nilsson, Diego Ceccarelli, Joshua Pantony, Jon Dorando, Naveen Santhapuri, Alessandro Benedetti, David Groh

Posted by sa...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestSelectiveWeightCreation.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestSelectiveWeightCreation.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestSelectiveWeightCreation.java
new file mode 100644
index 0000000..68961d2
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestSelectiveWeightCreation.java
@@ -0,0 +1,251 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FloatDocValuesField;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.index.ReaderUtil;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.Weight;
+import org.apache.lucene.store.Directory;
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.ltr.feature.Feature;
+import org.apache.solr.ltr.feature.ValueFeature;
+import org.apache.solr.ltr.model.LTRScoringModel;
+import org.apache.solr.ltr.model.ModelException;
+import org.apache.solr.ltr.model.TestLinearModel;
+import org.apache.solr.ltr.norm.IdentityNormalizer;
+import org.apache.solr.ltr.norm.Normalizer;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestSelectiveWeightCreation extends TestRerankBase {
+  private IndexSearcher getSearcher(IndexReader r) {
+    final IndexSearcher searcher = newSearcher(r, false, false);
+    return searcher;
+  }
+
+  private static List<Feature> makeFeatures(int[] featureIds) {
+    final List<Feature> features = new ArrayList<>();
+    for (final int i : featureIds) {
+      Map<String,Object> params = new HashMap<String,Object>();
+      params.put("value", i);
+      final Feature f = Feature.getInstance(solrResourceLoader,
+          ValueFeature.class.getCanonicalName(),
+          "f" + i, params);
+      f.setIndex(i);
+      features.add(f);
+    }
+    return features;
+  }
+
+  private static Map<String,Object> makeFeatureWeights(List<Feature> features) {
+    final Map<String,Object> nameParams = new HashMap<String,Object>();
+    final HashMap<String,Double> modelWeights = new HashMap<String,Double>();
+    for (final Feature feat : features) {
+      modelWeights.put(feat.getName(), 0.1);
+    }
+    nameParams.put("weights", modelWeights);
+    return nameParams;
+  }
+
+  private LTRScoringQuery.ModelWeight performQuery(TopDocs hits,
+      IndexSearcher searcher, int docid, LTRScoringQuery model) throws IOException,
+      ModelException {
+    final List<LeafReaderContext> leafContexts = searcher.getTopReaderContext()
+        .leaves();
+    final int n = ReaderUtil.subIndex(hits.scoreDocs[0].doc, leafContexts);
+    final LeafReaderContext context = leafContexts.get(n);
+    final int deBasedDoc = hits.scoreDocs[0].doc - context.docBase;
+
+    final Weight weight = searcher.createNormalizedWeight(model, true);
+    final Scorer scorer = weight.scorer(context);
+
+    // rerank using the field final-score
+    scorer.iterator().advance(deBasedDoc);
+    scorer.score();
+    assertTrue(weight instanceof LTRScoringQuery.ModelWeight);
+    final LTRScoringQuery.ModelWeight modelWeight = (LTRScoringQuery.ModelWeight) weight;
+    return modelWeight;
+
+  }
+
+
+  @BeforeClass
+  public static void before() throws Exception {
+    setuptest("solrconfig-ltr.xml", "schema.xml");
+
+    assertU(adoc("id", "1", "title", "w1 w3", "description", "w1", "popularity",
+        "1"));
+    assertU(adoc("id", "2", "title", "w2", "description", "w2", "popularity",
+        "2"));
+    assertU(adoc("id", "3", "title", "w3", "description", "w3", "popularity",
+        "3"));
+    assertU(adoc("id", "4", "title", "w4 w3", "description", "w4", "popularity",
+        "4"));
+    assertU(adoc("id", "5", "title", "w5", "description", "w5", "popularity",
+        "5"));
+    assertU(commit());
+
+    loadFeatures("external_features.json");
+    loadModels("external_model.json");
+    loadModels("external_model_store.json");
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    aftertest();
+  }
+
+  @Test
+  public void testScoringQueryWeightCreation() throws IOException, ModelException {
+    final Directory dir = newDirectory();
+    final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+
+    Document doc = new Document();
+    doc.add(newStringField("id", "0", Field.Store.YES));
+    doc.add(newTextField("field", "wizard the the the the the oz",
+        Field.Store.NO));
+    doc.add(new FloatDocValuesField("final-score", 1.0f));
+
+    w.addDocument(doc);
+    doc = new Document();
+    doc.add(newStringField("id", "1", Field.Store.YES));
+    // 1 extra token, but wizard and oz are close;
+    doc.add(newTextField("field", "wizard oz the the the the the the",
+        Field.Store.NO));
+    doc.add(new FloatDocValuesField("final-score", 2.0f));
+    w.addDocument(doc);
+
+    final IndexReader r = w.getReader();
+    w.close();
+
+    // Do ordinary BooleanQuery:
+    final BooleanQuery.Builder bqBuilder = new BooleanQuery.Builder();
+    bqBuilder.add(new TermQuery(new Term("field", "wizard")), BooleanClause.Occur.SHOULD);
+    bqBuilder.add(new TermQuery(new Term("field", "oz")), BooleanClause.Occur.SHOULD);
+    final IndexSearcher searcher = getSearcher(r);
+    // first run the standard query
+    final TopDocs hits = searcher.search(bqBuilder.build(), 10);
+    assertEquals(2, hits.totalHits);
+    assertEquals("0", searcher.doc(hits.scoreDocs[0].doc).get("id"));
+    assertEquals("1", searcher.doc(hits.scoreDocs[1].doc).get("id"));
+
+    List<Feature> features = makeFeatures(new int[] {0, 1, 2});
+    final List<Feature> allFeatures = makeFeatures(new int[] {0, 1, 2, 3, 4, 5,
+        6, 7, 8, 9});
+    final List<Normalizer> norms = new ArrayList<>();
+    for (int k=0; k < features.size(); ++k){
+        norms.add(IdentityNormalizer.INSTANCE);
+    }
+
+    // when features are NOT requested in the response, only the modelFeature weights should be created
+    final LTRScoringModel ltrScoringModel1 = TestLinearModel.createLinearModel("test",
+        features, norms, "test", allFeatures,
+        makeFeatureWeights(features));
+    LTRScoringQuery.ModelWeight modelWeight = performQuery(hits, searcher,
+        hits.scoreDocs[0].doc, new LTRScoringQuery(ltrScoringModel1, false)); // features not requested in response
+    LTRScoringQuery.FeatureInfo[] featuresInfo = modelWeight.getFeaturesInfo();
+
+    assertEquals(features.size(), modelWeight.getModelFeatureValuesNormalized().length);
+    int validFeatures = 0;
+    for (int i=0; i < featuresInfo.length; ++i){
+      if (featuresInfo[i] != null && featuresInfo[i].isUsed()){
+        validFeatures += 1;
+      }
+    }
+    assertEquals(validFeatures, features.size());
+
+    // when features are requested in the response, weights should be created for all features
+    final LTRScoringModel ltrScoringModel2 = TestLinearModel.createLinearModel("test",
+        features, norms, "test", allFeatures,
+        makeFeatureWeights(features));
+    modelWeight = performQuery(hits, searcher,
+        hits.scoreDocs[0].doc, new LTRScoringQuery(ltrScoringModel2, true)); // features requested in response
+    featuresInfo = modelWeight.getFeaturesInfo();
+
+    assertEquals(features.size(), modelWeight.getModelFeatureValuesNormalized().length);
+    assertEquals(allFeatures.size(), modelWeight.getExtractedFeatureWeights().length);
+
+    validFeatures = 0;
+    for (int i=0; i < featuresInfo.length; ++i){
+      if (featuresInfo[i] != null && featuresInfo[i].isUsed()){
+        validFeatures += 1;
+      }
+    }
+    assertEquals(validFeatures, allFeatures.size());
+
+    assertU(delI("0")); assertU(delI("1"));
+    r.close();
+    dir.close();
+  }
+
+
+  @Test
+  public void testSelectiveWeightsRequestFeaturesFromDifferentStore() throws Exception {
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("*:*");
+    query.add("fl", "*,score");
+    query.add("rows", "4");
+    query.add("rq", "{!ltr reRankDocs=4 model=externalmodel efi.user_query=w3}");
+    query.add("fl", "fv:[fv]");
+
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='1'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='3'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='4'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/fv=='matchedTitle:1.0;titlePhraseMatch:0.40254828'"); // extract all features in default store
+
+    query.remove("fl");
+    query.remove("rq");
+    query.add("fl", "*,score");
+    query.add("rq", "{!ltr reRankDocs=4 model=externalmodel efi.user_query=w3}");
+    query.add("fl", "fv:[fv store=fstore4 efi.myPop=3]");
+
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='1'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==0.999");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/fv=='popularity:3.0;originalScore:1.0'"); // extract all features from fstore4
+
+
+    query.remove("fl");
+    query.remove("rq");
+    query.add("fl", "*,score");
+    query.add("rq", "{!ltr reRankDocs=4 model=externalmodelstore efi.user_query=w3 efi.myconf=0.8}");
+    query.add("fl", "fv:[fv store=fstore4 efi.myPop=3]");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='1'"); // score using fstore2 used by externalmodelstore
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==0.7992");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/fv=='popularity:3.0;originalScore:1.0'"); // extract all features from fstore4
+  }
+}
+
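
The two scenarios in testScoringQueryWeightCreation() come down to a single decision:
which features get Weight instances. A minimal sketch of that selection (hypothetical
names, not the LTRScoringQuery internals):

    import java.util.Arrays;
    import java.util.List;

    public class WeightSelectionSketch {
      // when extracted features are requested in the response, weights are
      // built for every feature in the store; otherwise only for the
      // features the model actually scores with
      static List<String> featuresToWeight(List<String> modelFeatures,
                                           List<String> allStoreFeatures,
                                           boolean extractAllFeatures) {
        return extractAllFeatures ? allStoreFeatures : modelFeatures;
      }

      public static void main(String[] args) {
        List<String> model = Arrays.asList("f0", "f1", "f2");
        List<String> store = Arrays.asList("f0", "f1", "f2", "f3", "f4",
                                           "f5", "f6", "f7", "f8", "f9");
        System.out.println(featuresToWeight(model, store, false).size()); // 3
        System.out.println(featuresToWeight(model, store, true).size());  // 10
      }
    }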

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestEdisMaxSolrFeature.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestEdisMaxSolrFeature.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestEdisMaxSolrFeature.java
new file mode 100644
index 0000000..cd63b5c
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestEdisMaxSolrFeature.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.feature;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.ltr.TestRerankBase;
+import org.apache.solr.ltr.model.LinearModel;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestEdisMaxSolrFeature extends TestRerankBase {
+
+  @BeforeClass
+  public static void before() throws Exception {
+    setuptest("solrconfig-ltr.xml", "schema.xml");
+
+    assertU(adoc("id", "1", "title", "w1", "description", "w1", "popularity",
+        "1"));
+    assertU(adoc("id", "2", "title", "w2 2asd asdd didid", "description",
+        "w2 2asd asdd didid", "popularity", "2"));
+    assertU(adoc("id", "3", "title", "w3", "description", "w3", "popularity",
+        "3"));
+    assertU(adoc("id", "4", "title", "w4", "description", "w4", "popularity",
+        "4"));
+    assertU(adoc("id", "5", "title", "w5", "description", "w5", "popularity",
+        "5"));
+    assertU(adoc("id", "6", "title", "w1 w2", "description", "w1 w2",
+        "popularity", "6"));
+    assertU(adoc("id", "7", "title", "w1 w2 w3 w4 w5", "description",
+        "w1 w2 w3 w4 w5 w8", "popularity", "7"));
+    assertU(adoc("id", "8", "title", "w1 w1 w1 w2 w2 w8", "description",
+        "w1 w1 w1 w2 w2", "popularity", "8"));
+    assertU(commit());
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    aftertest();
+  }
+
+  @Test
+  public void testEdisMaxSolrFeature() throws Exception {
+    loadFeature(
+        "SomeEdisMax",
+        SolrFeature.class.getCanonicalName(),
+        "{\"q\":\"{!edismax qf='title description' pf='description' mm=100% boost='pow(popularity, 0.1)' v='w1' tie=0.1}\"}");
+
+    loadModel("EdisMax-model", LinearModel.class.getCanonicalName(),
+        new String[] {"SomeEdisMax"}, "{\"weights\":{\"SomeEdisMax\":1.0}}");
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("title:w1");
+    query.add("fl", "*, score");
+    query.add("rows", "4");
+
+    query.add("rq", "{!ltr model=EdisMax-model reRankDocs=4}");
+    query.set("debugQuery", "on");
+    restTestHarness.query("/query" + query.toQueryString());
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==4");
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestExternalFeatures.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestExternalFeatures.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestExternalFeatures.java
new file mode 100644
index 0000000..8c00758
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestExternalFeatures.java
@@ -0,0 +1,157 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.feature;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.ltr.TestRerankBase;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestExternalFeatures extends TestRerankBase {
+
+  @BeforeClass
+  public static void before() throws Exception {
+    setuptest("solrconfig-ltr.xml", "schema.xml");
+
+    assertU(adoc("id", "1", "title", "w1", "description", "w1", "popularity",
+        "1"));
+    assertU(adoc("id", "2", "title", "w2", "description", "w2", "popularity",
+        "2"));
+    assertU(adoc("id", "3", "title", "w3", "description", "w3", "popularity",
+        "3"));
+    assertU(adoc("id", "4", "title", "w4", "description", "w4", "popularity",
+        "4"));
+    assertU(adoc("id", "5", "title", "w5", "description", "w5", "popularity",
+        "5"));
+    assertU(commit());
+
+    loadFeatures("external_features.json");
+    loadModels("external_model.json");
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    aftertest();
+  }
+
+  @Test
+  public void testEfiInTransformerShouldNotChangeOrderOfRerankedResults() throws Exception {
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("*:*");
+    query.add("fl", "*,score");
+    query.add("rows", "3");
+
+    // Regular scores
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='1'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==1.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='2'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/score==1.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='3'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/score==1.0");
+
+    query.add("fl", "[fv]");
+    query.add("rq", "{!ltr reRankDocs=3 model=externalmodel efi.user_query=w3}");
+
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='3'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==0.999");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='1'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/score==0.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='2'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/score==0.0");
+
+    // Adding an efi in the transformer should not affect the rq ranking with a
+    // different value for efi of the same parameter
+    query.remove("fl");
+    query.add("fl", "id,[fv efi.user_query=w2]");
+
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='3'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='1'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='2'");
+  }
+
+  @Test
+  public void testFeaturesUseStopwordQueryReturnEmptyFeatureVector() throws Exception {
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("*:*");
+    query.add("fl", "*,score,fv:[fv]");
+    query.add("rows", "1");
+    // Stopword only query passed in
+    query.add("rq", "{!ltr reRankDocs=3 model=externalmodel efi.user_query='a'}");
+
+    // Features are query title matches, which remove stopwords, leaving blank query, so no matches
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/fv==''");
+  }
+
+  @Test
+  public void testEfiFeatureExtraction() throws Exception {
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("*:*");
+    query.add("rows", "1");
+
+    // Features we're extracting depend on external feature info not passed in
+    query.add("fl", "[fv]");
+    assertJQ("/query" + query.toQueryString(), "/error/msg=='Exception from createWeight for SolrFeature [name=matchedTitle, params={q={!terms f=title}${user_query}}] SolrFeatureWeight requires efi parameter that was not passed in request.'");
+
+    // Adding efi in features section should make it work
+    query.remove("fl");
+    query.add("fl", "score,fvalias:[fv store=fstore2 efi.myconf=2.3]");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/fvalias=='confidence:2.3;originalScore:1.0'");
+
+    // Adding efi in transformer + rq should still use the transformer's params for feature extraction
+    query.remove("fl");
+    query.add("fl", "score,fvalias:[fv store=fstore2 efi.myconf=2.3]");
+    query.add("rq", "{!ltr reRankDocs=3 model=externalmodel efi.user_query=w3}");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/fvalias=='confidence:2.3;originalScore:1.0'");
+  }
+
+  @Test
+  public void featureExtraction_valueFeatureImplicitlyNotRequired_shouldNotScoreFeature() throws Exception {
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("*:*");
+    query.add("rows", "1");
+
+    // Efi is implicitly not required, so we do not score the feature
+    query.remove("fl");
+    query.add("fl", "fvalias:[fv store=fstore2]");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/fvalias=='originalScore:0.0'");
+  }
+
+  @Test
+  public void featureExtraction_valueFeatureExplicitlyNotRequired_shouldNotScoreFeature() throws Exception {
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("*:*");
+    query.add("rows", "1");
+
+    // Efi is explicitly not required, so we do not score the feature
+    query.remove("fl");
+    query.add("fl", "fvalias:[fv store=fstore3]");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/fvalias=='originalScore:0.0'");
+  }
+
+  @Test
+  public void featureExtraction_valueFeatureRequired_shouldThrowException() throws Exception {
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("*:*");
+    query.add("rows", "1");
+
+    // Using nondefault store should still result in error with no efi when it is required (myPop)
+    query.remove("fl");
+    query.add("fl", "fvalias:[fv store=fstore4]");
+    assertJQ("/query" + query.toQueryString(), "/error/msg=='Exception from createWeight for ValueFeature [name=popularity, params={value=${myPop}, required=true}] ValueFeatureWeight requires efi parameter that was not passed in request.'");
+  }
+}
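
The ${user_query} and ${myPop} placeholders exercised above are filled in from efi.*
request parameters before a feature's underlying query runs. A toy sketch of that
substitution step (java.util only; the resolve() helper and parameter map are
hypothetical, not the SolrFeature implementation):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class EfiSubstitutionSketch {
      private static final Pattern PLACEHOLDER = Pattern.compile("\\$\\{(\\w+)\\}");

      // resolve ${name} placeholders in a feature's query template from
      // externally supplied feature info (efi.*) parameters
      static String resolve(String template, Map<String,String> efi) {
        Matcher m = PLACEHOLDER.matcher(template);
        StringBuffer out = new StringBuffer();
        while (m.find()) {
          String value = efi.get(m.group(1));
          if (value == null) {
            // mirrors the behaviour the tests assert: a required efi
            // parameter missing from the request is an error
            throw new IllegalArgumentException("missing efi parameter: " + m.group(1));
          }
          m.appendReplacement(out, Matcher.quoteReplacement(value));
        }
        m.appendTail(out);
        return out.toString();
      }

      public static void main(String[] args) {
        Map<String,String> efi = new HashMap<>();
        efi.put("user_query", "w3");
        // prints: {!terms f=title}w3
        System.out.println(resolve("{!terms f=title}${user_query}", efi));
      }
    }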

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestExternalValueFeatures.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestExternalValueFeatures.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestExternalValueFeatures.java
new file mode 100644
index 0000000..bc073cb
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestExternalValueFeatures.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.feature;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.ltr.TestRerankBase;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestExternalValueFeatures extends TestRerankBase {
+
+  @BeforeClass
+  public static void before() throws Exception {
+    setuptest("solrconfig-ltr.xml", "schema.xml");
+
+    assertU(adoc("id", "1", "title", "w1", "description", "w1", "popularity",
+        "1"));
+    assertU(adoc("id", "2", "title", "w2", "description", "w2", "popularity",
+        "2"));
+    assertU(adoc("id", "3", "title", "w3", "description", "w3", "popularity",
+        "3"));
+    assertU(adoc("id", "4", "title", "w4", "description", "w4", "popularity",
+        "4"));
+    assertU(adoc("id", "5", "title", "w5", "description", "w5", "popularity",
+        "5"));
+    assertU(commit());
+
+    loadFeatures("external_features_for_sparse_processing.json");
+    loadModels("multipleadditivetreesmodel_external_binary_features.json");
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    aftertest();
+  }
+
+  @Test
+  public void efiFeatureProcessing_oneEfiMissing_shouldNotCalculateMissingFeature() throws Exception {
+    SolrQuery query = new SolrQuery();
+    query.setQuery("*:*");
+    query.add("fl", "*,score,features:[fv]");
+    query.add("rows", "3");
+    query.add("fl", "[fv]");
+    query.add("rq", "{!ltr reRankDocs=3 model=external_model_binary_feature efi.user_device_tablet=1}");
+
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='1'");
+    assertJQ("/query" + query.toQueryString(),
+        "/response/docs/[0]/features=='user_device_tablet:1.0'");
+    assertJQ("/query" + query.toQueryString(),
+        "/response/docs/[0]/score==65.0");
+  }
+
+  @Test
+  public void efiFeatureProcessing_allEfisMissing_shouldReturnZeroScore() throws Exception {
+    SolrQuery query = new SolrQuery();
+    query.setQuery("*:*");
+    query.add("fl", "*,score,features:[fv]");
+    query.add("rows", "3");
+
+    query.add("fl", "[fv]");
+    query
+        .add("rq", "{!ltr reRankDocs=3 model=external_model_binary_feature}");
+
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='1'");
+    assertJQ("/query" + query.toQueryString(),
+        "/response/docs/[0]/features==''");
+    assertJQ("/query" + query.toQueryString(),
+        "/response/docs/[0]/score==0.0");
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeatureExtractionFromMultipleSegments.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeatureExtractionFromMultipleSegments.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeatureExtractionFromMultipleSegments.java
new file mode 100644
index 0000000..7658f62
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeatureExtractionFromMultipleSegments.java
@@ -0,0 +1,105 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.feature;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.ltr.TestRerankBase;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.noggit.ObjectBuilder;
+
+
+public class TestFeatureExtractionFromMultipleSegments extends TestRerankBase {
+  static final String AB = "abcdefghijklmnopqrstuvwxyz";
+
+  static String randomString( int len ){
+    StringBuilder sb = new StringBuilder( len );
+    for( int i = 0; i < len; i++ ) {
+      sb.append( AB.charAt( random().nextInt(AB.length()) ) );
+    }
+    return sb.toString();
+ }
+
+  @BeforeClass
+  public static void before() throws Exception {
+    // solrconfig-multiseg.xml contains the merge policy to restrict merging
+    setuptest("solrconfig-multiseg.xml", "schema.xml");
+    // index 400 documents
+    for(int i = 0; i<400;i=i+20) {
+      assertU(adoc("id", new Integer(i).toString(),   "popularity", "201", "description", "apple is a company " + randomString(i%6+3), "normHits", "0.1"));
+      assertU(adoc("id", new Integer(i+1).toString(), "popularity", "201", "description", "d " + randomString(i%6+3), "normHits", "0.11"));
+
+      assertU(adoc("id", new Integer(i+2).toString(), "popularity", "201", "description", "apple is a company too " + randomString(i%6+3), "normHits", "0.1"));
+      assertU(adoc("id", new Integer(i+3).toString(), "popularity", "201", "description", "new york city is big apple " + randomString(i%6+3), "normHits", "0.11"));
+
+      assertU(adoc("id", new Integer(i+6).toString(), "popularity", "301", "description", "function name " + randomString(i%6+3), "normHits", "0.1"));
+      assertU(adoc("id", new Integer(i+7).toString(), "popularity", "301", "description", "function " + randomString(i%6+3), "normHits", "0.1"));
+
+      assertU(adoc("id", new Integer(i+8).toString(), "popularity", "301", "description", "This is a sample function for testing " + randomString(i%6+3), "normHits", "0.1"));
+      assertU(adoc("id", new Integer(i+9).toString(), "popularity", "301", "description", "Function to check out stock prices "+randomString(i%6+3), "normHits", "0.1"));
+      assertU(adoc("id", new Integer(i+10).toString(),"popularity", "301", "description", "Some descriptions "+randomString(i%6+3), "normHits", "0.1"));
+
+      assertU(adoc("id", new Integer(i+11).toString(), "popularity", "201", "description", "apple apple is a company " + randomString(i%6+3), "normHits", "0.1"));
+      assertU(adoc("id", new Integer(i+12).toString(), "popularity", "201", "description", "Big Apple is New York.", "normHits", "0.01"));
+      assertU(adoc("id", new Integer(i+13).toString(), "popularity", "201", "description", "New some York is Big. "+ randomString(i%6+3), "normHits", "0.1"));
+
+      assertU(adoc("id", new Integer(i+14).toString(), "popularity", "201", "description", "apple apple is a company " + randomString(i%6+3), "normHits", "0.1"));
+      assertU(adoc("id", new Integer(i+15).toString(), "popularity", "201", "description", "Big Apple is New York.", "normHits", "0.01"));
+      assertU(adoc("id", new Integer(i+16).toString(), "popularity", "401", "description", "barack h", "normHits", "0.0"));
+      assertU(adoc("id", new Integer(i+17).toString(), "popularity", "201", "description", "red delicious apple " + randomString(i%6+3), "normHits", "0.1"));
+      assertU(adoc("id", new Integer(i+18).toString(), "popularity", "201", "description", "nyc " + randomString(i%6+3), "normHits", "0.11"));
+    }
+
+    assertU(commit());
+
+    loadFeatures("comp_features.json");
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    aftertest();
+  }
+
+  @Test
+  public void testFeatureExtractionFromMultipleSegments() throws Exception {
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("{!edismax qf='description^1' boost='sum(product(pow(normHits, 0.7), 1600), .1)' v='apple'}");
+    // request 100 rows; if any rows come from the second or subsequent segments, the test succeeds only when LTRRescorer::extractFeaturesInfo() advances the doc iterator properly
+    int numRows = 100;
+    query.add("rows", (new Integer(numRows)).toString());
+    query.add("wt", "json");
+    query.add("fq", "popularity:201");
+    query.add("fl", "*, score,id,normHits,description,fv:[features store='feature-store-6' format='dense' efi.user_text='apple']");
+    String res = restTestHarness.query("/query" + query.toQueryString());
+
+    Map<String,Object> resultJson = (Map<String,Object>) ObjectBuilder.fromJSON(res);
+
+    List<Map<String,Object>> docs = (List<Map<String,Object>>)((Map<String,Object>)resultJson.get("response")).get("docs");
+    int passCount = 0;
+    for (final Map<String,Object> doc : docs) {
+       String features = (String)doc.get("fv");
+       assert(features.length() > 0);
+       ++passCount;
+    }
+    assert(passCount == numRows);
+  }
+}
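
The comment about LTRRescorer::extractFeaturesInfo() advancing the doc iterator
refers to segment-local document ids: a top-level hit must be translated into the
containing leaf before a per-segment scorer can be positioned on it, the same
arithmetic TestSelectiveWeightCreation.performQuery() applies via ReaderUtil.subIndex
and context.docBase. A self-contained sketch of that translation (hypothetical
segment sizes):

    public class DocBaseSketch {
      public static void main(String[] args) {
        int[] segmentSizes = {100, 150, 150};  // hypothetical index layout
        int[] docBases = new int[segmentSizes.length];
        for (int i = 1; i < docBases.length; i++) {
          docBases[i] = docBases[i - 1] + segmentSizes[i - 1];
        }
        int globalDoc = 260;  // a hit id from the top-level search
        int leaf = 0;         // what ReaderUtil.subIndex computes
        while (leaf + 1 < docBases.length && globalDoc >= docBases[leaf + 1]) {
          leaf++;
        }
        int localDoc = globalDoc - docBases[leaf];  // the id a segment scorer needs
        System.out.println("leaf=" + leaf + " localDoc=" + localDoc); // leaf=2 localDoc=10
      }
    }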

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeatureLogging.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeatureLogging.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeatureLogging.java
new file mode 100644
index 0000000..14e2903
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeatureLogging.java
@@ -0,0 +1,254 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.feature;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.ltr.TestRerankBase;
+import org.apache.solr.ltr.model.LinearModel;
+import org.apache.solr.ltr.store.FeatureStore;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestFeatureLogging extends TestRerankBase {
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    setuptest();
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    aftertest();
+  }
+
+  @Test
+  public void testGeneratedFeatures() throws Exception {
+    loadFeature("c1", ValueFeature.class.getCanonicalName(), "test1",
+        "{\"value\":1.0}");
+    loadFeature("c2", ValueFeature.class.getCanonicalName(), "test1",
+        "{\"value\":2.0}");
+    loadFeature("c3", ValueFeature.class.getCanonicalName(), "test1",
+        "{\"value\":3.0}");
+    loadFeature("pop", FieldValueFeature.class.getCanonicalName(), "test1",
+        "{\"field\":\"popularity\"}");
+    loadFeature("nomatch", SolrFeature.class.getCanonicalName(), "test1",
+        "{\"q\":\"{!terms f=title}foobarbat\"}");
+    loadFeature("yesmatch", SolrFeature.class.getCanonicalName(), "test1",
+        "{\"q\":\"{!terms f=popularity}2\"}");
+
+    loadModel("sum1", LinearModel.class.getCanonicalName(), new String[] {
+        "c1", "c2", "c3"}, "test1",
+        "{\"weights\":{\"c1\":1.0,\"c2\":1.0,\"c3\":1.0}}");
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("title:bloomberg");
+    query.add("fl", "title,description,id,popularity,[fv]");
+    query.add("rows", "3");
+    query.add("debugQuery", "on");
+    query.add("rq", "{!ltr reRankDocs=3 model=sum1}");
+
+    restTestHarness.query("/query" + query.toQueryString());
+    assertJQ(
+        "/query" + query.toQueryString(),
+        "/response/docs/[0]/=={'title':'bloomberg bloomberg ', 'description':'bloomberg','id':'7', 'popularity':2,  '[fv]':'c1:1.0;c2:2.0;c3:3.0;pop:2.0;yesmatch:1.0'}");
+
+    query.remove("fl");
+    query.add("fl", "[fv]");
+    query.add("rows", "3");
+    query.add("rq", "{!ltr reRankDocs=3 model=sum1}");
+
+    restTestHarness.query("/query" + query.toQueryString());
+    assertJQ("/query" + query.toQueryString(),
+        "/response/docs/[0]/=={'[fv]':'c1:1.0;c2:2.0;c3:3.0;pop:2.0;yesmatch:1.0'}");
+    query.remove("rq");
+
+    // set logging to false but still ask for the features; it should work anyway
+    query.add("rq", "{!ltr reRankDocs=3 model=sum1}");
+    assertJQ("/query" + query.toQueryString(),
+        "/response/docs/[0]/=={'[fv]':'c1:1.0;c2:2.0;c3:3.0;pop:2.0;yesmatch:1.0'}");
+
+
+  }
+
+  @Test
+  public void testDefaultStoreFeatureExtraction() throws Exception {
+    loadFeature("defaultf1", ValueFeature.class.getCanonicalName(),
+        FeatureStore.DEFAULT_FEATURE_STORE_NAME,
+        "{\"value\":1.0}");
+    loadFeature("store8f1", ValueFeature.class.getCanonicalName(),
+        "store8",
+        "{\"value\":2.0}");
+    loadFeature("store9f1", ValueFeature.class.getCanonicalName(),
+        "store9",
+        "{\"value\":3.0}");
+    loadModel("store9m1", LinearModel.class.getCanonicalName(),
+      new String[] {"store9f1"},
+      "store9",
+      "{\"weights\":{\"store9f1\":1.0}}");
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("id:7");
+    query.add("rows", "1");
+
+    // No store specified, use default store for extraction
+    query.add("fl", "fv:[fv]");
+    assertJQ("/query" + query.toQueryString(),
+        "/response/docs/[0]/=={'fv':'defaultf1:1.0'}");
+
+    // Store specified, use store for extraction
+    query.remove("fl");
+    query.add("fl", "fv:[fv store=store8]");
+    assertJQ("/query" + query.toQueryString(),
+        "/response/docs/[0]/=={'fv':'store8f1:2.0'}");
+
+    // Store specified + model specified, use store for extraction
+    query.add("rq", "{!ltr reRankDocs=3 model=store9m1}");
+    assertJQ("/query" + query.toQueryString(),
+        "/response/docs/[0]/=={'fv':'store8f1:2.0'}");
+
+    // No store specified + model specified, use model store for extraction
+    query.remove("fl");
+    query.add("fl", "fv:[fv]");
+    assertJQ("/query" + query.toQueryString(),
+        "/response/docs/[0]/=={'fv':'store9f1:3.0'}");
+  }
+
+
+  @Test
+  public void testGeneratedGroup() throws Exception {
+    loadFeature("c1", ValueFeature.class.getCanonicalName(), "testgroup",
+        "{\"value\":1.0}");
+    loadFeature("c2", ValueFeature.class.getCanonicalName(), "testgroup",
+        "{\"value\":2.0}");
+    loadFeature("c3", ValueFeature.class.getCanonicalName(), "testgroup",
+        "{\"value\":3.0}");
+    loadFeature("pop", FieldValueFeature.class.getCanonicalName(), "testgroup",
+        "{\"field\":\"popularity\"}");
+
+    loadModel("sumgroup", LinearModel.class.getCanonicalName(), new String[] {
+        "c1", "c2", "c3"}, "testgroup",
+        "{\"weights\":{\"c1\":1.0,\"c2\":1.0,\"c3\":1.0}}");
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("title:bloomberg");
+    query.add("fl", "*,[fv]");
+    query.add("debugQuery", "on");
+
+    query.remove("fl");
+    query.add("fl", "fv:[fv]");
+    query.add("rows", "3");
+    query.add("group", "true");
+    query.add("group.field", "title");
+
+    query.add("rq", "{!ltr reRankDocs=3 model=sumgroup}");
+
+    restTestHarness.query("/query" + query.toQueryString());
+    assertJQ(
+        "/query" + query.toQueryString(),
+        "/grouped/title/groups/[0]/doclist/docs/[0]/=={'fv':'c1:1.0;c2:2.0;c3:3.0;pop:5.0'}");
+
+    query.remove("fl");
+    query.add("fl", "fv:[fv fvwt=json]");
+    restTestHarness.query("/query" + query.toQueryString());
+    assertJQ(
+        "/query" + query.toQueryString(),
+        "/grouped/title/groups/[0]/doclist/docs/[0]/fv/=={'c1':1.0,'c2':2.0,'c3':3.0,'pop':5.0}");
+    query.remove("fl");
+    query.add("fl", "fv:[fv fvwt=json]");
+
+    assertJQ(
+        "/query" + query.toQueryString(),
+        "/grouped/title/groups/[0]/doclist/docs/[0]/fv/=={'c1':1.0,'c2':2.0,'c3':3.0,'pop':5.0}");
+  }
+
+  @Test
+  public void testSparseDenseFeatures() throws Exception {
+    loadFeature("match", SolrFeature.class.getCanonicalName(), "test4",
+        "{\"q\":\"{!terms f=title}different\"}");
+    loadFeature("c4", ValueFeature.class.getCanonicalName(), "test4",
+        "{\"value\":1.0}");
+
+    loadModel("sum4", LinearModel.class.getCanonicalName(), new String[] {
+        "match"}, "test4",
+        "{\"weights\":{\"match\":1.0}}");
+
+    //json - no feature format check (default to sparse)
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("title:bloomberg");
+    query.add("rows", "10");
+    query.add("fl", "*,score,fv:[fv store=test4 fvwt=json]");
+    query.add("rq", "{!ltr reRankDocs=10 model=sum4}");
+    assertJQ(
+        "/query" + query.toQueryString(),
+        "/response/docs/[0]/fv/=={'match':1.0,'c4':1.0}");
+    assertJQ(
+        "/query" + query.toQueryString(),
+        "/response/docs/[1]/fv/=={'c4':1.0}");
+
+    //json - sparse feature format check
+    query.remove("fl");
+    query.add("fl", "*,score,fv:[fv store=test4 format=sparse fvwt=json]");
+    assertJQ(
+        "/query" + query.toQueryString(),
+        "/response/docs/[0]/fv/=={'match':1.0,'c4':1.0}");
+    assertJQ(
+        "/query" + query.toQueryString(),
+        "/response/docs/[1]/fv/=={'c4':1.0}");
+
+    //json - dense feature format check
+    query.remove("fl");
+    query.add("fl", "*,score,fv:[fv store=test4 format=dense fvwt=json]");
+    assertJQ(
+        "/query" + query.toQueryString(),
+        "/response/docs/[0]/fv/=={'match':1.0,'c4':1.0}");
+    assertJQ(
+        "/query" + query.toQueryString(),
+        "/response/docs/[1]/fv/=={'match':0.0,'c4':1.0}");
+
+    //csv - no feature format check (default to sparse)
+    query.remove("fl");
+    query.add("fl", "*,score,fv:[fv store=test4 fvwt=csv]");
+    assertJQ(
+        "/query" + query.toQueryString(),
+        "/response/docs/[0]/fv/=='match:1.0;c4:1.0'");
+    assertJQ(
+        "/query" + query.toQueryString(),
+        "/response/docs/[1]/fv/=='c4:1.0'");
+
+    //csv - sparse feature format check
+    query.remove("fl");
+    query.add("fl", "*,score,fv:[fv store=test4 format=sparse fvwt=csv]");
+    assertJQ(
+        "/query" + query.toQueryString(),
+        "/response/docs/[0]/fv/=='match:1.0;c4:1.0'");
+    assertJQ(
+        "/query" + query.toQueryString(),
+        "/response/docs/[1]/fv/=='c4:1.0'");
+
+    //csv - dense feature format check
+    query.remove("fl");
+    query.add("fl", "*,score,fv:[fv store=test4 format=dense fvwt=csv]");
+    assertJQ(
+        "/query" + query.toQueryString(),
+        "/response/docs/[0]/fv/=='match:1.0;c4:1.0'");
+    assertJQ(
+        "/query" + query.toQueryString(),
+        "/response/docs/[1]/fv/=='match:0.0;c4:1.0'");
+  }
+
+}
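
For reference, the tests above exercise the local parameters of the [fv]
transformer: store (which feature store to extract from, falling back to
the default store, or to the model's store when a model is supplied via
rq), format (sparse or dense, defaulting to sparse) and fvwt (csv or
json, defaulting to csv). A minimal sketch against the same fixtures
(store "test4", model "sum4"), assuming the test class context above:

    final SolrQuery query = new SolrQuery("title:bloomberg");
    // dense JSON extraction from an explicit store, reranked by sum4
    query.add("fl", "*,score,fv:[fv store=test4 format=dense fvwt=json]");
    query.add("rq", "{!ltr reRankDocs=10 model=sum4}");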

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeatureLtrScoringModel.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeatureLtrScoringModel.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeatureLtrScoringModel.java
new file mode 100644
index 0000000..5fcebad
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeatureLtrScoringModel.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.feature;
+
+import org.apache.solr.ltr.TestRerankBase;
+import org.apache.solr.ltr.store.rest.ManagedFeatureStore;
+import org.apache.solr.ltr.store.rest.TestManagedFeatureStore;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestFeatureLtrScoringModel extends TestRerankBase {
+
+  static ManagedFeatureStore store = null;
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    setuptest();
+    store = getManagedFeatureStore();
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    aftertest();
+  }
+
+  @Test
+  public void getInstanceTest() throws FeatureException
+  {
+    store.addFeature(TestManagedFeatureStore.createMap("test",
+        OriginalScoreFeature.class.getCanonicalName(), null),
+        "testFstore");
+    final Feature feature = store.getFeatureStore("testFstore").get("test");
+    assertNotNull(feature);
+    assertEquals("test", feature.getName());
+    assertEquals(OriginalScoreFeature.class.getCanonicalName(), feature
+        .getClass().getCanonicalName());
+  }
+
+  @Test
+  public void getInvalidInstanceTest()
+  {
+    final String nonExistingClassName = "org.apache.solr.ltr.feature.LOLFeature";
+    final ClassNotFoundException expectedException =
+        new ClassNotFoundException(nonExistingClassName);
+    try {
+      store.addFeature(TestManagedFeatureStore.createMap("test",
+          nonExistingClassName, null),
+          "testFstore2");
+      fail("getInvalidInstanceTest failed to throw exception: "+expectedException);
+    } catch (Exception actualException) {
+      Throwable rootError = getRootCause(actualException);
+      assertEquals(expectedException.toString(), rootError.toString());
+    }
+  }
+
+}
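
The two tests above suggest that ManagedFeatureStore instantiates the
configured feature class reflectively, which is why a bogus class name
surfaces as a ClassNotFoundException root cause. A minimal sketch of the
register-and-resolve round trip, reusing the createMap test helper (the
store name "sketchStore" is hypothetical):

    // register a feature under a store, then resolve it by name
    store.addFeature(TestManagedFeatureStore.createMap("origScore",
        OriginalScoreFeature.class.getCanonicalName(), null), "sketchStore");
    final Feature f = store.getFeatureStore("sketchStore").get("origScore");
    assertEquals("origScore", f.getName());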

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeatureStore.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeatureStore.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeatureStore.java
new file mode 100644
index 0000000..0ed0cda
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeatureStore.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.feature;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.solr.ltr.TestRerankBase;
+import org.apache.solr.ltr.store.FeatureStore;
+import org.apache.solr.ltr.store.rest.ManagedFeatureStore;
+import org.apache.solr.ltr.store.rest.TestManagedFeatureStore;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestFeatureStore extends TestRerankBase {
+
+  static ManagedFeatureStore fstore = null;
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    setuptest();
+    fstore = getManagedFeatureStore();
+  }
+
+  @Test
+  public void testDefaultFeatureStoreName()
+  {
+    assertEquals("_DEFAULT_", FeatureStore.DEFAULT_FEATURE_STORE_NAME);
+    final FeatureStore expectedFeatureStore = fstore.getFeatureStore(FeatureStore.DEFAULT_FEATURE_STORE_NAME);
+    final FeatureStore actualFeatureStore = fstore.getFeatureStore(null);
+    assertEquals("getFeatureStore(null) should return the default feature store", expectedFeatureStore, actualFeatureStore);
+  }
+
+  @Test
+  public void testFeatureStoreAdd() throws FeatureException
+  {
+    final FeatureStore fs = fstore.getFeatureStore("fstore-testFeature");
+    for (int i = 0; i < 5; i++) {
+      final String name = "c" + i;
+
+      fstore.addFeature(TestManagedFeatureStore.createMap(name,
+          OriginalScoreFeature.class.getCanonicalName(), null),
+          "fstore-testFeature");
+
+      final Feature f = fs.get(name);
+      assertNotNull(f);
+
+    }
+    assertEquals(5, fs.getFeatures().size());
+
+  }
+
+  @Test
+  public void testFeatureStoreGet() throws FeatureException
+  {
+    final FeatureStore fs = fstore.getFeatureStore("fstore-testFeature2");
+    for (int i = 0; i < 5; i++) {
+      Map<String,Object> params = new HashMap<String,Object>();
+      params.put("value", i);
+      final String name = "c" + i;
+
+      fstore.addFeature(TestManagedFeatureStore.createMap(name,
+          ValueFeature.class.getCanonicalName(), params),
+          "fstore-testFeature2");
+
+    }
+
+    for (int i = 0; i < 5; i++) {
+      final Feature f = fs.get("c" + i);
+      assertEquals("c" + i, f.getName());
+      assertTrue(f instanceof ValueFeature);
+      final ValueFeature vf = (ValueFeature)f;
+      assertEquals(i, vf.getValue());
+    }
+  }
+
+  @Test
+  public void testMissingFeatureReturnsNull() {
+    final FeatureStore fs = fstore.getFeatureStore("fstore-testFeature3");
+    for (int i = 0; i < 5; i++) {
+      Map<String,Object> params = new HashMap<String,Object>();
+      params.put("value", i);
+      final String name = "testc" + (float) i;
+      fstore.addFeature(TestManagedFeatureStore.createMap(name,
+          ValueFeature.class.getCanonicalName(), params),
+          "fstore-testFeature3");
+
+    }
+    assertNull(fs.get("missing_feature_name"));
+  }
+
+}
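
Two behaviours worth noting from the tests above: getFeatureStore(null)
falls back to the "_DEFAULT_" store, and looking up an unregistered
feature name returns null rather than throwing. A minimal sketch,
assuming the test class context above:

    // a null store name resolves to FeatureStore.DEFAULT_FEATURE_STORE_NAME
    final FeatureStore defaultStore = fstore.getFeatureStore(null);
    assertEquals(fstore.getFeatureStore("_DEFAULT_"), defaultStore);
    // unknown feature names return null
    assertNull(defaultStore.get("no_such_feature"));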

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFieldLengthFeature.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFieldLengthFeature.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFieldLengthFeature.java
new file mode 100644
index 0000000..4a0d449
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFieldLengthFeature.java
@@ -0,0 +1,156 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.feature;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.ltr.TestRerankBase;
+import org.apache.solr.ltr.model.LinearModel;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestFieldLengthFeature extends TestRerankBase {
+
+  @BeforeClass
+  public static void before() throws Exception {
+    setuptest("solrconfig-ltr.xml", "schema.xml");
+
+    assertU(adoc("id", "1", "title", "w1", "description", "w1"));
+    assertU(adoc("id", "2", "title", "w2 2asd asdd didid", "description",
+        "w2 2asd asdd didid"));
+    assertU(adoc("id", "3", "title", "w3", "description", "w3"));
+    assertU(adoc("id", "4", "title", "w4", "description", "w4"));
+    assertU(adoc("id", "5", "title", "w5", "description", "w5"));
+    assertU(adoc("id", "6", "title", "w1 w2", "description", "w1 w2"));
+    assertU(adoc("id", "7", "title", "w1 w2 w3 w4 w5", "description",
+        "w1 w2 w3 w4 w5 w8"));
+    assertU(adoc("id", "8", "title", "w1 w1 w1 w2 w2 w8", "description",
+        "w1 w1 w1 w2 w2"));
+    assertU(commit());
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    aftertest();
+  }
+
+  @Test
+  public void testIfFieldIsMissingInDocumentLengthIsZero() throws Exception {
+    // add a document without the field 'description'
+    assertU(adoc("id", "42", "title", "w10"));
+    assertU(commit());
+
+    loadFeature("description-length2", FieldLengthFeature.class.getCanonicalName(),
+            "{\"field\":\"description\"}");
+
+    loadModel("description-model2", LinearModel.class.getCanonicalName(),
+            new String[] {"description-length2"}, "{\"weights\":{\"description-length2\":1.0}}");
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("title:w10");
+    query.add("fl", "*, score");
+    query.add("rows", "4");
+    query.add("rq", "{!ltr model=description-model2 reRankDocs=8}");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==0.0");
+  }
+
+
+  @Test
+  public void testIfFieldIsEmptyLengthIsZero() throws Exception {
+    // add a document with an empty 'description' field
+    assertU(adoc("id", "43", "title", "w11", "description", ""));
+    assertU(commit());
+
+    loadFeature("description-length3", FieldLengthFeature.class.getCanonicalName(),
+            "{\"field\":\"description\"}");
+
+    loadModel("description-model3", LinearModel.class.getCanonicalName(),
+            new String[] {"description-length3"}, "{\"weights\":{\"description-length3\":1.0}}");
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("title:w11");
+    query.add("fl", "*, score");
+    query.add("rows", "4");
+    query.add("rq", "{!ltr model=description-model3 reRankDocs=8}");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==0.0");
+  }
+
+
+  @Test
+  public void testRanking() throws Exception {
+    loadFeature("title-length", FieldLengthFeature.class.getCanonicalName(),
+        "{\"field\":\"title\"}");
+
+    loadModel("title-model", LinearModel.class.getCanonicalName(),
+        new String[] {"title-length"}, "{\"weights\":{\"title-length\":1.0}}");
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("title:w1");
+    query.add("fl", "*, score");
+    query.add("rows", "4");
+
+    // Normal term match
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==4");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='1'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='8'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='6'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='7'");
+    // Rerank the matches with the title-model
+
+    query.add("rq", "{!ltr model=title-model reRankDocs=4}");
+
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==4");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='8'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='7'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='6'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='1'");
+
+    query.setQuery("*:*");
+    query.remove("rows");
+    query.add("rows", "8");
+    query.remove("rq");
+    query.add("rq", "{!ltr model=title-model reRankDocs=8}");
+
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='8'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='7'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='2'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='6'");
+
+    loadFeature("description-length",
+        FieldLengthFeature.class.getCanonicalName(),
+        "{\"field\":\"description\"}");
+    loadModel("description-model", LinearModel.class.getCanonicalName(),
+        new String[] {"description-length"},
+        "{\"weights\":{\"description-length\":1.0}}");
+    query.setQuery("title:w1");
+    query.remove("rq");
+    query.remove("rows");
+    query.add("rows", "4");
+    query.add("rq", "{!ltr model=description-model reRankDocs=4}");
+
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==4");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='7'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='8'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='6'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='1'");
+  }
+
+}
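
The recurring pattern in these tests is: define a feature, wrap it in a
one-weight LinearModel, then rerank via the rq parameter. A minimal
sketch with hypothetical names, mirroring the loadFeature/loadModel
helpers above:

    loadFeature("title-length-sketch", FieldLengthFeature.class.getCanonicalName(),
        "{\"field\":\"title\"}");
    loadModel("title-length-model-sketch", LinearModel.class.getCanonicalName(),
        new String[] {"title-length-sketch"},
        "{\"weights\":{\"title-length-sketch\":1.0}}");
    final SolrQuery q = new SolrQuery("title:w1");
    q.add("rq", "{!ltr model=title-length-model-sketch reRankDocs=4}");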

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFieldValueFeature.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFieldValueFeature.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFieldValueFeature.java
new file mode 100644
index 0000000..af150c0
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFieldValueFeature.java
@@ -0,0 +1,173 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.feature;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.ltr.TestRerankBase;
+import org.apache.solr.ltr.model.LinearModel;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestFieldValueFeature extends TestRerankBase {
+
+  private static final float FIELD_VALUE_FEATURE_DEFAULT_VAL = 0.0f;
+
+  @BeforeClass
+  public static void before() throws Exception {
+    setuptest("solrconfig-ltr.xml", "schema.xml");
+
+    assertU(adoc("id", "1", "title", "w1", "description", "w1", "popularity",
+        "1"));
+    assertU(adoc("id", "2", "title", "w2 2asd asdd didid", "description",
+        "w2 2asd asdd didid", "popularity", "2"));
+    assertU(adoc("id", "3", "title", "w3", "description", "w3", "popularity",
+        "3"));
+    assertU(adoc("id", "4", "title", "w4", "description", "w4", "popularity",
+        "4"));
+    assertU(adoc("id", "5", "title", "w5", "description", "w5", "popularity",
+        "5"));
+    assertU(adoc("id", "6", "title", "w1 w2", "description", "w1 w2",
+        "popularity", "6"));
+    assertU(adoc("id", "7", "title", "w1 w2 w3 w4 w5", "description",
+        "w1 w2 w3 w4 w5 w8", "popularity", "7"));
+    assertU(adoc("id", "8", "title", "w1 w1 w1 w2 w2 w8", "description",
+        "w1 w1 w1 w2 w2", "popularity", "8"));
+
+    // a document without the popularity field
+    assertU(adoc("id", "42", "title", "NO popularity", "description", "NO popularity"));
+
+    assertU(commit());
+
+    loadFeature("popularity", FieldValueFeature.class.getCanonicalName(),
+            "{\"field\":\"popularity\"}");
+
+    loadModel("popularity-model", LinearModel.class.getCanonicalName(),
+            new String[] {"popularity"}, "{\"weights\":{\"popularity\":1.0}}");
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    aftertest();
+  }
+
+  @Test
+  public void testRanking() throws Exception {
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("title:w1");
+    query.add("fl", "*, score");
+    query.add("rows", "4");
+
+    // Normal term match
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==4");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='1'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='8'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='6'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='7'");
+
+    query.add("rq", "{!ltr model=popularity-model reRankDocs=4}");
+
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==4");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='8'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='7'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='6'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='1'");
+
+    query.setQuery("*:*");
+    query.remove("rows");
+    query.add("rows", "8");
+    query.remove("rq");
+    query.add("rq", "{!ltr model=popularity-model reRankDocs=8}");
+
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='8'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='7'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='6'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='5'");
+  }
+
+
+  @Test
+  public void testIfADocumentDoesntHaveAFieldDefaultValueIsReturned() throws Exception {
+    SolrQuery query = new SolrQuery();
+    query.setQuery("id:42");
+    query.add("fl", "*, score");
+    query.add("rows", "4");
+
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==1");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='42'");
+    query = new SolrQuery();
+    query.setQuery("id:42");
+    query.add("rq", "{!ltr model=popularity-model reRankDocs=4}");
+    query.add("fl", "[fv]");
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==1");
+    assertJQ("/query" + query.toQueryString(),
+            "/response/docs/[0]/=={'[fv]':'popularity:"+FIELD_VALUE_FEATURE_DEFAULT_VAL+"'}");
+
+  }
+
+
+  @Test
+  public void testIfADocumentDoesntHaveAFieldASetDefaultValueIsReturned() throws Exception {
+
+    final String fstore = "testIfADocumentDoesntHaveAFieldASetDefaultValueIsReturned";
+
+    loadFeature("popularity42", FieldValueFeature.class.getCanonicalName(), fstore,
+            "{\"field\":\"popularity\",\"defaultValue\":\"42.0\"}");
+
+    SolrQuery query = new SolrQuery();
+    query.setQuery("id:42");
+    query.add("fl", "*, score");
+    query.add("rows", "4");
+
+    loadModel("popularity-model42", LinearModel.class.getCanonicalName(),
+            new String[] {"popularity42"}, fstore, "{\"weights\":{\"popularity42\":1.0}}");
+
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==1");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='42'");
+    query = new SolrQuery();
+    query.setQuery("id:42");
+    query.add("rq", "{!ltr model=popularity-model42 reRankDocs=4}");
+    query.add("fl", "[fv]");
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==1");
+    assertJQ("/query" + query.toQueryString(),
+            "/response/docs/[0]/=={'[fv]':'popularity42:42.0'}");
+
+  }
+
+  @Test
+  public void testThatIfaFieldDoesNotExistDefaultValueIsReturned() throws Exception {
+    // using a different fstore to avoid a clash with the other tests
+    final String fstore = "testThatIfaFieldDoesNotExistDefaultValueIsReturned";
+    loadFeature("not-existing-field", FieldValueFeature.class.getCanonicalName(), fstore,
+            "{\"field\":\"cowabunga\"}");
+
+    loadModel("not-existing-field-model", LinearModel.class.getCanonicalName(),
+            new String[] {"not-existing-field"}, fstore, "{\"weights\":{\"not-existing-field\":1.0}}");
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("id:42");
+    query.add("rq", "{!ltr model=not-existing-field-model reRankDocs=4}");
+    query.add("fl", "[fv]");
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==1");
+    assertJQ("/query" + query.toQueryString(),
+            "/response/docs/[0]/=={'[fv]':'not-existing-field:"+FIELD_VALUE_FEATURE_DEFAULT_VAL+"'}");
+
+  }
+
+
+}
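
As the last three tests show, FieldValueFeature falls back to a default
of 0.0 when a document lacks the field or the field does not exist in
the schema, and the default is overridable per feature via the
defaultValue param. A minimal configuration sketch (feature and store
names hypothetical):

    loadFeature("popularity-sketch", FieldValueFeature.class.getCanonicalName(),
        "sketchStore", "{\"field\":\"popularity\",\"defaultValue\":\"42.0\"}");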

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFilterSolrFeature.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFilterSolrFeature.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFilterSolrFeature.java
new file mode 100644
index 0000000..14baefa
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFilterSolrFeature.java
@@ -0,0 +1,105 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.feature;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.ltr.TestRerankBase;
+import org.apache.solr.ltr.model.LinearModel;
+import org.apache.solr.ltr.store.rest.ManagedFeatureStore;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestFilterSolrFeature extends TestRerankBase {
+  @BeforeClass
+  public static void before() throws Exception {
+    setuptest("solrconfig-ltr.xml", "schema.xml");
+
+    assertU(adoc("id", "1", "title", "w1", "description", "w1", "popularity",
+        "1"));
+    assertU(adoc("id", "2", "title", "w2 2asd asdd didid", "description",
+        "w2 2asd asdd didid", "popularity", "2"));
+    assertU(adoc("id", "3", "title", "w1", "description", "w1", "popularity",
+        "3"));
+    assertU(adoc("id", "4", "title", "w1", "description", "w1", "popularity",
+        "4"));
+    assertU(adoc("id", "5", "title", "w5", "description", "w5", "popularity",
+        "5"));
+    assertU(adoc("id", "6", "title", "w6 w2", "description", "w1 w2",
+        "popularity", "6"));
+    assertU(adoc("id", "7", "title", "w1 w2 w3 w4 w5", "description",
+        "w6 w2 w3 w4 w5 w8", "popularity", "88888"));
+    assertU(adoc("id", "8", "title", "w1 w1 w1 w2 w2 w8", "description",
+        "w1 w1 w1 w2 w2", "popularity", "88888"));
+    assertU(commit());
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    aftertest();
+  }
+
+  @Test
+  public void testUserTermScoreWithFQ() throws Exception {
+    loadFeature("SomeTermFQ", SolrFeature.class.getCanonicalName(),
+        "{\"fq\":[\"{!terms f=popularity}88888\"]}");
+    loadFeature("SomeEfiFQ", SolrFeature.class.getCanonicalName(),
+        "{\"fq\":[\"{!terms f=title}${user_query}\"]}");
+    loadModel("Term-modelFQ", LinearModel.class.getCanonicalName(),
+        new String[] {"SomeTermFQ", "SomeEfiFQ"},
+        "{\"weights\":{\"SomeTermFQ\":1.6, \"SomeEfiFQ\":2.0}}");
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("*:*");
+    query.add("fl", "*, score");
+    query.add("rows", "3");
+    query.add("fq", "{!terms f=title}w1");
+    query.add("rq",
+        "{!ltr model=Term-modelFQ reRankDocs=5 efi.user_query='w5'}");
+
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==5");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==3.6");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/score==1.6");
+  }
+
+  @Test
+  public void testBadFeature() throws Exception {
+    // Missing q/fq
+    final String feature = getFeatureInJson("badFeature", "test",
+        SolrFeature.class.getCanonicalName(), "{\"df\":\"foo\"]}");
+    assertJPut(ManagedFeatureStore.REST_END_POINT, feature,
+        "/responseHeader/status==500");
+  }
+
+  @Test
+  public void testFeatureNotEqualWhenNormalizerDifferent() throws Exception {
+    loadFeatures("fq_features.json"); // features that use filter query
+    loadModels("fq-model.json"); // model that uses filter query features
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("*:*");
+    query.add("fl", "*,score");
+    query.add("rows", "4");
+
+    query.add("rq", "{!ltr reRankDocs=4 model=fqmodel efi.user_query=w2}");
+    query.add("fl", "fv:[fv]");
+
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='2'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='1'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='3'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/fv=='matchedTitle:1.0;popularity:3.0'");
+  }
+
+}
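
testUserTermScoreWithFQ above also demonstrates external feature
information (efi): the ${user_query} placeholder inside the feature's fq
is substituted from the efi.user_query local parameter at request time.
A minimal sketch (feature and model names hypothetical):

    loadFeature("titleMatchesEfi", SolrFeature.class.getCanonicalName(),
        "{\"fq\":[\"{!terms f=title}${user_query}\"]}");
    final SolrQuery q = new SolrQuery("*:*");
    q.add("rq", "{!ltr model=someEfiModel reRankDocs=5 efi.user_query='w5'}");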

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestNoMatchSolrFeature.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestNoMatchSolrFeature.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestNoMatchSolrFeature.java
new file mode 100644
index 0000000..5712687
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestNoMatchSolrFeature.java
@@ -0,0 +1,192 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.feature;
+
+import java.util.ArrayList;
+import java.util.Map;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.ltr.TestRerankBase;
+import org.apache.solr.ltr.model.LinearModel;
+import org.apache.solr.ltr.model.MultipleAdditiveTreesModel;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.noggit.ObjectBuilder;
+
+public class TestNoMatchSolrFeature extends TestRerankBase {
+
+  @BeforeClass
+  public static void before() throws Exception {
+    setuptest("solrconfig-ltr.xml", "schema.xml");
+
+    assertU(adoc("id", "1", "title", "w1", "description", "w1", "popularity",
+        "1"));
+    assertU(adoc("id", "2", "title", "w2 2asd asdd didid", "description",
+        "w2 2asd asdd didid", "popularity", "2"));
+    assertU(adoc("id", "3", "title", "w3", "description", "w3", "popularity",
+        "3"));
+    assertU(adoc("id", "4", "title", "w4", "description", "w4", "popularity",
+        "4"));
+    assertU(adoc("id", "5", "title", "w5", "description", "w5", "popularity",
+        "5"));
+    assertU(adoc("id", "6", "title", "w1 w2", "description", "w1 w2",
+        "popularity", "6"));
+    assertU(adoc("id", "7", "title", "w1 w2 w3 w4 w5", "description",
+        "w1 w2 w3 w4 w5 w8", "popularity", "7"));
+    assertU(adoc("id", "8", "title", "w1 w1 w1 w2 w2 w8", "description",
+        "w1 w1 w1 w2 w2", "popularity", "8"));
+    assertU(commit());
+
+    loadFeature("nomatchfeature", SolrFeature.class.getCanonicalName(),
+        "{\"q\":\"foobarbat12345\",\"df\":\"title\"}");
+    loadFeature("yesmatchfeature", SolrFeature.class.getCanonicalName(),
+        "{\"q\":\"w1\",\"df\":\"title\"}");
+    loadFeature("nomatchfeature2", SolrFeature.class.getCanonicalName(),
+        "{\"q\":\"foobarbat12345\",\"df\":\"title\"}");
+    loadModel(
+        "nomatchmodel",
+        LinearModel.class.getCanonicalName(),
+        new String[] {"nomatchfeature", "yesmatchfeature", "nomatchfeature2"},
+        "{\"weights\":{\"nomatchfeature\":1.0,\"yesmatchfeature\":1.1,\"nomatchfeature2\":1.1}}");
+
+    loadFeature("nomatchfeature3", SolrFeature.class.getCanonicalName(),
+        "{\"q\":\"foobarbat12345\",\"df\":\"title\"}");
+    loadModel("nomatchmodel2", LinearModel.class.getCanonicalName(),
+        new String[] {"nomatchfeature3"},
+        "{\"weights\":{\"nomatchfeature3\":1.0}}");
+
+    loadFeature("nomatchfeature4", SolrFeature.class.getCanonicalName(),
+        "noMatchFeaturesStore", "{\"q\":\"foobarbat12345\",\"df\":\"title\"}");
+    loadModel("nomatchmodel3", LinearModel.class.getCanonicalName(),
+        new String[] {"nomatchfeature4"}, "noMatchFeaturesStore",
+        "{\"weights\":{\"nomatchfeature4\":1.0}}");
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    aftertest();
+  }
+
+  @Test
+  public void test2NoMatch1YesMatchFeatureReturnsFvWith1FeatureAndDocScoreScaledByModel() throws Exception {
+    // Tests model with all no matching features but 1
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("*:*");
+    query.add("fl", "*, score,fv:[fv]");
+    query.add("rows", "4");
+    query.add("fv", "true");
+    query.add("rq", "{!ltr model=nomatchmodel reRankDocs=4}");
+
+    final SolrQuery yesMatchFeatureQuery = new SolrQuery();
+    yesMatchFeatureQuery.setQuery("title:w1");
+    yesMatchFeatureQuery.add("fl", "score");
+    yesMatchFeatureQuery.add("rows", "4");
+    String res = restTestHarness.query("/query"
+        + yesMatchFeatureQuery.toQueryString());
+
+    final Map<String,Object> jsonParse = (Map<String,Object>) ObjectBuilder
+        .fromJSON(res);
+    final Double doc0Score = (Double) ((Map<String,Object>) ((ArrayList<Object>) ((Map<String,Object>) jsonParse
+        .get("response")).get("docs")).get(0)).get("score");
+
+
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='1'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score=="
+        + (doc0Score * 1.1));
+    assertJQ("/query" + query.toQueryString(),
+        "/response/docs/[0]/fv=='yesmatchfeature:" + doc0Score + "'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='2'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/score==0.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/fv==''");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='3'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/score==0.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/fv==''");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='4'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/score==0.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/fv==''");
+  }
+
+  @Test
+  public void test1NoMatchFeatureReturnsFvWith1MatchingFeatureFromStoreAndDocWith0Score() throws Exception {
+    // Tests model with all no matching features, but 1 feature store feature matching for extraction
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("*:*");
+    query.add("fl", "*, score,fv:[fv]");
+    query.add("rows", "4");
+    query.add("rq", "{!ltr model=nomatchmodel2 reRankDocs=4}");
+
+    final SolrQuery yesMatchFeatureQuery = new SolrQuery();
+    yesMatchFeatureQuery.setQuery("title:w1");
+    yesMatchFeatureQuery.add("fl", "score");
+    yesMatchFeatureQuery.add("rows", "4");
+    String res = restTestHarness.query("/query"
+        + yesMatchFeatureQuery.toQueryString());
+
+    final Map<String,Object> jsonParse = (Map<String,Object>) ObjectBuilder
+        .fromJSON(res);
+    final Double doc0Score = (Double) ((Map<String,Object>) ((ArrayList<Object>) ((Map<String,Object>) jsonParse
+        .get("response")).get("docs")).get(0)).get("score");
+
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==0.0");
+    assertJQ("/query" + query.toQueryString(),
+        "/response/docs/[0]/fv=='yesmatchfeature:" + doc0Score + "'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/score==0.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/fv==''");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/score==0.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/fv==''");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/score==0.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/fv==''");
+  }
+
+  @Test
+  public void testOnlyNoMatchFeaturesInStoreAndModelReturnsZeroScore() throws Exception {
+    // Tests model with all no matching features
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("*:*");
+    query.add("fl", "*, score,fv:[fv]");
+    query.add("rows", "4");
+    query.add("fv", "true");
+    query.add("rq", "{!ltr model=nomatchmodel3 reRankDocs=4}");
+
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==0.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/fv==''");
+  }
+
+  @Test
+  public void testOnlyNoMatchFeaturesInStoreAndModelReturnsNonzeroScore() throws Exception {
+    // Tests model with all no matching features but expects a non 0 score
+    //  MultipleAdditiveTrees will return scores even for docs without any feature matches
+    loadModel(
+        "nomatchmodel4",
+        MultipleAdditiveTreesModel.class.getCanonicalName(),
+        new String[] {"nomatchfeature4"},
+        "noMatchFeaturesStore",
+        "{\"trees\":[{\"weight\":\"1f\", \"root\":{\"feature\": \"matchedTitle\",\"threshold\": \"0.5f\",\"left\":{\"value\" : \"-10\"},\"right\":{\"value\" : \"9\"}}}]}");
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("*:*");
+    query.add("fl", "*, score,fv:[fv]");
+    query.add("rows", "4");
+    query.add("rq", "{!ltr model=nomatchmodel4 reRankDocs=4}");
+
+    assertJQ("/query" + query.toQueryString(),
+        "/response/docs/[0]/score==0.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/fv==''");
+  }
+
+}
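
The unchecked-cast chains above for pulling a document score out of the
parsed JSON response recur across several of these tests; a small helper
along these lines (hypothetical, assuming the imports already present
plus java.io.IOException) would express the same lookup:

    @SuppressWarnings("unchecked")
    private static Double docScore(String jsonResponse, int docIndex) throws IOException {
      final Map<String,Object> parsed =
          (Map<String,Object>) ObjectBuilder.fromJSON(jsonResponse);
      final Map<String,Object> response = (Map<String,Object>) parsed.get("response");
      final ArrayList<Object> docs = (ArrayList<Object>) response.get("docs");
      return (Double) ((Map<String,Object>) docs.get(docIndex)).get("score");
    }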

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestOriginalScoreFeature.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestOriginalScoreFeature.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestOriginalScoreFeature.java
new file mode 100644
index 0000000..e525891
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestOriginalScoreFeature.java
@@ -0,0 +1,148 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.feature;
+
+import java.util.ArrayList;
+import java.util.Map;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.ltr.TestRerankBase;
+import org.apache.solr.ltr.model.LinearModel;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.noggit.ObjectBuilder;
+
+public class TestOriginalScoreFeature extends TestRerankBase {
+
+  @BeforeClass
+  public static void before() throws Exception {
+    setuptest("solrconfig-ltr.xml", "schema.xml");
+
+    assertU(adoc("id", "1", "title", "w1"));
+    assertU(adoc("id", "2", "title", "w2"));
+    assertU(adoc("id", "3", "title", "w3"));
+    assertU(adoc("id", "4", "title", "w4"));
+    assertU(adoc("id", "5", "title", "w5"));
+    assertU(adoc("id", "6", "title", "w1 w2"));
+    assertU(adoc("id", "7", "title", "w1 w2 w3 w4 w5"));
+    assertU(adoc("id", "8", "title", "w1 w1 w1 w2 w2"));
+    assertU(commit());
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    aftertest();
+  }
+
+  @Test
+  public void testOriginalScore() throws Exception {
+    loadFeature("score", OriginalScoreFeature.class.getCanonicalName(), "{}");
+
+    loadModel("originalScore", LinearModel.class.getCanonicalName(),
+        new String[] {"score"}, "{\"weights\":{\"score\":1.0}}");
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("title:w1");
+    query.add("fl", "*, score");
+    query.add("rows", "4");
+    query.add("wt", "json");
+
+    // Normal term match
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==4");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='1'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='8'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='6'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='7'");
+
+    final String res = restTestHarness.query("/query" + query.toQueryString());
+    final Map<String,Object> jsonParse = (Map<String,Object>) ObjectBuilder
+        .fromJSON(res);
+    final String doc0Score = ((Double) ((Map<String,Object>) ((ArrayList<Object>) ((Map<String,Object>) jsonParse
+        .get("response")).get("docs")).get(0)).get("score")).toString();
+    final String doc1Score = ((Double) ((Map<String,Object>) ((ArrayList<Object>) ((Map<String,Object>) jsonParse
+        .get("response")).get("docs")).get(1)).get("score")).toString();
+    final String doc2Score = ((Double) ((Map<String,Object>) ((ArrayList<Object>) ((Map<String,Object>) jsonParse
+        .get("response")).get("docs")).get(2)).get("score")).toString();
+    final String doc3Score = ((Double) ((Map<String,Object>) ((ArrayList<Object>) ((Map<String,Object>) jsonParse
+        .get("response")).get("docs")).get(3)).get("score")).toString();
+
+    query.add("fl", "[fv]");
+    query.add("rq", "{!ltr model=originalScore reRankDocs=4}");
+
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==4");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='1'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score=="
+        + doc0Score);
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='8'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/score=="
+        + doc1Score);
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='6'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/score=="
+        + doc2Score);
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='7'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/score=="
+        + doc3Score);
+  }
+
+  @Test
+  public void testOriginalScoreWithNonScoringFeatures() throws Exception {
+    loadFeature("origScore", OriginalScoreFeature.class.getCanonicalName(),
+        "store2", "{}");
+    loadFeature("c2", ValueFeature.class.getCanonicalName(), "store2",
+        "{\"value\":2.0}");
+
+    loadModel("origScore", LinearModel.class.getCanonicalName(),
+        new String[] {"origScore"}, "store2",
+        "{\"weights\":{\"origScore\":1.0}}");
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("title:w1");
+    query.add("fl", "*, score, fv:[fv]");
+    query.add("rows", "4");
+    query.add("wt", "json");
+    query.add("rq", "{!ltr model=origScore reRankDocs=4}");
+
+    final String res = restTestHarness.query("/query" + query.toQueryString());
+    final Map<String,Object> jsonParse = (Map<String,Object>) ObjectBuilder
+        .fromJSON(res);
+    final String doc0Score = ((Double) ((Map<String,Object>) ((ArrayList<Object>) ((Map<String,Object>) jsonParse
+        .get("response")).get("docs")).get(0)).get("score")).toString();
+    final String doc1Score = ((Double) ((Map<String,Object>) ((ArrayList<Object>) ((Map<String,Object>) jsonParse
+        .get("response")).get("docs")).get(1)).get("score")).toString();
+    final String doc2Score = ((Double) ((Map<String,Object>) ((ArrayList<Object>) ((Map<String,Object>) jsonParse
+        .get("response")).get("docs")).get(2)).get("score")).toString();
+    final String doc3Score = ((Double) ((Map<String,Object>) ((ArrayList<Object>) ((Map<String,Object>) jsonParse
+        .get("response")).get("docs")).get(3)).get("score")).toString();
+
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==4");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='1'");
+    assertJQ("/query" + query.toQueryString(),
+        "/response/docs/[0]/fv=='origScore:" + doc0Score + ";c2:2.0'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='8'");
+
+    assertJQ("/query" + query.toQueryString(),
+        "/response/docs/[1]/fv=='origScore:" + doc1Score + ";c2:2.0'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='6'");
+    assertJQ("/query" + query.toQueryString(),
+        "/response/docs/[2]/fv=='origScore:" + doc2Score + ";c2:2.0'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='7'");
+    assertJQ("/query" + query.toQueryString(),
+        "/response/docs/[3]/fv=='origScore:" + doc3Score + ";c2:2.0'");
+  }
+
+}
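
These tests rely on OriginalScoreFeature reproducing the first-pass
query score, so a one-weight linear model over it preserves the original
ranking. A minimal sketch (names hypothetical), mirroring the helpers
above:

    loadFeature("origScore-sketch", OriginalScoreFeature.class.getCanonicalName(), "{}");
    loadModel("identity-rerank", LinearModel.class.getCanonicalName(),
        new String[] {"origScore-sketch"},
        "{\"weights\":{\"origScore-sketch\":1.0}}");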


[41/50] [abbrv] lucene-solr:apiv2: SOLR-8542: Adds Solr Learning to Rank (LTR) plugin for reranking results with machine learning models. (Michael Nilsson, Diego Ceccarelli, Joshua Pantony, Jon Dorando, Naveen Santhapuri, Alessandro Benedetti, David Groh

Posted by sa...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/FeatureLogger.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/FeatureLogger.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/FeatureLogger.java
new file mode 100644
index 0000000..a5afd05
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/FeatureLogger.java
@@ -0,0 +1,193 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr;
+
+import java.lang.invoke.MethodHandles;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.solr.search.SolrIndexSearcher;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * FeatureLogger can be registered in a model and provide a strategy for logging
+ * the feature values.
+ */
+public abstract class FeatureLogger<FV_TYPE> {
+
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  /** name of the cache used for storing the feature values **/
+  private static final String QUERY_FV_CACHE_NAME = "QUERY_DOC_FV";
+
+  protected enum FeatureFormat {DENSE, SPARSE};
+  protected final FeatureFormat featureFormat;
+
+  protected FeatureLogger(FeatureFormat f) {
+    this.featureFormat = f;
+  }
+
+  /**
+   * Log will be called every time the model generates feature values for a
+   * document and a query.
+   *
+   * @param docid
+   *          Solr document id whose features we are saving
+   * @param featuresInfo
+   *          List of all the {@link LTRScoringQuery.FeatureInfo} objects which contain name and value
+   *          for all the features triggered by the result set
+   * @return true if the logger successfully logged the features, false
+   *         otherwise.
+   */
+  public boolean log(int docid, LTRScoringQuery scoringQuery,
+      SolrIndexSearcher searcher, LTRScoringQuery.FeatureInfo[] featuresInfo) {
+    final FV_TYPE featureVector = makeFeatureVector(featuresInfo);
+    if (featureVector == null) {
+      return false;
+    }
+
+    return searcher.cacheInsert(QUERY_FV_CACHE_NAME,
+        fvCacheKey(scoringQuery, docid), featureVector) != null;
+  }
+
+  /**
+   * Returns a FeatureLogger for the given formats.
+   *
+   * 'stringFormat' param: 'csv' logs the features as a single string in
+   * csv format, 'json' logs them as a Map of featureName keys to
+   * featureValue values; if null or empty, csv format is selected.
+   * 'featureFormat' param: 'dense' writes the features in dense format,
+   * 'sparse' writes them in sparse format; if null or empty, sparse is
+   * selected.
+   *
+   * @return a feature logger for the format specified.
+   */
+  public static FeatureLogger<?> createFeatureLogger(String stringFormat, String featureFormat) {
+    final FeatureFormat f;
+    if (featureFormat == null || featureFormat.isEmpty() ||
+        featureFormat.equals("sparse")) {
+      f = FeatureFormat.SPARSE;
+    }
+    else if (featureFormat.equals("dense")) {
+      f = FeatureFormat.DENSE;
+    }
+    else {
+      f = FeatureFormat.SPARSE;
+      log.warn("unknown feature logger feature format {} | {}", stringFormat, featureFormat);
+    }
+    if ((stringFormat == null) || stringFormat.isEmpty()) {
+      return new CSVFeatureLogger(f);
+    }
+    if (stringFormat.equals("csv")) {
+      return new CSVFeatureLogger(f);
+    }
+    if (stringFormat.equals("json")) {
+      return new MapFeatureLogger(f);
+    }
+    log.warn("unknown feature logger string format {} | {}", stringFormat, featureFormat);
+    return null;
+
+  }
+
+  public abstract FV_TYPE makeFeatureVector(LTRScoringQuery.FeatureInfo[] featuresInfo);
+
+  private static int fvCacheKey(LTRScoringQuery scoringQuery, int docid) {
+    return  scoringQuery.hashCode() + (31 * docid);
+  }
+
+  /**
+   * Fetches the feature vector that was cached for a document, if any.
+   *
+   * @param docid
+   *          Solr document id
+   * @return the cached feature vector for docid, or null if none was logged
+   */
+
+  public FV_TYPE getFeatureVector(int docid, LTRScoringQuery scoringQuery,
+      SolrIndexSearcher searcher) {
+    return (FV_TYPE) searcher.cacheLookup(QUERY_FV_CACHE_NAME, fvCacheKey(scoringQuery, docid));
+  }
+
+
+  public static class MapFeatureLogger extends FeatureLogger<Map<String,Float>> {
+
+    public MapFeatureLogger(FeatureFormat f) {
+      super(f);
+    }
+
+    @Override
+    public Map<String,Float> makeFeatureVector(LTRScoringQuery.FeatureInfo[] featuresInfo) {
+      boolean isDense = featureFormat.equals(FeatureFormat.DENSE);
+      Map<String,Float> hashmap = Collections.emptyMap();
+      if (featuresInfo.length > 0) {
+        hashmap = new HashMap<String,Float>(featuresInfo.length);
+        for (LTRScoringQuery.FeatureInfo featInfo:featuresInfo){
+          if (featInfo.isUsed() || isDense){
+            hashmap.put(featInfo.getName(), featInfo.getValue());
+          }
+        }
+      }
+      return hashmap;
+    }
+
+  }
+
+  public static class CSVFeatureLogger extends FeatureLogger<String> {
+    StringBuilder sb = new StringBuilder(500);
+    char keyValueSep = ':';
+    char featureSep = ';';
+
+    public CSVFeatureLogger(FeatureFormat f) {
+      super(f);
+    }
+
+    public CSVFeatureLogger setKeyValueSep(char keyValueSep) {
+      this.keyValueSep = keyValueSep;
+      return this;
+    }
+
+    public CSVFeatureLogger setFeatureSep(char featureSep) {
+      this.featureSep = featureSep;
+      return this;
+    }
+
+    @Override
+    public String makeFeatureVector(LTRScoringQuery.FeatureInfo[] featuresInfo) {
+      boolean isDense = featureFormat.equals(FeatureFormat.DENSE);
+      for (LTRScoringQuery.FeatureInfo featInfo:featuresInfo) {
+        if (featInfo.isUsed() || isDense){
+          sb.append(featInfo.getName())
+          .append(keyValueSep)
+          .append(featInfo.getValue())
+          .append(featureSep);
+        }
+      }
+
+      final String features = (sb.length() > 0 ? sb.substring(0,
+          sb.length() - 1) : "");
+      sb.setLength(0);
+
+      return features;
+    }
+
+  }
+
+}
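
A minimal usage sketch of the factory above (construction of the
FeatureInfo array is elided, since that class is defined elsewhere in
this patch):

    // null/empty arguments select the defaults: CSV string format, sparse features
    final FeatureLogger<?> csvSparse = FeatureLogger.createFeatureLogger(null, null);
    final FeatureLogger<?> jsonDense = FeatureLogger.createFeatureLogger("json", "dense");
    // log(...) caches the computed vector in the "QUERY_DOC_FV" searcher
    // cache, keyed on the scoring query's hash and the docid;
    // getFeatureVector(...) performs the matching lookup.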

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/LTRRescorer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/LTRRescorer.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/LTRRescorer.java
new file mode 100644
index 0000000..27223b7
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/LTRRescorer.java
@@ -0,0 +1,249 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.List;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.ReaderUtil;
+import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Rescorer;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.Weight;
+import org.apache.solr.search.SolrIndexSearcher;
+
+
+/**
+ * Implements the rescoring logic. The top documents returned by Solr with
+ * their original scores are processed by a {@link LTRScoringQuery} that
+ * assigns a new score to each document, and the top documents are then
+ * re-sorted based on the new score.
+ */
+public class LTRRescorer extends Rescorer {
+
+  LTRScoringQuery scoringQuery;
+  public LTRRescorer(LTRScoringQuery scoringQuery) {
+    this.scoringQuery = scoringQuery;
+  }
+
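+  /**
+   * Sift-down step for a binary min-heap of hits keyed on score: restores
+   * the heap property at position {@code root}, assuming both subtrees
+   * already satisfy it, keeping the lowest-scoring hit at the heap root.
+   */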
+  private void heapAdjust(ScoreDoc[] hits, int size, int root) {
+    final ScoreDoc doc = hits[root];
+    final float score = doc.score;
+    int i = root;
+    while (i <= ((size >> 1) - 1)) {
+      final int lchild = (i << 1) + 1;
+      final ScoreDoc ldoc = hits[lchild];
+      final float lscore = ldoc.score;
+      float rscore = Float.MAX_VALUE;
+      final int rchild = (i << 1) + 2;
+      ScoreDoc rdoc = null;
+      if (rchild < size) {
+        rdoc = hits[rchild];
+        rscore = rdoc.score;
+      }
+      if (lscore < score) {
+        if (rscore < lscore) {
+          hits[i] = rdoc;
+          hits[rchild] = doc;
+          i = rchild;
+        } else {
+          hits[i] = ldoc;
+          hits[lchild] = doc;
+          i = lchild;
+        }
+      } else if (rscore < score) {
+        hits[i] = rdoc;
+        hits[rchild] = doc;
+        i = rchild;
+      } else {
+        return;
+      }
+    }
+  }
+
+  private void heapify(ScoreDoc[] hits, int size) {
+    for (int i = (size >> 1) - 1; i >= 0; i--) {
+      heapAdjust(hits, size, i);
+    }
+  }
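+
+  // heapAdjust/heapify maintain a min-heap keyed on score, so reranked[0]
+  // always holds the lowest-scoring document of the current top-N. For example
+  // (illustrative), heapify over scores [5.0, 1.0, 3.0] sifts 1.0 to index 0,
+  // letting scoreFeatures below evict the minimum in O(log n) per hit.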
+
+  /**
+   * Rescores the documents.
+   *
+   * @param searcher
+   *          current IndexSearcher
+   * @param firstPassTopDocs
+   *          documents to rerank
+   * @param topN
+   *          number of documents to return
+   */
+  @Override
+  public TopDocs rescore(IndexSearcher searcher, TopDocs firstPassTopDocs,
+      int topN) throws IOException {
+    if ((topN == 0) || (firstPassTopDocs.totalHits == 0)) {
+      return firstPassTopDocs;
+    }
+    final ScoreDoc[] hits = firstPassTopDocs.scoreDocs;
+    Arrays.sort(hits, new Comparator<ScoreDoc>() {
+      @Override
+      public int compare(ScoreDoc a, ScoreDoc b) {
+        return a.doc - b.doc;
+      }
+    });
+
+    topN = Math.min(topN, firstPassTopDocs.totalHits);
+    final ScoreDoc[] reranked = new ScoreDoc[topN];
+    final List<LeafReaderContext> leaves = searcher.getIndexReader().leaves();
+    final LTRScoringQuery.ModelWeight modelWeight = (LTRScoringQuery.ModelWeight) searcher
+        .createNormalizedWeight(scoringQuery, true);
+
+    final SolrIndexSearcher solrIndexSearch = (SolrIndexSearcher) searcher;
+    scoreFeatures(solrIndexSearch, firstPassTopDocs,topN, modelWeight, hits, leaves, reranked);
+    // Must sort all documents that we reranked, and then select the top
+    Arrays.sort(reranked, new Comparator<ScoreDoc>() {
+      @Override
+      public int compare(ScoreDoc a, ScoreDoc b) {
+        // Sort by score descending, then docID ascending:
+        if (a.score > b.score) {
+          return -1;
+        } else if (a.score < b.score) {
+          return 1;
+        } else {
+          // This subtraction can't overflow int
+          // because docIDs are >= 0:
+          return a.doc - b.doc;
+        }
+      }
+    });
+
+    return new TopDocs(firstPassTopDocs.totalHits, reranked, reranked[0].score);
+  }
+
+  public void scoreFeatures(SolrIndexSearcher solrIndexSearch, TopDocs firstPassTopDocs,
+      int topN, LTRScoringQuery.ModelWeight modelWeight, ScoreDoc[] hits, List<LeafReaderContext> leaves,
+      ScoreDoc[] reranked) throws IOException {
+
+    int readerUpto = -1;
+    int endDoc = 0;
+    int docBase = 0;
+
+    LTRScoringQuery.ModelWeight.ModelScorer scorer = null;
+    int hitUpto = 0;
+    final FeatureLogger<?> featureLogger = scoringQuery.getFeatureLogger();
+
+    while (hitUpto < hits.length) {
+      final ScoreDoc hit = hits[hitUpto];
+      final int docID = hit.doc;
+      LeafReaderContext readerContext = null;
+      while (docID >= endDoc) {
+        readerUpto++;
+        readerContext = leaves.get(readerUpto);
+        endDoc = readerContext.docBase + readerContext.reader().maxDoc();
+      }
+      // We advanced to another segment
+      if (readerContext != null) {
+        docBase = readerContext.docBase;
+        scorer = modelWeight.scorer(readerContext);
+      }
+      // The scorer for a LTRScoringQuery.ModelWeight should never be null: score must
+      // always be called, even if no feature scorers match, since a model might use
+      // that information to return a non-zero score. The same applies when advancing a
+      // LTRScoringQuery.ModelWeight.ModelScorer past the target doc, since the model
+      // still needs to compute a potentially non-zero score from blank features.
+      assert (scorer != null);
+      final int targetDoc = docID - docBase;
+      scorer.docID();
+      scorer.iterator().advance(targetDoc);
+
+      scorer.getDocInfo().setOriginalDocScore(Float.valueOf(hit.score));
+      hit.score = scorer.score();
+      if (hitUpto < topN) {
+        reranked[hitUpto] = hit;
+        // while the heap is not yet full, log the features for this document
+        if (featureLogger != null) {
+          featureLogger.log(hit.doc, scoringQuery, solrIndexSearch,
+              modelWeight.getFeaturesInfo());
+        }
+      } else if (hitUpto == topN) {
+        // we have collected topN documents, so build the heap
+        heapify(reranked, topN);
+      }
+      if (hitUpto >= topN) {
+        // once the heap is ready, if the score of this document is lower than
+        // the minimum, do not log its features. Otherwise replace the minimum
+        // with this document and fix up the heap.
+        if (hit.score > reranked[0].score) {
+          reranked[0] = hit;
+          heapAdjust(reranked, topN, 0);
+          if (featureLogger != null) {
+            featureLogger.log(hit.doc, scoringQuery, solrIndexSearch,
+                modelWeight.getFeaturesInfo());
+          }
+        }
+      }
+      hitUpto++;
+    }
+  }
+
+  @Override
+  public Explanation explain(IndexSearcher searcher,
+      Explanation firstPassExplanation, int docID) throws IOException {
+
+    final List<LeafReaderContext> leafContexts = searcher.getTopReaderContext()
+        .leaves();
+    final int n = ReaderUtil.subIndex(docID, leafContexts);
+    final LeafReaderContext context = leafContexts.get(n);
+    final int deBasedDoc = docID - context.docBase;
+    final Weight modelWeight = searcher.createNormalizedWeight(scoringQuery,
+        true);
+    return modelWeight.explain(context, deBasedDoc);
+  }
+
+  public static LTRScoringQuery.FeatureInfo[] extractFeaturesInfo(LTRScoringQuery.ModelWeight modelWeight,
+      int docid,
+      Float originalDocScore,
+      List<LeafReaderContext> leafContexts)
+          throws IOException {
+    final int n = ReaderUtil.subIndex(docid, leafContexts);
+    final LeafReaderContext atomicContext = leafContexts.get(n);
+    final int deBasedDoc = docid - atomicContext.docBase;
+    final LTRScoringQuery.ModelWeight.ModelScorer r = modelWeight.scorer(atomicContext);
+    if ( (r == null) || (r.iterator().advance(deBasedDoc) != deBasedDoc) ) {
+      return new LTRScoringQuery.FeatureInfo[0];
+    } else {
+      if (originalDocScore != null) {
+        // If results have not been reranked, the score passed in is the original query's
+        // score, which some features can use instead of recalculating it
+        r.getDocInfo().setOriginalDocScore(originalDocScore);
+      }
+      r.score();
+      return modelWeight.getFeaturesInfo();
+    }
+  }
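+
+  // Usage sketch (hypothetical, variable names assumed): log the features of a
+  // single document without rescoring the whole result set.
+  //   LTRScoringQuery.FeatureInfo[] infos = LTRRescorer.extractFeaturesInfo(
+  //       modelWeight, docid, originalDocScore,
+  //       searcher.getTopReaderContext().leaves());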
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/LTRScoringQuery.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/LTRScoringQuery.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/LTRScoringQuery.java
new file mode 100644
index 0000000..991c1ed
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/LTRScoringQuery.java
@@ -0,0 +1,738 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.concurrent.Future;
+import java.util.concurrent.FutureTask;
+import java.util.concurrent.RunnableFuture;
+import java.util.concurrent.Semaphore;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.DisiPriorityQueue;
+import org.apache.lucene.search.DisiWrapper;
+import org.apache.lucene.search.DisjunctionDISIApproximation;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.Weight;
+import org.apache.solr.ltr.feature.Feature;
+import org.apache.solr.ltr.model.LTRScoringModel;
+import org.apache.solr.request.SolrQueryRequest;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * The ranking query that is run to rerank results using the
+ * LTRScoringModel algorithm.
+ */
+public class LTRScoringQuery extends Query {
+
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  // contains a description of the model
+  final private LTRScoringModel ltrScoringModel;
+  final private boolean extractAllFeatures;
+  final private LTRThreadModule ltrThreadMgr;
+  final private Semaphore querySemaphore; // limits the number of threads per query, so that multiple requests can be serviced simultaneously
+
+  // feature logger to output the features.
+  private FeatureLogger<?> fl;
+  // Map of external parameters, such as query intent, that can be used by
+  // features
+  final private Map<String,String[]> efi;
+  // Original solr query used to fetch matching documents
+  private Query originalQuery;
+  // Original solr request
+  private SolrQueryRequest request;
+
+  public LTRScoringQuery(LTRScoringModel ltrScoringModel) {
+    this(ltrScoringModel, Collections.<String,String[]>emptyMap(), false, null);
+  }
+
+  public LTRScoringQuery(LTRScoringModel ltrScoringModel, boolean extractAllFeatures) {
+    this(ltrScoringModel, Collections.<String, String[]>emptyMap(), extractAllFeatures, null);
+  }
+
+  public LTRScoringQuery(LTRScoringModel ltrScoringModel,
+      Map<String, String[]> externalFeatureInfo,
+      boolean extractAllFeatures, LTRThreadModule ltrThreadMgr) {
+    this.ltrScoringModel = ltrScoringModel;
+    this.efi = externalFeatureInfo;
+    this.extractAllFeatures = extractAllFeatures;
+    this.ltrThreadMgr = ltrThreadMgr;
+    if (this.ltrThreadMgr != null) {
+      this.querySemaphore = this.ltrThreadMgr.createQuerySemaphore();
+    } else{
+      this.querySemaphore = null;
+    }
+  }
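+
+  // Construction sketch (illustrative values, names assumed): pass external
+  // feature info (efi) such as a user query string, and request all features
+  // for logging.
+  //   Map<String,String[]> efi = new HashMap<>();
+  //   efi.put("user_query", new String[] { "hello" });
+  //   LTRScoringQuery q = new LTRScoringQuery(model, efi, true, threadModule);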
+
+  public LTRScoringModel getScoringModel() {
+    return ltrScoringModel;
+  }
+
+  public void setFeatureLogger(FeatureLogger fl) {
+    this.fl = fl;
+  }
+
+  public FeatureLogger getFeatureLogger() {
+    return fl;
+  }
+
+  public void setOriginalQuery(Query originalQuery) {
+    this.originalQuery = originalQuery;
+  }
+
+  public Query getOriginalQuery() {
+    return originalQuery;
+  }
+
+  public Map<String,String[]> getExternalFeatureInfo() {
+    return efi;
+  }
+
+  public void setRequest(SolrQueryRequest request) {
+    this.request = request;
+  }
+
+  public SolrQueryRequest getRequest() {
+    return request;
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = classHash();
+    result = (prime * result) + ((ltrScoringModel == null) ? 0 : ltrScoringModel.hashCode());
+    result = (prime * result)
+        + ((originalQuery == null) ? 0 : originalQuery.hashCode());
+    if (efi == null) {
+      result = (prime * result) + 0;
+    }
+    else {
+      for (final Map.Entry<String,String[]> entry : efi.entrySet()) {
+        final String key = entry.getKey();
+        final String[] values = entry.getValue();
+        result = (prime * result) + key.hashCode();
+        result = (prime * result) + Arrays.hashCode(values);
+      }
+    }
+    result = (prime * result) + this.toString().hashCode();
+    return result;
+  }
+  @Override
+  public boolean equals(Object o) {
+    return sameClassAs(o) &&  equalsTo(getClass().cast(o));
+  }
+
+  private boolean equalsTo(LTRScoringQuery other) {
+    if (ltrScoringModel == null) {
+      if (other.ltrScoringModel != null) {
+        return false;
+      }
+    } else if (!ltrScoringModel.equals(other.ltrScoringModel)) {
+      return false;
+    }
+    if (originalQuery == null) {
+      if (other.originalQuery != null) {
+        return false;
+      }
+    } else if (!originalQuery.equals(other.originalQuery)) {
+      return false;
+    }
+    if (efi == null) {
+      if (other.efi != null) {
+        return false;
+      }
+    } else {
+      if (other.efi == null || efi.size() != other.efi.size()) {
+        return false;
+      }
+      for(final Map.Entry<String,String[]> entry : efi.entrySet()) {
+        final String key = entry.getKey();
+        final String[] otherValues = other.efi.get(key);
+        if (otherValues == null || !Arrays.equals(otherValues,entry.getValue())) {
+          return false;
+        }
+      }
+    }
+    return true;
+  }
+
+  @Override
+  public ModelWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost)
+      throws IOException {
+    final Collection<Feature> modelFeatures = ltrScoringModel.getFeatures();
+    final Collection<Feature> allFeatures = ltrScoringModel.getAllFeatures();
+    int modelFeatSize = modelFeatures.size();
+
+    Collection<Feature> features = null;
+    if (this.extractAllFeatures) {
+      features = allFeatures;
+    }
+    else{
+      features =  modelFeatures;
+    }
+    final Feature.FeatureWeight[] extractedFeatureWeights = new Feature.FeatureWeight[features.size()];
+    final Feature.FeatureWeight[] modelFeaturesWeights = new Feature.FeatureWeight[modelFeatSize];
+    List<Feature.FeatureWeight > featureWeights = new ArrayList<>(features.size());
+
+    if (querySemaphore == null) {
+      createWeights(searcher, needsScores, boost, featureWeights, features);
+    }
+    else{
+      createWeightsParallel(searcher, needsScores, boost, featureWeights, features);
+    }
+    int i=0, j = 0;
+    if (this.extractAllFeatures) {
+      for (final Feature.FeatureWeight fw : featureWeights) {
+        extractedFeatureWeights[i++] = fw;
+      }
+      for (final Feature f : modelFeatures){
+        modelFeaturesWeights[j++] = extractedFeatureWeights[f.getIndex()]; // we can look up by featureId because all features will be extracted when this.extractAllFeatures is set
+      }
+    }
+    else{
+      for (final Feature.FeatureWeight fw: featureWeights){
+        extractedFeatureWeights[i++] = fw;
+        modelFeaturesWeights[j++] = fw;
+      }
+    }
+    return new ModelWeight(modelFeaturesWeights, extractedFeatureWeights, allFeatures.size());
+  }
+
+  private void createWeights(IndexSearcher searcher, boolean needsScores, float boost,
+      List<Feature.FeatureWeight > featureWeights, Collection<Feature> features) throws IOException {
+    final SolrQueryRequest req = getRequest();
+    // since the feature store is a LinkedHashMap, iteration order is preserved
+    for (final Feature f : features) {
+      try{
+        Feature.FeatureWeight fw = f.createWeight(searcher, needsScores, req, originalQuery, efi);
+        featureWeights.add(fw);
+      } catch (final Exception e) {
+        throw new RuntimeException("Exception from createWeight for " + f.toString() + " "
+            + e.getMessage(), e);
+      }
+    }
+  }
+
+  private class CreateWeightCallable implements Callable<Feature.FeatureWeight>{
+    final private Feature f;
+    final private IndexSearcher searcher;
+    final private boolean needsScores;
+    final private SolrQueryRequest req;
+
+    public CreateWeightCallable(Feature f, IndexSearcher searcher, boolean needsScores, SolrQueryRequest req){
+      this.f = f;
+      this.searcher = searcher;
+      this.needsScores = needsScores;
+      this.req = req;
+    }
+
+    @Override
+    public Feature.FeatureWeight call() throws Exception{
+      try {
+        Feature.FeatureWeight fw  = f.createWeight(searcher, needsScores, req, originalQuery, efi);
+        return fw;
+      } catch (final Exception e) {
+        throw new RuntimeException("Exception from createWeight for " + f.toString() + " "
+            + e.getMessage(), e);
+      } finally {
+        querySemaphore.release();
+        ltrThreadMgr.releaseLTRSemaphore();
+      }
+    }
+  } // end of call CreateWeightCallable
+
+  private void createWeightsParallel(IndexSearcher searcher, boolean needsScores, float boost,
+      List<Feature.FeatureWeight > featureWeights, Collection<Feature> features) throws RuntimeException {
+
+    final SolrQueryRequest req = getRequest();
+    List<Future<Feature.FeatureWeight> > futures = new ArrayList<>(features.size());
+    try{
+      for (final Feature f : features) {
+        CreateWeightCallable callable = new CreateWeightCallable(f, searcher, needsScores, req);
+        RunnableFuture<Feature.FeatureWeight> runnableFuture = new FutureTask<>(callable);
+        querySemaphore.acquire(); // always acquire before the ltrSemaphore is acquired, to guarantee that the current query stays within the limit for max threads
+        ltrThreadMgr.acquireLTRSemaphore(); // may block and/or interrupt
+        ltrThreadMgr.execute(runnableFuture); // releases the semaphore when done
+        futures.add(runnableFuture);
+      }
+      //Loop over futures to get the feature weight objects
+      for (final Future<Feature.FeatureWeight> future : futures) {
+        featureWeights.add(future.get()); // future.get() will block if the job is still running
+      }
+    } catch (Exception e) { // catches InterruptedException and ExecutionException
+      log.info("Error while creating weights in LTR", e);
+      throw new RuntimeException("Error while creating weights in LTR: " + e.getMessage(), e);
+    }
+  }
+
+  @Override
+  public String toString(String field) {
+    return field;
+  }
+
+  public class FeatureInfo {
+    final private String name;
+    private float value;
+    private boolean used;
+
+    FeatureInfo(String n, float v, boolean u){
+      name = n; value = v; used = u;
+    }
+
+    public void setValue(float value){
+      this.value = value;
+    }
+
+    public String getName(){
+      return name;
+    }
+
+    public float getValue(){
+      return value;
+    }
+
+    public boolean isUsed(){
+      return used;
+    }
+
+    public void setUsed(boolean used){
+      this.used = used;
+    }
+  }
+
+  public class ModelWeight extends Weight {
+
+    // List of the model's features used for scoring. This is a subset of the
+    // features used for logging.
+    final private Feature.FeatureWeight[] modelFeatureWeights;
+    final private float[] modelFeatureValuesNormalized;
+    final private Feature.FeatureWeight[] extractedFeatureWeights;
+
+    // List of all the feature names, values - used for both scoring and logging
+    /*
+     *  What is the advantage of using a map here instead of an array of objects?
+     *     A set of arrays was used earlier and the elements were accessed using the featureId.
+     *     With the updated logic to create weights selectively,
+     *     the number of elements in the array can be fewer than the total number of features.
+     *     When [features] are not requested, only the model features are extracted.
+     *     In this case, indexing by featureId fails. For this reason,
+     *     we need a mapping that holds just the features that were triggered by the documents in the result set.
+     */
+    final private FeatureInfo[] featuresInfo;
+    /*
+     * @param modelFeatureWeights
+     *     - should be the same size as the number of features used by the model
+     * @param extractedFeatureWeights
+     *     - if features are requested from the same store as model feature store,
+     *       this will be the size of total number of features in the model feature store
+     *       else, this will be the size of the modelFeatureWeights
+     * @param allFeaturesSize
+     *     - total number of feature in the feature store used by this model
+     */
+    public ModelWeight(Feature.FeatureWeight[] modelFeatureWeights,
+        Feature.FeatureWeight[] extractedFeatureWeights, int allFeaturesSize) {
+      super(LTRScoringQuery.this);
+      this.extractedFeatureWeights = extractedFeatureWeights;
+      this.modelFeatureWeights = modelFeatureWeights;
+      this.modelFeatureValuesNormalized = new float[modelFeatureWeights.length];
+      this.featuresInfo = new FeatureInfo[allFeaturesSize];
+      setFeaturesInfo();
+    }
+
+    private void setFeaturesInfo(){
+      for (int i = 0; i < extractedFeatureWeights.length;++i){
+        String featName = extractedFeatureWeights[i].getName();
+        int featId = extractedFeatureWeights[i].getIndex();
+        float value = extractedFeatureWeights[i].getDefaultValue();
+        featuresInfo[featId] = new FeatureInfo(featName,value,false);
+      }
+    }
+
+    public FeatureInfo[] getFeaturesInfo(){
+      return featuresInfo;
+    }
+
+    // for test use
+    Feature.FeatureWeight[] getModelFeatureWeights() {
+      return modelFeatureWeights;
+    }
+
+    // for test use
+    float[] getModelFeatureValuesNormalized() {
+      return modelFeatureValuesNormalized;
+    }
+
+    // for test use
+    Feature.FeatureWeight[] getExtractedFeatureWeights() {
+      return extractedFeatureWeights;
+    }
+
+    /**
+     * Goes through all the stored feature values, and calculates the normalized
+     * values for all the features that will be used for scoring.
+     */
+    private void makeNormalizedFeatures() {
+      int pos = 0;
+      for (final Feature.FeatureWeight feature : modelFeatureWeights) {
+        final int featureId = feature.getIndex();
+        FeatureInfo fInfo = featuresInfo[featureId];
+        if (fInfo.isUsed()) { // not checking for finfo == null as that would be a bug we should catch
+          modelFeatureValuesNormalized[pos] = fInfo.getValue();
+        } else {
+          modelFeatureValuesNormalized[pos] = feature.getDefaultValue();
+        }
+        pos++;
+      }
+      ltrScoringModel.normalizeFeaturesInPlace(modelFeatureValuesNormalized);
+    }
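+
+    // Worked example (illustrative): with model features [f1, f2] where only
+    // f1 matched the current document, the vector passed to the model is
+    // [featuresInfo[f1].value, f2.defaultValue], after which the model's
+    // normalizers rewrite it in place.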
+
+    @Override
+    public Explanation explain(LeafReaderContext context, int doc)
+        throws IOException {
+
+      final Explanation[] explanations = new Explanation[this.featuresInfo.length];
+      for (final Feature.FeatureWeight feature : extractedFeatureWeights) {
+        explanations[feature.getIndex()] = feature.explain(context, doc);
+      }
+      final List<Explanation> featureExplanations = new ArrayList<>();
+      for (int idx = 0 ;idx < modelFeatureWeights.length; ++idx) {
+        final Feature.FeatureWeight f = modelFeatureWeights[idx];
+        Explanation e = ltrScoringModel.getNormalizerExplanation(explanations[f.getIndex()], idx);
+        featureExplanations.add(e);
+      }
+      final ModelScorer bs = scorer(context);
+      bs.iterator().advance(doc);
+
+      final float finalScore = bs.score();
+
+      return ltrScoringModel.explain(context, doc, finalScore, featureExplanations);
+
+    }
+
+    @Override
+    public void extractTerms(Set<Term> terms) {
+      for (final Feature.FeatureWeight feature : extractedFeatureWeights) {
+        feature.extractTerms(terms);
+      }
+    }
+
+    protected void reset() {
+      for (int i = 0; i < extractedFeatureWeights.length;++i){
+        int featId = extractedFeatureWeights[i].getIndex();
+        float value = extractedFeatureWeights[i].getDefaultValue();
+        featuresInfo[featId].setValue(value); // need to set the default value every time, as it is used in 'dense' mode even if used=false
+        featuresInfo[featId].setUsed(false);
+      }
+    }
+
+    @Override
+    public ModelScorer scorer(LeafReaderContext context) throws IOException {
+
+      final List<Feature.FeatureWeight.FeatureScorer> featureScorers = new ArrayList<Feature.FeatureWeight.FeatureScorer>(
+          extractedFeatureWeights.length);
+      for (final Feature.FeatureWeight featureWeight : extractedFeatureWeights) {
+        final Feature.FeatureWeight.FeatureScorer scorer = featureWeight.scorer(context);
+        if (scorer != null) {
+          featureScorers.add(scorer);
+        }
+      }
+      // Always return a ModelScorer, even if no features match, because score
+      // must always be called on the model for every document: zero matching
+      // features could still produce a non-zero score for a given model.
+      ModelScorer mscorer = new ModelScorer(this, featureScorers);
+      return mscorer;
+
+    }
+
+    public class ModelScorer extends Scorer {
+      final private DocInfo docInfo;
+      final private Scorer featureTraversalScorer;
+
+      public DocInfo getDocInfo() {
+        return docInfo;
+      }
+
+      public ModelScorer(Weight weight, List<Feature.FeatureWeight.FeatureScorer> featureScorers) {
+        super(weight);
+        docInfo = new DocInfo();
+        for (final Feature.FeatureWeight.FeatureScorer subScorer : featureScorers) {
+          subScorer.setDocInfo(docInfo);
+        }
+        // TODO: Allow the use of dense features in other cases
+        if (featureScorers.size() <= 1) {
+          featureTraversalScorer = new DenseModelScorer(weight, featureScorers);
+        } else {
+          featureTraversalScorer = new SparseModelScorer(weight, featureScorers);
+        }
+      }
+
+      @Override
+      public Collection<ChildScorer> getChildren() {
+        return featureTraversalScorer.getChildren();
+      }
+
+      @Override
+      public int docID() {
+        return featureTraversalScorer.docID();
+      }
+
+      @Override
+      public float score() throws IOException {
+        return featureTraversalScorer.score();
+      }
+
+      @Override
+      public int freq() throws IOException {
+        return featureTraversalScorer.freq();
+      }
+
+      @Override
+      public DocIdSetIterator iterator() {
+        return featureTraversalScorer.iterator();
+      }
+
+      private class SparseModelScorer extends Scorer {
+        final private DisiPriorityQueue subScorers;
+        final private ScoringQuerySparseIterator itr;
+
+        private int targetDoc = -1;
+        private int activeDoc = -1;
+
+        private SparseModelScorer(Weight weight,
+            List<Feature.FeatureWeight.FeatureScorer> featureScorers) {
+          super(weight);
+          if (featureScorers.size() <= 1) {
+            throw new IllegalArgumentException(
+                "There must be at least 2 subScorers");
+          }
+          subScorers = new DisiPriorityQueue(featureScorers.size());
+          for (final Scorer scorer : featureScorers) {
+            final DisiWrapper w = new DisiWrapper(scorer);
+            subScorers.add(w);
+          }
+
+          itr = new ScoringQuerySparseIterator(subScorers);
+        }
+
+        @Override
+        public int docID() {
+          return itr.docID();
+        }
+
+        @Override
+        public float score() throws IOException {
+          final DisiWrapper topList = subScorers.topList();
+          // If target doc we wanted to advance to matches the actual doc
+          // the underlying features advanced to, perform the feature
+          // calculations,
+          // otherwise just continue with the model's scoring process with empty
+          // features.
+          reset();
+          if (activeDoc == targetDoc) {
+            for (DisiWrapper w = topList; w != null; w = w.next) {
+              final Scorer subScorer = w.scorer;
+              Feature.FeatureWeight scFW = (Feature.FeatureWeight) subScorer.getWeight();
+              final int featureId = scFW.getIndex();
+              featuresInfo[featureId].setValue(subScorer.score());
+              featuresInfo[featureId].setUsed(true);
+            }
+          }
+          makeNormalizedFeatures();
+          return ltrScoringModel.score(modelFeatureValuesNormalized);
+        }
+
+        @Override
+        public int freq() throws IOException {
+          final DisiWrapper subMatches = subScorers.topList();
+          int freq = 1;
+          for (DisiWrapper w = subMatches.next; w != null; w = w.next) {
+            freq += 1;
+          }
+          return freq;
+        }
+
+        @Override
+        public DocIdSetIterator iterator() {
+          return itr;
+        }
+
+        @Override
+        public final Collection<ChildScorer> getChildren() {
+          final ArrayList<ChildScorer> children = new ArrayList<>();
+          for (final DisiWrapper scorer : subScorers) {
+            children.add(new ChildScorer(scorer.scorer, "SHOULD"));
+          }
+          return children;
+        }
+
+        private class ScoringQuerySparseIterator extends DisjunctionDISIApproximation {
+
+          public ScoringQuerySparseIterator(DisiPriorityQueue subIterators) {
+            super(subIterators);
+          }
+
+          @Override
+          public final int nextDoc() throws IOException {
+            if (activeDoc == targetDoc) {
+              activeDoc = super.nextDoc();
+            } else if (activeDoc < targetDoc) {
+              activeDoc = super.advance(targetDoc + 1);
+            }
+            return ++targetDoc;
+          }
+
+          @Override
+          public final int advance(int target) throws IOException {
+            // If target doc we wanted to advance to matches the actual doc
+            // the underlying features advanced to, perform the feature
+            // calculations,
+            // otherwise just continue with the model's scoring process with
+            // empty features.
+            if (activeDoc < target) {
+              activeDoc = super.advance(target);
+            }
+            targetDoc = target;
+            return targetDoc;
+          }
+        }
+
+      }
+
+      private class DenseModelScorer extends Scorer {
+        private int activeDoc = -1; // The doc that our scorers are actually at
+        private int targetDoc = -1; // The doc we were most recently told to go to
+        private int freq = -1;
+        final private List<Feature.FeatureWeight.FeatureScorer> featureScorers;
+
+        private DenseModelScorer(Weight weight,
+            List<Feature.FeatureWeight.FeatureScorer> featureScorers) {
+          super(weight);
+          this.featureScorers = featureScorers;
+        }
+
+        @Override
+        public int docID() {
+          return targetDoc;
+        }
+
+        @Override
+        public float score() throws IOException {
+          reset();
+          freq = 0;
+          if (targetDoc == activeDoc) {
+            for (final Scorer scorer : featureScorers) {
+              if (scorer.docID() == activeDoc) {
+                freq++;
+                Feature.FeatureWeight scFW = (Feature.FeatureWeight) scorer.getWeight();
+                final int featureId = scFW.getIndex();
+                featuresInfo[featureId].setValue(scorer.score());
+                featuresInfo[featureId].setUsed(true);
+              }
+            }
+          }
+          makeNormalizedFeatures();
+          return ltrScoringModel.score(modelFeatureValuesNormalized);
+        }
+
+        @Override
+        public final Collection<ChildScorer> getChildren() {
+          final ArrayList<ChildScorer> children = new ArrayList<>();
+          for (final Scorer scorer : featureScorers) {
+            children.add(new ChildScorer(scorer, "SHOULD"));
+          }
+          return children;
+        }
+
+        @Override
+        public int freq() throws IOException {
+          return freq;
+        }
+
+        @Override
+        public DocIdSetIterator iterator() {
+          return new DenseIterator();
+        }
+
+        private class DenseIterator extends DocIdSetIterator {
+
+          @Override
+          public int docID() {
+            return targetDoc;
+          }
+
+          @Override
+          public int nextDoc() throws IOException {
+            if (activeDoc <= targetDoc) {
+              activeDoc = NO_MORE_DOCS;
+              for (final Scorer scorer : featureScorers) {
+                if (scorer.docID() != NO_MORE_DOCS) {
+                  activeDoc = Math.min(activeDoc, scorer.iterator().nextDoc());
+                }
+              }
+            }
+            return ++targetDoc;
+          }
+
+          @Override
+          public int advance(int target) throws IOException {
+            if (activeDoc < target) {
+              activeDoc = NO_MORE_DOCS;
+              for (final Scorer scorer : featureScorers) {
+                if (scorer.docID() != NO_MORE_DOCS) {
+                  activeDoc = Math.min(activeDoc,
+                      scorer.iterator().advance(target));
+                }
+              }
+            }
+            targetDoc = target;
+            return target;
+          }
+
+          @Override
+          public long cost() {
+            long sum = 0;
+            for (int i = 0; i < featureScorers.size(); i++) {
+              sum += featureScorers.get(i).iterator().cost();
+            }
+            return sum;
+          }
+
+        }
+      }
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/LTRThreadModule.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/LTRThreadModule.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/LTRThreadModule.java
new file mode 100644
index 0000000..8e2563f
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/LTRThreadModule.java
@@ -0,0 +1,163 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr;
+
+import java.util.Iterator;
+import java.util.Map;
+import java.util.concurrent.Executor;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.SynchronousQueue;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.solr.common.util.ExecutorUtil;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.util.DefaultSolrThreadFactory;
+import org.apache.solr.util.SolrPluginUtils;
+import org.apache.solr.util.plugin.NamedListInitializedPlugin;
+
+final public class LTRThreadModule implements NamedListInitializedPlugin {
+
+  public static LTRThreadModule getInstance(NamedList args) {
+
+    final LTRThreadModule threadManager;
+    final NamedList threadManagerArgs = extractThreadModuleParams(args);
+    // a thread module is wanted if and only if there are thread module args
+    if (threadManagerArgs.size() > 0) {
+      // create and initialize the new instance
+      threadManager = new LTRThreadModule();
+      threadManager.init(threadManagerArgs);
+    } else {
+      threadManager = null;
+    }
+
+    return threadManager;
+  }
+
+  private static String CONFIG_PREFIX = "threadModule.";
+
+  private static NamedList extractThreadModuleParams(NamedList args) {
+
+    // gather the thread module args from amongst the general args
+    final NamedList extractedArgs = new NamedList();
+    for (Iterator<Map.Entry<String,Object>> it = args.iterator();
+        it.hasNext(); ) {
+      final Map.Entry<String,Object> entry = it.next();
+      final String key = entry.getKey();
+      if (key.startsWith(CONFIG_PREFIX)) {
+        extractedArgs.add(key.substring(CONFIG_PREFIX.length()), entry.getValue());
+      }
+    }
+
+    // remove consumed keys only once iteration is complete
+    // since NamedList iterator does not support 'remove'
+    for (Object key : extractedArgs.asShallowMap().keySet()) {
+      args.remove(CONFIG_PREFIX+key);
+    }
+
+    return extractedArgs;
+  }
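+
+  // Configuration sketch (assumed solrconfig.xml snippet, for illustration):
+  //   <int name="threadModule.totalPoolThreads">10</int>
+  //   <int name="threadModule.numThreadsPerRequest">5</int>
+  // extractThreadModuleParams strips the "threadModule." prefix so that
+  // init(...) can invoke the matching setters below via SolrPluginUtils.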
+
+  // settings
+  private int totalPoolThreads = 1;
+  private int numThreadsPerRequest = 1;
+  private int maxPoolSize = Integer.MAX_VALUE;
+  private long keepAliveTimeSeconds = 10;
+  private String threadNamePrefix = "ltrExecutor";
+
+  // implementation
+  private Semaphore ltrSemaphore;
+  private Executor createWeightScoreExecutor;
+
+  public LTRThreadModule() {
+  }
+
+  // For test use only.
+  LTRThreadModule(int totalPoolThreads, int numThreadsPerRequest) {
+    this.totalPoolThreads = totalPoolThreads;
+    this.numThreadsPerRequest = numThreadsPerRequest;
+    init(null);
+  }
+
+  @Override
+  public void init(NamedList args) {
+    if (args != null) {
+      SolrPluginUtils.invokeSetters(this, args);
+    }
+    validate();
+    if  (this.totalPoolThreads > 1 ){
+      ltrSemaphore = new Semaphore(totalPoolThreads);
+    } else {
+      ltrSemaphore = null;
+    }
+    createWeightScoreExecutor = new ExecutorUtil.MDCAwareThreadPoolExecutor(
+        0,
+        maxPoolSize,
+        keepAliveTimeSeconds, TimeUnit.SECONDS, // terminate idle threads after keepAliveTimeSeconds
+        new SynchronousQueue<Runnable>(),  // directly hand off tasks
+        new DefaultSolrThreadFactory(threadNamePrefix)
+        );
+  }
+
+  private void validate() {
+    if (totalPoolThreads <= 0){
+      throw new IllegalArgumentException("totalPoolThreads cannot be less than 1");
+    }
+    if (numThreadsPerRequest <= 0){
+      throw new IllegalArgumentException("numThreadsPerRequest cannot be less than 1");
+    }
+    if (totalPoolThreads < numThreadsPerRequest){
+      throw new IllegalArgumentException("numThreadsPerRequest cannot be greater than totalPoolThreads");
+    }
+  }
+
+  public void setTotalPoolThreads(int totalPoolThreads) {
+    this.totalPoolThreads = totalPoolThreads;
+  }
+
+  public void setNumThreadsPerRequest(int numThreadsPerRequest) {
+    this.numThreadsPerRequest = numThreadsPerRequest;
+  }
+
+  public void setMaxPoolSize(int maxPoolSize) {
+    this.maxPoolSize = maxPoolSize;
+  }
+
+  public void setKeepAliveTimeSeconds(long keepAliveTimeSeconds) {
+    this.keepAliveTimeSeconds = keepAliveTimeSeconds;
+  }
+
+  public void setThreadNamePrefix(String threadNamePrefix) {
+    this.threadNamePrefix = threadNamePrefix;
+  }
+
+  public Semaphore createQuerySemaphore() {
+    return (numThreadsPerRequest > 1 ? new Semaphore(numThreadsPerRequest) : null);
+  }
+
+  public void acquireLTRSemaphore() throws InterruptedException {
+    ltrSemaphore.acquire();
+  }
+
+  public void releaseLTRSemaphore() throws InterruptedException {
+    ltrSemaphore.release();
+  }
+
+  public void execute(Runnable command) {
+    createWeightScoreExecutor.execute(command);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/SolrQueryRequestContextUtils.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/SolrQueryRequestContextUtils.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/SolrQueryRequestContextUtils.java
new file mode 100644
index 0000000..66426ea
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/SolrQueryRequestContextUtils.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr;
+
+import org.apache.solr.request.SolrQueryRequest;
+
+public class SolrQueryRequestContextUtils {
+
+  /** key prefix to reduce possibility of clash with other code's key choices **/
+  private static final String LTR_PREFIX = "ltr.";
+
+  /** key of the feature logger in the request context **/
+  private static final String FEATURE_LOGGER = LTR_PREFIX + "feature_logger";
+
+  /** key of the scoring query in the request context **/
+  private static final String SCORING_QUERY = LTR_PREFIX + "scoring_query";
+
+  /** key of the isExtractingFeatures flag in the request context **/
+  private static final String IS_EXTRACTING_FEATURES = LTR_PREFIX + "isExtractingFeatures";
+
+  /** key of the feature vector store name in the request context **/
+  private static final String STORE = LTR_PREFIX + "store";
+
+  /** feature logger accessors **/
+
+  public static void setFeatureLogger(SolrQueryRequest req, FeatureLogger<?> featureLogger) {
+    req.getContext().put(FEATURE_LOGGER, featureLogger);
+  }
+
+  public static FeatureLogger<?> getFeatureLogger(SolrQueryRequest req) {
+    return (FeatureLogger<?>) req.getContext().get(FEATURE_LOGGER);
+  }
+
+  /** scoring query accessors **/
+
+  public static void setScoringQuery(SolrQueryRequest req, LTRScoringQuery scoringQuery) {
+    req.getContext().put(SCORING_QUERY, scoringQuery);
+  }
+
+  public static LTRScoringQuery getScoringQuery(SolrQueryRequest req) {
+    return (LTRScoringQuery) req.getContext().get(SCORING_QUERY);
+  }
+
+  /** isExtractingFeatures flag accessors **/
+
+  public static void setIsExtractingFeatures(SolrQueryRequest req) {
+    req.getContext().put(IS_EXTRACTING_FEATURES, Boolean.TRUE);
+  }
+
+  public static void clearIsExtractingFeatures(SolrQueryRequest req) {
+    req.getContext().put(IS_EXTRACTING_FEATURES, Boolean.FALSE);
+  }
+
+  public static boolean isExtractingFeatures(SolrQueryRequest req) {
+    return Boolean.TRUE.equals(req.getContext().get(IS_EXTRACTING_FEATURES));
+  }
+
+  /** feature vector store name accessors **/
+
+  public static void setFvStoreName(SolrQueryRequest req, String fvStoreName) {
+    req.getContext().put(STORE, fvStoreName);
+  }
+
+  public static String getFvStoreName(SolrQueryRequest req) {
+    return (String) req.getContext().get(STORE);
+  }
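+
+  // Usage sketch (illustrative): stash and retrieve per-request LTR state.
+  //   SolrQueryRequestContextUtils.setFvStoreName(req, "myFeatureStore");
+  //   String store = SolrQueryRequestContextUtils.getFvStoreName(req);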
+
+}
+

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/Feature.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/Feature.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/Feature.java
new file mode 100644
index 0000000..228b964
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/Feature.java
@@ -0,0 +1,335 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.feature;
+
+import java.io.IOException;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.Weight;
+import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.ltr.DocInfo;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.request.macro.MacroExpander;
+import org.apache.solr.util.SolrPluginUtils;
+
+/**
+ * A recipe for computing a feature.  Subclass this for specialized feature calculations.
+ * <p>
+ * A feature consists of
+ * <ul>
+ * <li> a name as the identifier
+ * <li> parameters to represent the specific feature
+ * </ul>
+ * <p>
+ * Example configuration (snippet):
+ * <pre>{
+   "class" : "...",
+   "name" : "myFeature",
+   "params" : {
+       ...
+   }
+}</pre>
+ * <p>
+ * {@link Feature} is an abstract class and concrete classes should implement
+ * the {@link #validate()} function, and must implement the {@link #paramsToMap()}
+ * and createWeight() methods.
+ */
+public abstract class Feature extends Query {
+
+  final protected String name;
+  private int index = -1;
+  private float defaultValue = 0.0f;
+
+  final private Map<String,Object> params;
+
+  public static Feature getInstance(SolrResourceLoader solrResourceLoader,
+      String className, String name, Map<String,Object> params) {
+    final Feature f = solrResourceLoader.newInstance(
+        className,
+        Feature.class,
+        new String[0], // no sub packages
+        new Class[] { String.class, Map.class },
+        new Object[] { name, params });
+    if (params != null) {
+      SolrPluginUtils.invokeSetters(f, params.entrySet());
+    }
+    f.validate();
+    return f;
+  }
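+
+  // Instantiation sketch (hypothetical store entry, names assumed): create a
+  // FieldLengthFeature named "titleLength" over the "title" field.
+  //   Map<String,Object> params = new LinkedHashMap<>();
+  //   params.put("field", "title");
+  //   Feature f = Feature.getInstance(loader,
+  //       "org.apache.solr.ltr.feature.FieldLengthFeature", "titleLength", params);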
+
+  public Feature(String name, Map<String,Object> params) {
+    this.name = name;
+    this.params = params;
+  }
+
+  /**
+   * As part of creation of a feature instance, this function confirms
+   * that the feature parameters are valid.
+   *
+   * @throws FeatureException
+   *             Feature Exception
+   */
+  protected abstract void validate() throws FeatureException;
+
+  @Override
+  public String toString(String field) {
+    final StringBuilder sb = new StringBuilder(64); // default initialCapacity of 16 won't be enough
+    sb.append(getClass().getSimpleName());
+    sb.append(" [name=").append(name);
+    final LinkedHashMap<String,Object> params = paramsToMap();
+    if (params != null) {
+      sb.append(", params=").append(params);
+    }
+    sb.append(']');
+    return sb.toString();
+  }
+
+  public abstract FeatureWeight createWeight(IndexSearcher searcher,
+      boolean needsScores, SolrQueryRequest request, Query originalQuery, Map<String,String[]> efi) throws IOException;
+
+  public float getDefaultValue() {
+    return defaultValue;
+  }
+
+  public void setDefaultValue(String value){
+    defaultValue = Float.parseFloat(value);
+  }
+
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = classHash();
+    result = (prime * result) + index;
+    result = (prime * result) + ((name == null) ? 0 : name.hashCode());
+    result = (prime * result) + ((params == null) ? 0 : params.hashCode());
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    return sameClassAs(o) &&  equalsTo(getClass().cast(o));
+  }
+
+  private boolean equalsTo(Feature other) {
+    if (index != other.index) {
+      return false;
+    }
+    if (name == null) {
+      if (other.name != null) {
+        return false;
+      }
+    } else if (!name.equals(other.name)) {
+      return false;
+    }
+    if (params == null) {
+      if (other.params != null) {
+        return false;
+      }
+    } else if (!params.equals(other.params)) {
+      return false;
+    }
+    return true;
+  }
+
+  /**
+   * @return the name
+   */
+  public String getName() {
+    return name;
+  }
+
+  /**
+   * @return the id
+   */
+  public int getIndex() {
+    return index;
+  }
+
+  /**
+   * @param index
+   *          Unique ID for this feature. Similar to feature name, except it can
+   *          be used to directly access the feature in the global list of
+   *          features.
+   */
+  public void setIndex(int index) {
+    this.index = index;
+  }
+
+  public abstract LinkedHashMap<String,Object> paramsToMap();
+  /**
+   * Weight for a feature
+   **/
+  public abstract class FeatureWeight extends Weight {
+
+    final protected IndexSearcher searcher;
+    final protected SolrQueryRequest request;
+    final protected Map<String,String[]> efi;
+    final protected MacroExpander macroExpander;
+    final protected Query originalQuery;
+
+    /**
+     * Initialize a feature without the normalizer from the feature file. This is
+     * called on initial construction since multiple models share the same
+     * features, but have different normalizers. A concrete model's feature is
+     * copied through featForNewModel().
+     *
+     * @param q
+     *          Solr query associated with this FeatureWeight
+     * @param searcher
+     *          Solr searcher available for features if they need them
+     */
+    public FeatureWeight(Query q, IndexSearcher searcher,
+        SolrQueryRequest request, Query originalQuery, Map<String,String[]> efi) {
+      super(q);
+      this.searcher = searcher;
+      this.request = request;
+      this.originalQuery = originalQuery;
+      this.efi = efi;
+      macroExpander = new MacroExpander(efi,true);
+    }
+
+    public String getName() {
+      return Feature.this.getName();
+    }
+
+    public int getIndex() {
+      return Feature.this.getIndex();
+    }
+
+    public float getDefaultValue() {
+      return Feature.this.getDefaultValue();
+    }
+
+    @Override
+    public abstract FeatureScorer scorer(LeafReaderContext context)
+        throws IOException;
+
+    @Override
+    public Explanation explain(LeafReaderContext context, int doc)
+        throws IOException {
+      final FeatureScorer r = scorer(context);
+      float score = getDefaultValue();
+      if (r != null) {
+        r.iterator().advance(doc);
+        if (r.docID() == doc) {
+          score = r.score();
+        }
+        return Explanation.match(score, toString());
+      }else{
+        return Explanation.match(score, "The feature has no value");
+      }
+    }
+
+    /**
+     * Used in the FeatureWeight's explain. Each feature should implement this,
+     * returning properties of the specific scorer useful for an explain. For
+     * example: "MyCustomClassFeature [name=" + name + ", myVariable:" + myVariable +
+     * "]". If not overridden, this default implementation returns basic feature
+     * properties, which might not include query-time specific values.
+     */
+    @Override
+    public String toString() {
+      return Feature.this.toString();
+    }
+
+    @Override
+    public void extractTerms(Set<Term> terms) {
+      // needs to be implemented by query subclasses
+      throw new UnsupportedOperationException();
+    }
+
+    /**
+     * A 'recipe' for computing a feature
+     */
+    public abstract class FeatureScorer extends Scorer {
+
+      final protected String name;
+      private DocInfo docInfo;
+      final protected DocIdSetIterator itr;
+
+      public FeatureScorer(Feature.FeatureWeight weight,
+          DocIdSetIterator itr) {
+        super(weight);
+        this.itr = itr;
+        name = weight.getName();
+        docInfo = null;
+      }
+
+      @Override
+      public abstract float score() throws IOException;
+
+      /**
+       * Used to provide context from initial score steps to later reranking steps.
+       */
+      public void setDocInfo(DocInfo docInfo) {
+        this.docInfo = docInfo;
+      }
+
+      public DocInfo getDocInfo() {
+        return docInfo;
+      }
+
+      @Override
+      public int freq() throws IOException {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public int docID() {
+        return itr.docID();
+      }
+
+      @Override
+      public DocIdSetIterator iterator() {
+        return itr;
+      }
+    }
+
+    /**
+     * Default FeatureScorer class that returns the score passed in. Can be used
+     * as a simple ValueFeature, or to return a default scorer in case an
+     * underlying feature's scorer is null.
+     */
+    public class ValueFeatureScorer extends FeatureScorer {
+      float constScore;
+
+      public ValueFeatureScorer(FeatureWeight weight, float constScore,
+          DocIdSetIterator itr) {
+        super(weight,itr);
+        this.constScore = constScore;
+      }
+
+      @Override
+      public float score() {
+        return constScore;
+      }
+
+    }
+
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/FeatureException.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/FeatureException.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/FeatureException.java
new file mode 100644
index 0000000..6c8f827
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/FeatureException.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.feature;
+
+public class FeatureException extends RuntimeException {
+
+  private static final long serialVersionUID = 1L;
+
+  public FeatureException(String message) {
+    super(message);
+  }
+
+  public FeatureException(String message, Exception cause) {
+    super(message, cause);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/FieldLengthFeature.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/FieldLengthFeature.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/FieldLengthFeature.java
new file mode 100644
index 0000000..4c17aff
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/FieldLengthFeature.java
@@ -0,0 +1,152 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.feature;
+
+import java.io.IOException;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.util.SmallFloat;
+import org.apache.solr.request.SolrQueryRequest;
+/**
+ * This feature returns the length of a field (in terms) for the current document.
+ * Example configuration:
+ * <pre>{
+  "name":  "titleLength",
+  "class": "org.apache.solr.ltr.feature.FieldLengthFeature",
+  "params": {
+      "field": "title"
+  }
+}</pre>
+ * Note: since this feature relies on norm values that are stored in a single byte,
+ * the value of the feature may differ slightly from the original field length.
+ * (see also {@link org.apache.lucene.search.similarities.ClassicSimilarity})
+ **/
+public class FieldLengthFeature extends Feature {
+
+  private String field;
+
+  public String getField() {
+    return field;
+  }
+
+  public void setField(String field) {
+    this.field = field;
+  }
+
+  @Override
+  public LinkedHashMap<String,Object> paramsToMap() {
+    final LinkedHashMap<String,Object> params = new LinkedHashMap<>(1, 1.0f);
+    params.put("field", field);
+    return params;
+  }
+
+  @Override
+  protected void validate() throws FeatureException {
+    if (field == null || field.isEmpty()) {
+      throw new FeatureException(getClass().getSimpleName()+
+          ": field must be provided");
+    }
+  }
+
+  /** Cache of norm values decoded from each possible byte, indexed by that byte. */
+  private static final float[] NORM_TABLE = new float[256];
+
+  static {
+    NORM_TABLE[0] = 0;
+    for (int i = 1; i < 256; i++) {
+      float norm = SmallFloat.byte315ToFloat((byte) i);
+      NORM_TABLE[i] = 1.0f / (norm * norm);
+    }
+  }
+
+  /**
+   * Decodes the norm value, assuming it is a single byte.
+   */
+  private float decodeNorm(long norm) {
+    return NORM_TABLE[(int) (norm & 0xFF)]; // & 0xFF maps negative bytes to positive above 127
+  }
+
+  public FieldLengthFeature(String name, Map<String,Object> params) {
+    super(name, params);
+  }
+
+  @Override
+  public FeatureWeight createWeight(IndexSearcher searcher, boolean needsScores,
+      SolrQueryRequest request, Query originalQuery, Map<String,String[]> efi)
+          throws IOException {
+
+    return new FieldLengthFeatureWeight(searcher, request, originalQuery, efi);
+  }
+
+  public class FieldLengthFeatureWeight extends FeatureWeight {
+
+    public FieldLengthFeatureWeight(IndexSearcher searcher,
+        SolrQueryRequest request, Query originalQuery, Map<String,String[]> efi) {
+      super(FieldLengthFeature.this, searcher, request, originalQuery, efi);
+    }
+
+    @Override
+    public FeatureScorer scorer(LeafReaderContext context) throws IOException {
+      NumericDocValues norms = context.reader().getNormValues(field);
+      if (norms == null) {
+        return new ValueFeatureScorer(this, 0f,
+            DocIdSetIterator.all(DocIdSetIterator.NO_MORE_DOCS));
+      }
+      return new FieldLengthFeatureScorer(this, norms);
+    }
+
+    public class FieldLengthFeatureScorer extends FeatureScorer {
+
+      NumericDocValues norms = null;
+
+      public FieldLengthFeatureScorer(FeatureWeight weight,
+          NumericDocValues norms) throws IOException {
+        super(weight, norms);
+        this.norms = norms;
+
+        // In the constructor, docId is -1, so inspect document 0 to check the field type.
+        final IndexableField idxF = searcher.doc(0).getField(field);
+        if (idxF != null && idxF.fieldType().omitNorms()) {
+          throw new IOException(
+              "FieldLengthFeature can't be used if omitNorms is enabled (field="
+                  + field + ")");
+        }
+      }
+
+      @Override
+      public float score() throws IOException {
+
+        final long l = norms.longValue();
+        final float numTerms = decodeNorm(l);
+        return numTerms;
+      }
+    }
+  }
+
+}
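
For reference, NORM_TABLE above inverts ClassicSimilarity's length encoding (norm = 1/sqrt(numTerms), stored lossily in a single byte), so each entry holds an approximate term count. A minimal, self-contained sketch of that round trip, assuming the Lucene 6.x SmallFloat API used by ClassicSimilarity:

import org.apache.lucene.util.SmallFloat;

public class NormDecodeDemo {
  public static void main(String[] args) {
    // ClassicSimilarity encodes 1/sqrt(numTerms) into one byte at index time...
    int numTerms = 49;
    byte encoded = SmallFloat.floatToByte315((float) (1.0 / Math.sqrt(numTerms)));
    float norm = SmallFloat.byte315ToFloat(encoded);
    // ...so 1/(norm*norm) recovers an approximation of the field length,
    // which is exactly what NORM_TABLE pre-computes for all 256 byte values.
    System.out.println(1.0f / (norm * norm)); // roughly 49, lossy by design
  }
}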

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/FieldValueFeature.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/FieldValueFeature.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/FieldValueFeature.java
new file mode 100644
index 0000000..279adbc
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/FieldValueFeature.java
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.feature;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.solr.request.SolrQueryRequest;
+
+/**
+ * This feature returns the value of a field in the current document.
+ * Example configuration:
+ * <pre>{
+  "name":  "rawHits",
+  "class": "org.apache.solr.ltr.feature.FieldValueFeature",
+  "params": {
+      "field": "hits"
+  }
+}</pre>
+ */
+public class FieldValueFeature extends Feature {
+
+  private String field;
+  private Set<String> fieldAsSet;
+
+  public String getField() {
+    return field;
+  }
+
+  public void setField(String field) {
+    this.field = field;
+    fieldAsSet = Collections.singleton(field);
+  }
+
+  @Override
+  public LinkedHashMap<String,Object> paramsToMap() {
+    final LinkedHashMap<String,Object> params = new LinkedHashMap<>(1, 1.0f);
+    params.put("field", field);
+    return params;
+  }
+
+  @Override
+  protected void validate() throws FeatureException {
+    if (field == null || field.isEmpty()) {
+      throw new FeatureException(getClass().getSimpleName()+
+          ": field must be provided");
+    }
+  }
+
+  public FieldValueFeature(String name, Map<String,Object> params) {
+    super(name, params);
+  }
+
+  @Override
+  public FeatureWeight createWeight(IndexSearcher searcher, boolean needsScores,
+      SolrQueryRequest request, Query originalQuery, Map<String,String[]> efi)
+          throws IOException {
+    return new FieldValueFeatureWeight(searcher, request, originalQuery, efi);
+  }
+
+  public class FieldValueFeatureWeight extends FeatureWeight {
+
+    public FieldValueFeatureWeight(IndexSearcher searcher,
+        SolrQueryRequest request, Query originalQuery, Map<String,String[]> efi) {
+      super(FieldValueFeature.this, searcher, request, originalQuery, efi);
+    }
+
+    @Override
+    public FeatureScorer scorer(LeafReaderContext context) throws IOException {
+      return new FieldValueFeatureScorer(this, context,
+          DocIdSetIterator.all(DocIdSetIterator.NO_MORE_DOCS));
+    }
+
+    public class FieldValueFeatureScorer extends FeatureScorer {
+
+      LeafReaderContext context = null;
+
+      public FieldValueFeatureScorer(FeatureWeight weight,
+          LeafReaderContext context, DocIdSetIterator itr) {
+        super(weight, itr);
+        this.context = context;
+      }
+
+      @Override
+      public float score() throws IOException {
+
+        try {
+          final Document document = context.reader().document(itr.docID(),
+              fieldAsSet);
+          final IndexableField indexableField = document.getField(field);
+          if (indexableField == null) {
+            return getDefaultValue();
+          }
+          final Number number = indexableField.numericValue();
+          if (number != null) {
+            return number.floatValue();
+          } else {
+            final String string = indexableField.stringValue();
+            // boolean values in the index are encoded with the
+            // chars T/F
+            if (string.equals("T")) {
+              return 1;
+            }
+            if (string.equals("F")) {
+              return 0;
+            }
+          }
+        } catch (final IOException e) {
+          throw new FeatureException(
+              e.toString() + ": " +
+                  "Unable to extract feature for "
+                  + name, e);
+        }
+        return getDefaultValue();
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/OriginalScoreFeature.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/OriginalScoreFeature.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/OriginalScoreFeature.java
new file mode 100644
index 0000000..125615c
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/OriginalScoreFeature.java
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.feature;
+
+import java.io.IOException;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.Weight;
+import org.apache.solr.ltr.DocInfo;
+import org.apache.solr.request.SolrQueryRequest;
+/**
+ * This feature returns the original score that the document had before performing
+ * the reranking.
+ * Example configuration:
+ * <pre>{
+  "name":  "originalScore",
+  "class": "org.apache.solr.ltr.feature.OriginalScoreFeature",
+  "params": { }
+}</pre>
+ **/
+public class OriginalScoreFeature extends Feature {
+
+  public OriginalScoreFeature(String name, Map<String,Object> params) {
+    super(name, params);
+  }
+
+  @Override
+  public LinkedHashMap<String,Object> paramsToMap() {
+    return null;
+  }
+
+  @Override
+  protected void validate() throws FeatureException {
+  }
+
+  @Override
+  public OriginalScoreWeight createWeight(IndexSearcher searcher,
+      boolean needsScores, SolrQueryRequest request, Query originalQuery, Map<String,String[]> efi) throws IOException {
+    return new OriginalScoreWeight(searcher, request, originalQuery, efi);
+  }
+
+  public class OriginalScoreWeight extends FeatureWeight {
+
+    final Weight w;
+
+    public OriginalScoreWeight(IndexSearcher searcher,
+        SolrQueryRequest request, Query originalQuery, Map<String,String[]> efi) throws IOException {
+      super(OriginalScoreFeature.this, searcher, request, originalQuery, efi);
+      w = searcher.createNormalizedWeight(originalQuery, true);
+    }
+
+    @Override
+    public String toString() {
+      return "OriginalScoreFeature [query:" + originalQuery.toString() + "]";
+    }
+
+    @Override
+    public FeatureScorer scorer(LeafReaderContext context) throws IOException {
+
+      final Scorer originalScorer = w.scorer(context);
+      return new OriginalScoreScorer(this, originalScorer);
+    }
+
+    public class OriginalScoreScorer extends FeatureScorer {
+      final private Scorer originalScorer;
+
+      public OriginalScoreScorer(FeatureWeight weight, Scorer originalScorer) {
+        super(weight,null);
+        this.originalScorer = originalScorer;
+      }
+
+      @Override
+      public float score() throws IOException {
+        // This is done to improve the speed of feature extraction. Since this
+        // was already scored in step 1
+        // we shouldn't need to calc original score again.
+        final DocInfo docInfo = getDocInfo();
+        return (docInfo.hasOriginalDocScore() ? docInfo.getOriginalDocScore() : originalScorer.score());
+      }
+
+      @Override
+      public int docID() {
+        return originalScorer.docID();
+      }
+
+      @Override
+      public DocIdSetIterator iterator() {
+        return originalScorer.iterator();
+      }
+    }
+
+  }
+
+}


[21/50] [abbrv] lucene-solr:apiv2: SOLR-9481: Raising debug level on the failing test and inserting extra logging. Also throw instead of swallowing in case of problems parsing local security.json file

Posted by sa...@apache.org.
SOLR-9481: Raising debug level on the failing test and inserting extra logging. Also throw instead of swallowing in case of problems parsing local security.json file


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/dbc2bc7c
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/dbc2bc7c
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/dbc2bc7c

Branch: refs/heads/apiv2
Commit: dbc2bc7ce8f76b30138fc47bc5e0a98b2028d504
Parents: 42eab70
Author: Jan Høydahl <ja...@apache.org>
Authored: Mon Oct 31 00:57:34 2016 +0100
Committer: Jan Høydahl <ja...@apache.org>
Committed: Mon Oct 31 00:57:34 2016 +0100

----------------------------------------------------------------------
 .../java/org/apache/solr/handler/admin/SecurityConfHandler.java  | 4 ++++
 .../org/apache/solr/handler/admin/SecurityConfHandlerLocal.java  | 4 +++-
 .../test/org/apache/solr/security/BasicAuthStandaloneTest.java   | 4 ++++
 3 files changed, 11 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dbc2bc7c/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandler.java
index 1fea431..88e4b01 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandler.java
@@ -234,6 +234,10 @@ public abstract class SecurityConfHandler extends RequestHandlerBase implements
     public SecurityConfig setData(InputStream securityJsonInputStream) {
       return setData(Utils.fromJSON(securityJsonInputStream));
     }
+
+    public String toString() {
+      return "SecurityConfig: version=" + version + ", data=" + Utils.toJSONString(data);
+    } 
   }
 }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dbc2bc7c/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandlerLocal.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandlerLocal.java b/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandlerLocal.java
index 34a635f..852d501 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandlerLocal.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandlerLocal.java
@@ -56,7 +56,9 @@ public class SecurityConfHandlerLocal extends SecurityConfHandler {
     if (Files.exists(securityJsonPath)) {
       try (InputStream securityJsonIs = Files.newInputStream(securityJsonPath)) {
         return new SecurityConfig().setData(securityJsonIs);
-      } catch (IOException e) { /* Fall through */ }
+      } catch (Exception e) { 
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Failed opening existing security.json file: " + securityJsonPath, e);
+      }
     }
     return new SecurityConfig();
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dbc2bc7c/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java b/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java
index 55e78e7..e5fec06 100644
--- a/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java
+++ b/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java
@@ -42,6 +42,7 @@ import org.apache.solr.common.util.Utils;
 import org.apache.solr.handler.admin.SecurityConfHandler;
 import org.apache.solr.handler.admin.SecurityConfHandlerLocalForTesting;
 import org.apache.solr.util.AbstractSolrTestCase;
+import org.apache.solr.util.LogLevel;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -82,6 +83,7 @@ public class BasicAuthStandaloneTest extends AbstractSolrTestCase {
   }
 
   @Test
+  @LogLevel("org.apache.solr=DEBUG")
   public void testBasicAuth() throws Exception {
 
     String authcPrefix = "/admin/authentication";
@@ -99,6 +101,8 @@ public class BasicAuthStandaloneTest extends AbstractSolrTestCase {
       securityConfHandler.persistConf(new SecurityConfHandler.SecurityConfig()
           .setData(Utils.fromJSONString(STD_CONF.replaceAll("'", "\""))));
       securityConfHandler.securityConfEdited();
+      log.debug("Newly written security.json is " + securityConfHandler.getSecurityConfig(false) +
+        " and baseUrl is " + baseUrl);
       verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication/class", "solr.BasicAuthPlugin", 20);
 
       String command = "{\n" +


[19/50] [abbrv] lucene-solr:apiv2: SOLR-9704: optimization: use filters after blockChildren for acceptDocs

Posted by sa...@apache.org.
SOLR-9704: optimization: use filters after blockChildren for acceptDocs


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/0f8802ba
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/0f8802ba
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/0f8802ba

Branch: refs/heads/apiv2
Commit: 0f8802ba20de35daac75f6bbcc28a1789a27b06a
Parents: d8d3a8b
Author: yonik <yo...@apache.org>
Authored: Sat Oct 29 20:43:54 2016 -0400
Committer: yonik <yo...@apache.org>
Committed: Sat Oct 29 20:43:54 2016 -0400

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  5 +++
 .../solr/search/facet/FacetProcessor.java       | 34 +++++++++++++-------
 2 files changed, 27 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0f8802ba/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 50639ab..3b3fba7 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -86,6 +86,11 @@ New Features
   Example:  { type:terms, field:category, filter:"user:yonik" } 
   (yonik)
 
+Optimizations
+----------------------
+* SOLR-9704: Facet Module / JSON Facet API: Optimize blockChildren facets that have
+  filters specified by using those filters as acceptDocs. (yonik)
+
 Other Changes
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0f8802ba/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java b/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
index e610478..a8914f1 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
@@ -28,7 +28,6 @@ import java.util.Map;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.util.SimpleOrderedMap;
@@ -88,14 +87,14 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
   public void process() throws IOException {
     // Check filters... if we do have filters they apply after domain changes.
     // We still calculate them first because we can use it in a parent->child domain change.
-    handleFilters();
-    handleDomainChanges();
-    if (filter != null) {
+    evalFilters();
+    boolean appliedFilters = handleDomainChanges();
+    if (filter != null && !appliedFilters) {
       fcontext.base = fcontext.base.intersection( filter );
     }
   }
 
-  private void handleFilters() throws IOException {
+  private void evalFilters() throws IOException {
     if (freq.filters == null || freq.filters.isEmpty()) return;
 
     List<Query> qlist = new ArrayList<>(freq.filters.size());
@@ -120,10 +119,11 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
     this.filter = fcontext.searcher.getDocSet(qlist);
   }
 
-  private void handleDomainChanges() throws IOException {
-    if (freq.domain == null) return;
+  private boolean handleDomainChanges() throws IOException {
+    if (freq.domain == null) return false;
     handleFilterExclusions();
-    handleBlockJoin();
+    boolean appliedFilters = handleBlockJoin();
+    return appliedFilters;
   }
 
   private void handleFilterExclusions() throws IOException {
@@ -187,8 +187,10 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
     fcontext.base = fcontext.searcher.getDocSet(qlist);
   }
 
-  private void handleBlockJoin() throws IOException {
-    if (!(freq.domain.toChildren || freq.domain.toParent)) return;
+  // returns "true" if filters have already been applied.
+  private boolean handleBlockJoin() throws IOException {
+    boolean appliedFilters = false;
+    if (!(freq.domain.toChildren || freq.domain.toParent)) return appliedFilters;
 
     // TODO: avoid query parsing per-bucket somehow...
     String parentStr = freq.domain.parents;
@@ -205,13 +207,21 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
     DocSet result;
 
     if (freq.domain.toChildren) {
-      DocSet filt = fcontext.searcher.getDocSetBits( new MatchAllDocsQuery() );
-      result = BlockJoin.toChildren(input, parents, filt, fcontext.qcontext);
+      // If there are filters on this facet, then use them as acceptDocs when executing toChildren.
+      // We need to remember to not redundantly re-apply these filters after.
+      DocSet acceptDocs = this.filter;
+      if (acceptDocs == null) {
+        acceptDocs = fcontext.searcher.getLiveDocs();
+      } else {
+        appliedFilters = true;
+      }
+      result = BlockJoin.toChildren(input, parents, acceptDocs, fcontext.qcontext);
     } else {
       result = BlockJoin.toParents(input, parents, fcontext.qcontext);
     }
 
     fcontext.base = result;
+    return appliedFilters;
   }
 
   protected void processStats(SimpleOrderedMap<Object> bucket, DocSet docs, int docCount) throws IOException {
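
Concretely, the optimization fires when a facet both switches its domain to children and carries its own filters: the filter DocSet is handed to BlockJoin.toChildren as acceptDocs instead of being intersected afterwards. A minimal SolrJ sketch of such a request, assuming the JSON Facet API's blockChildren domain syntax (collection, field, and filter names are hypothetical):

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;

public class BlockChildrenFacetDemo {
  public static void main(String[] args) throws Exception {
    HttpSolrClient client =
        new HttpSolrClient.Builder("http://localhost:8983/solr/mycollection").build();
    SolrQuery q = new SolrQuery("type_s:parent");
    // The facet's filter now doubles as acceptDocs inside BlockJoin.toChildren,
    // so it is not redundantly re-applied after the domain change.
    q.add("json.facet",
        "{ childColors: { type: terms, field: color_s,"
        + "  domain: { blockChildren: \"type_s:parent\" },"
        + "  filter: \"inStock_b:true\" } }");
    System.out.println(client.query(q).getResponse().get("facets"));
    client.close();
  }
}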


[33/50] [abbrv] lucene-solr:apiv2: SOLR-9481: Clearing existing global interceptors on HttpClientUtil to avoid user/pass leaks from other tests

Posted by sa...@apache.org.
SOLR-9481: Clearing existing global interceptors on HttpClientUtil to avoid user/pass leaks from other tests


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/4383bec8
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/4383bec8
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/4383bec8

Branch: refs/heads/apiv2
Commit: 4383bec84c38464c60e63880ad0ba37128d261a3
Parents: 22aa34e
Author: Jan Høydahl <ja...@apache.org>
Authored: Tue Nov 1 15:38:21 2016 +0100
Committer: Jan Høydahl <ja...@apache.org>
Committed: Tue Nov 1 15:38:21 2016 +0100

----------------------------------------------------------------------
 solr/core/src/java/org/apache/solr/security/BasicAuthPlugin.java  | 1 +
 .../test/org/apache/solr/security/BasicAuthStandaloneTest.java    | 3 +--
 2 files changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4383bec8/solr/core/src/java/org/apache/solr/security/BasicAuthPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/security/BasicAuthPlugin.java b/solr/core/src/java/org/apache/solr/security/BasicAuthPlugin.java
index 29a887b..5c0717b 100644
--- a/solr/core/src/java/org/apache/solr/security/BasicAuthPlugin.java
+++ b/solr/core/src/java/org/apache/solr/security/BasicAuthPlugin.java
@@ -119,6 +119,7 @@ public class BasicAuthPlugin extends AuthenticationPlugin implements ConfigEdita
               final String username = credentials.substring(0, p).trim();
               String pwd = credentials.substring(p + 1).trim();
               if (!authenticate(username, pwd)) {
+                log.debug("Bad auth credentials supplied in Authorization header");
                 authenticationFailure(response, "Bad credentials");
               } else {
                 HttpServletRequestWrapper wrapper = new HttpServletRequestWrapper(request) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4383bec8/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java b/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java
index e5fec06..7d6c436 100644
--- a/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java
+++ b/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java
@@ -73,6 +73,7 @@ public class BasicAuthStandaloneTest extends AbstractSolrTestCase {
     instance.setUp();
     jetty = createJetty(instance);
     securityConfHandler = new SecurityConfHandlerLocalForTesting(jetty.getCoreContainer());
+    HttpClientUtil.clearRequestInterceptors(); // Clear out any old Authorization headers
   }
 
   @Override
@@ -101,8 +102,6 @@ public class BasicAuthStandaloneTest extends AbstractSolrTestCase {
       securityConfHandler.persistConf(new SecurityConfHandler.SecurityConfig()
           .setData(Utils.fromJSONString(STD_CONF.replaceAll("'", "\""))));
       securityConfHandler.securityConfEdited();
-      log.debug("Newly written security.json is " + securityConfHandler.getSecurityConfig(false) +
-        " and baseUrl is " + baseUrl);
       verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication/class", "solr.BasicAuthPlugin", 20);
 
       String command = "{\n" +


[23/50] [abbrv] lucene-solr:apiv2: SOLR-9697: Fixed zk upconfig which was broken on windows

Posted by sa...@apache.org.
SOLR-9697: Fixed zk upconfig which was broken on windows


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/2ad2fca4
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/2ad2fca4
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/2ad2fca4

Branch: refs/heads/apiv2
Commit: 2ad2fca4169374225e6d08a072c3121beba24a93
Parents: 813b685
Author: Jan Høydahl <ja...@apache.org>
Authored: Mon Oct 31 09:35:09 2016 +0100
Committer: Jan Høydahl <ja...@apache.org>
Committed: Mon Oct 31 09:35:09 2016 +0100

----------------------------------------------------------------------
 solr/CHANGES.txt  | 2 ++
 solr/bin/solr.cmd | 1 -
 2 files changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2ad2fca4/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 6c3ffcc..2f46416 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -279,6 +279,8 @@ Bug Fixes
 
 * SOLR-2094: XPathEntityProcessor should reinitialize the XPathRecordReader instance if
   the 'forEach' or 'xpath' attributes are templates & it is not a root entity (Cao Manh Dat, noble)
+
+* SOLR-9697: zk upconfig broken on windows (Xavier Jmlucjav via janhoy)
  
 Optimizations
 ----------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2ad2fca4/solr/bin/solr.cmd
----------------------------------------------------------------------
diff --git a/solr/bin/solr.cmd b/solr/bin/solr.cmd
index 262a6f8..9860d77 100644
--- a/solr/bin/solr.cmd
+++ b/solr/bin/solr.cmd
@@ -1361,7 +1361,6 @@ IF "%1"=="-upconfig" (
   if not "%~1"=="" (
     goto set_zk_src
   )
-  goto zk_usage
 ) ELSE IF "!ZK_DST!"=="" (
   IF "%ZK_OP%"=="cp" (
     goto set_zk_dst


[46/50] [abbrv] lucene-solr:apiv2: SOLR-9709: add json.nl=map example comment, expand json.nl test coverage.

Posted by sa...@apache.org.
SOLR-9709: add json.nl=map example comment, expand json.nl test coverage.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d0e32f3e
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d0e32f3e
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d0e32f3e

Branch: refs/heads/apiv2
Commit: d0e32f3e5ca2a0acf2959551d6d4295738ff8088
Parents: 92f56ea
Author: Christine Poerschke <cp...@apache.org>
Authored: Wed Nov 2 12:12:44 2016 +0000
Committer: Christine Poerschke <cp...@apache.org>
Committed: Wed Nov 2 12:30:40 2016 +0000

----------------------------------------------------------------------
 .../solr/response/JSONResponseWriter.java       |  4 ++++
 .../apache/solr/response/JSONWriterTest.java    | 23 ++++++++++++++++++--
 2 files changed, 25 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d0e32f3e/solr/core/src/java/org/apache/solr/response/JSONResponseWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/response/JSONResponseWriter.java b/solr/core/src/java/org/apache/solr/response/JSONResponseWriter.java
index ad128d2..218d2e7 100644
--- a/solr/core/src/java/org/apache/solr/response/JSONResponseWriter.java
+++ b/solr/core/src/java/org/apache/solr/response/JSONResponseWriter.java
@@ -82,11 +82,14 @@ class JSONWriter extends TextResponseWriter {
   final protected String namedListStyle;
 
   static final String JSON_NL_STYLE="json.nl";
+  static final int    JSON_NL_STYLE_COUNT = 5; // for use by JSONWriterTest
+
   static final String JSON_NL_MAP="map";
   static final String JSON_NL_FLAT="flat";
   static final String JSON_NL_ARROFARR="arrarr";
   static final String JSON_NL_ARROFMAP="arrmap";
   static final String JSON_NL_ARROFNVP="arrnvp";
+
   static final String JSON_WRAPPER_FUNCTION="json.wrf";
 
   public JSONWriter(Writer writer, SolrQueryRequest req, SolrQueryResponse rsp) {
@@ -181,6 +184,7 @@ class JSONWriter extends TextResponseWriter {
    * repeating any keys if they are repeated in the NamedList.  null is mapped
    * to "".
    */ 
+  // NamedList("a"=1,"bar"="foo",null=3) => {"a":1,"bar":"foo","":3}
   protected void writeNamedListAsMapWithDups(String name, NamedList val) throws IOException {
     int sz = val.size();
     writeMapOpener(sz);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d0e32f3e/solr/core/src/test/org/apache/solr/response/JSONWriterTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/response/JSONWriterTest.java b/solr/core/src/test/org/apache/solr/response/JSONWriterTest.java
index a61cff3..b096a09 100644
--- a/solr/core/src/test/org/apache/solr/response/JSONWriterTest.java
+++ b/solr/core/src/test/org/apache/solr/response/JSONWriterTest.java
@@ -76,7 +76,20 @@ public class JSONWriterTest extends SolrTestCaseJ4 {
 
   @Test
   public void testJSON() throws IOException {
-    final String namedListStyle = (random().nextBoolean() ? JSONWriter.JSON_NL_ARROFARR : JSONWriter.JSON_NL_ARROFNVP);
+    final String[] namedListStyles = new String[] {
+        JSONWriter.JSON_NL_FLAT,
+        JSONWriter.JSON_NL_MAP,
+        JSONWriter.JSON_NL_ARROFARR,
+        JSONWriter.JSON_NL_ARROFMAP,
+        JSONWriter.JSON_NL_ARROFNVP,
+    };
+    for (final String namedListStyle : namedListStyles) {
+      implTestJSON(namedListStyle);
+    }
+    assertEquals(JSONWriter.JSON_NL_STYLE_COUNT, namedListStyles.length);
+  }
+
+  private void implTestJSON(final String namedListStyle) throws IOException {
     SolrQueryRequest req = req("wt","json","json.nl",namedListStyle);
     SolrQueryResponse rsp = new SolrQueryResponse();
     JSONResponseWriter w = new JSONResponseWriter();
@@ -94,8 +107,14 @@ public class JSONWriterTest extends SolrTestCaseJ4 {
     w.write(buf, req, rsp);
 
     final String expectedNLjson;
-    if (namedListStyle == JSONWriter.JSON_NL_ARROFARR) {
+    if (namedListStyle == JSONWriter.JSON_NL_FLAT) {
+      expectedNLjson = "\"nl\":[\"data1\",\"he\\u2028llo\\u2029!\",null,42]";
+    } else if (namedListStyle == JSONWriter.JSON_NL_MAP) {
+      expectedNLjson = "\"nl\":{\"data1\":\"he\\u2028llo\\u2029!\",\"\":42}";
+    } else if (namedListStyle == JSONWriter.JSON_NL_ARROFARR) {
       expectedNLjson = "\"nl\":[[\"data1\",\"he\\u2028llo\\u2029!\"],[null,42]]";
+    } else if (namedListStyle == JSONWriter.JSON_NL_ARROFMAP) {
+      expectedNLjson = "\"nl\":[{\"data1\":\"he\\u2028llo\\u2029!\"},42]";
     } else if (namedListStyle == JSONWriter.JSON_NL_ARROFNVP) {
       expectedNLjson = "\"nl\":[{\"name\":\"data1\",\"str\":\"he\\u2028llo\\u2029!\"},{\"int\":42}]";
     } else {
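
Each json.nl style renders the same NamedList differently, as the expanded expectations above spell out. A minimal sketch of requesting the newly-documented map style over plain HTTP (core name hypothetical); per the writer's javadoc, a null key becomes "" and repeated keys are written repeatedly:

import java.net.URL;
import java.util.Scanner;

public class JsonNlMapDemo {
  public static void main(String[] args) throws Exception {
    // json.nl=map renders NamedList("data1"=v, null=42) as {"data1":v, "":42}
    URL url = new URL("http://localhost:8983/solr/mycore/select?q=*:*&wt=json&json.nl=map");
    try (Scanner s = new Scanner(url.openStream(), "UTF-8")) {
      while (s.hasNextLine()) {
        System.out.println(s.nextLine());
      }
    }
  }
}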


[17/50] [abbrv] lucene-solr:apiv2: SOLR-9681: add filters to any facet command

Posted by sa...@apache.org.
SOLR-9681: add filters to any facet command


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/650276e1
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/650276e1
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/650276e1

Branch: refs/heads/apiv2
Commit: 650276e14bd85cdd12a77956f2403369ff3465ac
Parents: 2e21511
Author: yonik <yo...@apache.org>
Authored: Sat Oct 29 14:16:27 2016 -0400
Committer: yonik <yo...@apache.org>
Committed: Sat Oct 29 14:54:19 2016 -0400

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  7 ++++
 .../solr/search/facet/FacetProcessor.java       | 35 +++++++++++++++++++-
 .../apache/solr/search/facet/FacetRequest.java  | 13 +++++++-
 .../solr/search/facet/TestJsonFacets.java       | 21 ++++++++++++
 4 files changed, 74 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/650276e1/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index f81ff75..50639ab 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -79,6 +79,13 @@ Jetty 9.3.8.v20160314
 Detailed Change List
 ----------------------
 
+New Features
+----------------------
+* SOLR-9681: FacetModule / JSON Facet API added the ability to add filters directly to
+  any facet command.  The filters are applied after any domain change operations.
+  Example:  { type:terms, field:category, filter:"user:yonik" } 
+  (yonik)
+
 Other Changes
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/650276e1/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java b/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
index fa26319..e610478 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
@@ -50,11 +50,13 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
   FacetContext fcontext;
   FacetRequestT freq;
 
+  DocSet filter;  // additional filters specified by "filter"  // TODO: do these need to be on the context to support recomputing during multi-select?
   LinkedHashMap<String,SlotAcc> accMap;
   SlotAcc[] accs;
   CountSlotAcc countAcc;
 
-  /** factory method for invoking json facet framework as whole */
+  /** factory method for invoking the json facet framework as a whole.
+   * Note: this is currently only used from SimpleFacets, not from JSON Facet API itself. */
   public static FacetProcessor<?> createProcessor(SolrQueryRequest req,
                                                   Map<String, Object> params, DocSet docs){
     FacetParser parser = new FacetTopParser(req);
@@ -84,7 +86,38 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
   }
 
   public void process() throws IOException {
+    // Check filters... if we do have filters they apply after domain changes.
+    // We still calculate them first because we can use it in a parent->child domain change.
+    handleFilters();
     handleDomainChanges();
+    if (filter != null) {
+      fcontext.base = fcontext.base.intersection( filter );
+    }
+  }
+
+  private void handleFilters() throws IOException {
+    if (freq.filters == null || freq.filters.isEmpty()) return;
+
+    List<Query> qlist = new ArrayList<>(freq.filters.size());
+    // TODO: prevent parsing filters each time!
+    for (Object rawFilter : freq.filters) {
+      Query symbolicFilter;
+      if (rawFilter instanceof String) {
+        QParser parser = null;
+        try {
+          parser = QParser.getParser((String)rawFilter, fcontext.req);
+          symbolicFilter = parser.getQuery();
+        } catch (SyntaxError syntaxError) {
+          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, syntaxError);
+        }
+      } else {
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Bad query (expected a string): " + rawFilter);
+      }
+
+      qlist.add(symbolicFilter);
+    }
+
+    this.filter = fcontext.searcher.getDocSet(qlist);
   }
 
   private void handleDomainChanges() throws IOException {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/650276e1/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
index 40ca686..d3c8722 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
@@ -16,6 +16,7 @@
  */
 package org.apache.solr.search.facet;
 
+import java.util.ArrayList;
 import java.util.EnumSet;
 import java.util.LinkedHashMap;
 import java.util.List;
@@ -77,7 +78,7 @@ public abstract class FacetRequest {
 
   protected Map<String,AggValueSource> facetStats;  // per-bucket statistics
   protected Map<String,FacetRequest> subFacets;     // per-bucket sub-facets
-  protected List<String> filters;
+  protected List<Object> filters;
   protected boolean processEmpty;
   protected Domain domain;
 
@@ -376,6 +377,16 @@ abstract class FacetParser<FacetRequestT extends FacetRequest> {
 
       }
 
+      Object filterOrList = m.get("filter");
+      if (filterOrList != null) {
+        if (filterOrList instanceof List) {
+          facet.filters = (List<Object>)filterOrList;
+        } else {
+          facet.filters = new ArrayList<>(1);
+          facet.filters.add(filterOrList);
+        }
+      }
+
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/650276e1/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
index 5527a3d..eafa134 100644
--- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
+++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
@@ -1164,6 +1164,27 @@ public class TestJsonFacets extends SolrTestCaseHS {
     );
 
 
+    // test filter
+    client.testJQ(params(p, "q", "*:*", "myfilt","${cat_s}:A"
+        , "json.facet", "{" +
+            "t:{${terms} type:terms, field:${cat_s}, filter:[]}" + // empty filter list
+            ",t_filt:{${terms} type:terms, field:${cat_s}, filter:'${cat_s}:B'}" +
+            ",t_filt2:{${terms} type:terms, field:${cat_s}, filter:'{!query v=$myfilt}'}" +  // test access to qparser and other query parameters
+            ",t_filt3:{${terms} type:terms, field:${cat_s}, filter:['-id:1','-id:2']}" +
+            ",q:{type:query, q:'${cat_s}:B', filter:['-id:5']}" + // also tests a top-level negative filter
+            ",r:{type:range, field:${num_d}, start:-5, end:10, gap:5, filter:'-id:4'}" +
+            "}"
+        )
+        , "facets=={ count:6, " +
+            "t       :{ buckets:[ {val:B, count:3}, {val:A, count:2} ] }" +
+            ",t_filt :{ buckets:[ {val:B, count:3}] } " +
+            ",t_filt2:{ buckets:[ {val:A, count:2}] } " +
+            ",t_filt3:{ buckets:[ {val:B, count:2}, {val:A, count:1}] } " +
+            ",q:{count:2}" +
+            ",r:{buckets:[ {val:-5.0,count:1}, {val:0.0,count:1}, {val:5.0,count:0} ] }" +
+            "}"
+    );
+    
   }
 
   @Test
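
The CHANGES example above maps one-to-one onto a request parameter. A minimal SolrJ sketch (client setup is hypothetical; the facet body is the one from the CHANGES entry):

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;

public class FacetFilterDemo {
  public static void main(String[] args) throws Exception {
    HttpSolrClient client =
        new HttpSolrClient.Builder("http://localhost:8983/solr/mycollection").build();
    SolrQuery q = new SolrQuery("*:*");
    // Per this commit, the filter is applied after any domain change
    // and before the buckets are counted.
    q.add("json.facet",
        "{ categories: { type: terms, field: category, filter: \"user:yonik\" } }");
    System.out.println(client.query(q).getResponse().get("facets"));
    client.close();
  }
}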


[48/50] [abbrv] lucene-solr:apiv2: SOLR-9710: fix test bug

Posted by sa...@apache.org.
SOLR-9710: fix test bug


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/be772dbd
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/be772dbd
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/be772dbd

Branch: refs/heads/apiv2
Commit: be772dbd4851363edade0fa22587a986122d1ff3
Parents: 9eb806a
Author: jdyer1 <jd...@apache.org>
Authored: Wed Nov 2 10:07:55 2016 -0500
Committer: jdyer1 <jd...@apache.org>
Committed: Wed Nov 2 10:07:55 2016 -0500

----------------------------------------------------------------------
 .../collection1/conf/solrconfig-spellcheckcomponent.xml   | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/be772dbd/solr/core/src/test-files/solr/collection1/conf/solrconfig-spellcheckcomponent.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig-spellcheckcomponent.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig-spellcheckcomponent.xml
index 23e1cd2..0e4d190 100644
--- a/solr/core/src/test-files/solr/collection1/conf/solrconfig-spellcheckcomponent.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/solrconfig-spellcheckcomponent.xml
@@ -28,12 +28,12 @@
     <mergeScheduler class="${solr.tests.mergeScheduler}" />
     <writeLockTimeout>1000</writeLockTimeout>
     <commitLockTimeout>10000</commitLockTimeout>
-    <lockType>${solr.tests.lockType:single}</lockType>
-    <query>
-      <useColdSearcher>false</useColdSearcher>
-      <maxWarmingSearchers>1</maxWarmingSearchers>
-    </query>
+    <lockType>${solr.tests.lockType:single}</lockType>    
   </indexConfig>
+  <query>
+    <useColdSearcher>false</useColdSearcher>
+    <maxWarmingSearchers>1</maxWarmingSearchers>
+  </query>
   <requestHandler name="standard"
      class="solr.StandardRequestHandler"></requestHandler>
 


[45/50] [abbrv] lucene-solr:apiv2: LUCENE-7135: only use OS_ARCH if we couldn't access sun.arch.data.model

Posted by sa...@apache.org.
LUCENE-7135: only use OS_ARCH if we couldn't access sun.arch.data.model


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/92f56ea9
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/92f56ea9
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/92f56ea9

Branch: refs/heads/apiv2
Commit: 92f56ea9dd72d935c21aadae6a20ee23fa9c3cf8
Parents: 69e654b
Author: Mike McCandless <mi...@apache.org>
Authored: Wed Nov 2 06:48:29 2016 -0400
Committer: Mike McCandless <mi...@apache.org>
Committed: Wed Nov 2 06:48:29 2016 -0400

----------------------------------------------------------------------
 .../core/src/java/org/apache/lucene/util/Constants.java   | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/92f56ea9/lucene/core/src/java/org/apache/lucene/util/Constants.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/util/Constants.java b/lucene/core/src/java/org/apache/lucene/util/Constants.java
index e6a9609..007ef85 100644
--- a/lucene/core/src/java/org/apache/lucene/util/Constants.java
+++ b/lucene/core/src/java/org/apache/lucene/util/Constants.java
@@ -75,10 +75,12 @@ public final class Constants {
         is64Bit = datamodel.contains("64");
       }
     } catch (SecurityException ex) {}
-    if (datamodel == null && OS_ARCH != null && OS_ARCH.contains("64")) {
-      is64Bit = true;
-    } else {
-      is64Bit = false;
+    if (datamodel == null) {
+      if (OS_ARCH != null && OS_ARCH.contains("64")) {
+        is64Bit = true;
+      } else {
+        is64Bit = false;
+      }
     }
     JRE_IS_64BIT = is64Bit;
   }
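
A standalone sketch of the detection order this patch establishes: trust the JVM's own sun.arch.data.model report when it is readable, and consult os.arch only when it is absent.

public class Is64BitDemo {
  public static void main(String[] args) {
    String datamodel = null;
    try {
      datamodel = System.getProperty("sun.arch.data.model");
    } catch (SecurityException ignored) {
      // fall through to the os.arch heuristic, as Constants does
    }
    boolean is64Bit;
    if (datamodel != null) {
      is64Bit = datamodel.contains("64");
    } else {
      String osArch = System.getProperty("os.arch");
      is64Bit = osArch != null && osArch.contains("64");
    }
    System.out.println("JRE_IS_64BIT = " + is64Bit);
  }
}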


[38/50] [abbrv] lucene-solr:apiv2: SOLR-8542: Adds Solr Learning to Rank (LTR) plugin for reranking results with machine learning models. (Michael Nilsson, Diego Ceccarelli, Joshua Pantony, Jon Dorando, Naveen Santhapuri, Alessandro Benedetti, David Groh

Posted by sa...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test-files/solr/solr.xml
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/solr/solr.xml b/solr/contrib/ltr/src/test-files/solr/solr.xml
new file mode 100644
index 0000000..c8c3ebe
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/solr/solr.xml
@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<solr>
+
+  <str name="shareSchema">${shareSchema:false}</str>
+  <str name="configSetBaseDir">${configSetBaseDir:configsets}</str>
+  <str name="coreRootDirectory">${coreRootDirectory:.}</str>
+
+  <shardHandlerFactory name="shardHandlerFactory" class="HttpShardHandlerFactory">
+    <str name="urlScheme">${urlScheme:}</str>
+    <int name="socketTimeout">${socketTimeout:90000}</int>
+    <int name="connTimeout">${connTimeout:15000}</int>
+  </shardHandlerFactory>
+
+  <solrcloud>
+    <str name="host">127.0.0.1</str>
+    <int name="hostPort">${hostPort:8983}</int>
+    <str name="hostContext">${hostContext:solr}</str>
+    <int name="zkClientTimeout">${solr.zkclienttimeout:30000}</int>
+    <bool name="genericCoreNodeNames">${genericCoreNodeNames:true}</bool>
+    <int name="leaderVoteWait">${leaderVoteWait:10000}</int>
+    <int name="distribUpdateConnTimeout">${distribUpdateConnTimeout:45000}</int>
+    <int name="distribUpdateSoTimeout">${distribUpdateSoTimeout:340000}</int>
+  </solrcloud>
+
+</solr>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTROnSolrCloud.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTROnSolrCloud.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTROnSolrCloud.java
new file mode 100644
index 0000000..2e01a64
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTROnSolrCloud.java
@@ -0,0 +1,211 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr;
+
+import java.io.File;
+import java.util.SortedMap;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.embedded.JettyConfig;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.response.CollectionAdminResponse;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.cloud.AbstractDistribZkTestBase;
+import org.apache.solr.cloud.MiniSolrCloudCluster;
+import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.ltr.feature.SolrFeature;
+import org.apache.solr.ltr.feature.ValueFeature;
+import org.apache.solr.ltr.model.LinearModel;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.junit.AfterClass;
+import org.junit.Test;
+
+public class TestLTROnSolrCloud extends TestRerankBase {
+
+  private MiniSolrCloudCluster solrCluster;
+  String solrconfig = "solrconfig-ltr.xml";
+  String schema = "schema.xml";
+
+  SortedMap<ServletHolder,String> extraServlets = null;
+
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+    extraServlets = setupTestInit(solrconfig, schema, true);
+    System.setProperty("enable.update.log", "true");
+
+    int numberOfShards = random().nextInt(4)+1;
+    int numberOfReplicas = random().nextInt(2)+1;
+    int maxShardsPerNode = numberOfShards+random().nextInt(4)+1;
+
+    int numberOfNodes = numberOfShards * maxShardsPerNode;
+
+    setupSolrCluster(numberOfShards, numberOfReplicas, numberOfNodes, maxShardsPerNode);
+  }
+
+  @Override
+  public void tearDown() throws Exception {
+    restTestHarness.close();
+    restTestHarness = null;
+    jetty.stop();
+    jetty = null;
+    solrCluster.shutdown();
+    super.tearDown();
+  }
+
+  @Test
+  public void testSimpleQuery() throws Exception {
+    // setUp() randomly picks a configuration with [1..4] shards and [1..2] replicas
+
+    // Test the regular query: it sorts the documents by inverse
+    // popularity (the least popular, docid == 1, comes first).
+    SolrQuery query = new SolrQuery("{!func}sub(8,field(popularity))");
+
+    query.setRequestHandler("/query");
+    query.setFields("*,score");
+    query.setParam("rows", "8");
+
+    QueryResponse queryResponse =
+        solrCluster.getSolrClient().query(COLLECTION,query);
+    assertEquals(8, queryResponse.getResults().getNumFound());
+    assertEquals("1", queryResponse.getResults().get(0).get("id").toString());
+    assertEquals("2", queryResponse.getResults().get(1).get("id").toString());
+    assertEquals("3", queryResponse.getResults().get(2).get("id").toString());
+    assertEquals("4", queryResponse.getResults().get(3).get("id").toString());
+
+    // Test re-rank and feature vectors returned
+    query.setFields("*,score,features:[fv]");
+    query.add("rq", "{!ltr model=powpularityS-model reRankDocs=8}");
+    queryResponse =
+        solrCluster.getSolrClient().query(COLLECTION,query);
+    assertEquals(8, queryResponse.getResults().getNumFound());
+    assertEquals("8", queryResponse.getResults().get(0).get("id").toString());
+    assertEquals("powpularityS:64.0;c3:2.0",
+        queryResponse.getResults().get(0).get("features").toString());
+    assertEquals("7", queryResponse.getResults().get(1).get("id").toString());
+    assertEquals("powpularityS:49.0;c3:2.0",
+        queryResponse.getResults().get(1).get("features").toString());
+    assertEquals("6", queryResponse.getResults().get(2).get("id").toString());
+    assertEquals("powpularityS:36.0;c3:2.0",
+        queryResponse.getResults().get(2).get("features").toString());
+    assertEquals("5", queryResponse.getResults().get(3).get("id").toString());
+    assertEquals("powpularityS:25.0;c3:2.0",
+        queryResponse.getResults().get(3).get("features").toString());
+  }
+
+  private void setupSolrCluster(int numShards, int numReplicas, int numServers, int maxShardsPerNode) throws Exception {
+    JettyConfig jc = buildJettyConfig("/solr");
+    jc = JettyConfig.builder(jc).withServlets(extraServlets).build();
+    solrCluster = new MiniSolrCloudCluster(numServers, tmpSolrHome.toPath(), jc);
+    File configDir = tmpSolrHome.toPath().resolve("collection1/conf").toFile();
+    solrCluster.uploadConfigSet(configDir.toPath(), "conf1");
+
+    solrCluster.getSolrClient().setDefaultCollection(COLLECTION);
+
+    createCollection(COLLECTION, "conf1", numShards, numReplicas, maxShardsPerNode);
+    indexDocuments(COLLECTION);
+
+    createJettyAndHarness(tmpSolrHome.getAbsolutePath(), solrconfig, schema,
+            "/solr", true, extraServlets);
+    loadModelsAndFeatures();
+  }
+
+  private void createCollection(String name, String config, int numShards, int numReplicas, int maxShardsPerNode)
+      throws Exception {
+    CollectionAdminResponse response;
+    CollectionAdminRequest.Create create =
+        CollectionAdminRequest.createCollection(name, config, numShards, numReplicas);
+    create.setMaxShardsPerNode(maxShardsPerNode);
+    response = create.process(solrCluster.getSolrClient());
+
+    if (response.getStatus() != 0 || response.getErrorMessages() != null) {
+      fail("Could not create collection. Response" + response.toString());
+    }
+    ZkStateReader zkStateReader = solrCluster.getSolrClient().getZkStateReader();
+    AbstractDistribZkTestBase.waitForRecoveriesToFinish(name, zkStateReader, false, true, 100);
+  }
+
+
+  void indexDocument(String collection, String id, String title, String description, int popularity)
+    throws Exception{
+    SolrInputDocument doc = new SolrInputDocument();
+    doc.setField("id", id);
+    doc.setField("title", title);
+    doc.setField("description", description);
+    doc.setField("popularity", popularity);
+    solrCluster.getSolrClient().add(collection, doc);
+  }
+
+  private void indexDocuments(final String collection)
+       throws Exception {
+    final int collectionSize = 8;
+    for (int docId = 1; docId <= collectionSize;  docId++) {
+      final int popularity = docId;
+      indexDocument(collection, String.valueOf(docId), "a1", "bloom", popularity);
+    }
+    solrCluster.getSolrClient().commit(collection);
+  }
+
+
+  private void loadModelsAndFeatures() throws Exception {
+    final String featureStore = "test";
+    final String[] featureNames = new String[] {"powpularityS","c3"};
+    final String jsonModelParams = "{\"weights\":{\"powpularityS\":1.0,\"c3\":1.0}}";
+
+    loadFeature(
+            featureNames[0],
+            SolrFeature.class.getCanonicalName(),
+            featureStore,
+            "{\"q\":\"{!func}pow(popularity,2)\"}"
+    );
+    loadFeature(
+            featureNames[1],
+            ValueFeature.class.getCanonicalName(),
+            featureStore,
+            "{\"value\":2}"
+    );
+
+    loadModel(
+             "powpularityS-model",
+             LinearModel.class.getCanonicalName(),
+             featureNames,
+             featureStore,
+             jsonModelParams
+    );
+    reloadCollection(COLLECTION);
+  }
+
+  private void reloadCollection(String collection) throws Exception {
+    CollectionAdminRequest.Reload reloadRequest = CollectionAdminRequest.reloadCollection(collection);
+    CollectionAdminResponse response = reloadRequest.process(solrCluster.getSolrClient());
+    assertEquals(0, response.getStatus());
+    assertTrue(response.isSuccess());
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    FileUtils.deleteDirectory(tmpSolrHome);
+    System.clearProperty("managed.schema.mutable");
+  }
+
+}

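The cloud test above drives the full LTR round trip: upload a feature store and a model, index, then re-rank with the rq parameter. A minimal client-side sketch of that query pattern, assuming an already-running cluster with the powpularityS-model deployed; cloudClient is a hypothetical CloudSolrClient, not part of this patch:

    SolrQuery q = new SolrQuery("{!func}sub(8,field(popularity))");
    q.setRequestHandler("/query");
    q.setFields("*,score,features:[fv]");   // [fv] transformer returns the feature vector per hit
    q.add("rq", "{!ltr model=powpularityS-model reRankDocs=8}");
    QueryResponse rsp = cloudClient.query("collection1", q);  // cloudClient: assumed CloudSolrClient
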
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRQParserExplain.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRQParserExplain.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRQParserExplain.java
new file mode 100644
index 0000000..2f90df8
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRQParserExplain.java
@@ -0,0 +1,152 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.ltr.model.LinearModel;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestLTRQParserExplain extends TestRerankBase {
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    setuptest();
+    loadFeatures("features-store-test-model.json");
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    aftertest();
+  }
+
+
+  @Test
+  public void testRerankedExplain() throws Exception {
+    loadModel("linear2", LinearModel.class.getCanonicalName(), new String[] {
+        "constant1", "constant2", "pop"},
+        "{\"weights\":{\"pop\":1.0,\"constant1\":1.5,\"constant2\":3.5}}");
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("title:bloomberg");
+    query.setParam("debugQuery", "on");
+    query.add("rows", "2");
+    query.add("rq", "{!ltr reRankDocs=2 model=linear2}");
+    query.add("fl", "*,score");
+
+    assertJQ(
+        "/query" + query.toQueryString(),
+        "/debug/explain/9=='\n13.5 = LinearModel(name=linear2,featureWeights=[constant1=1.5,constant2=3.5,pop=1.0]) model applied to features, sum of:\n  1.5 = prod of:\n    1.5 = weight on feature\n    1.0 = ValueFeature [name=constant1, params={value=1}]\n  7.0 = prod of:\n    3.5 = weight on feature\n    2.0 = ValueFeature [name=constant2, params={value=2}]\n  5.0 = prod of:\n    1.0 = weight on feature\n    5.0 = FieldValueFeature [name=pop, params={field=popularity}]\n'");
+  }
+
+  @Test
+  public void testRerankedExplainSameBetweenDifferentDocsWithSameFeatures() throws Exception {
+    loadFeatures("features-linear.json");
+    loadModels("linear-model.json");
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("title:bloomberg");
+    query.setParam("debugQuery", "on");
+    query.add("rows", "4");
+    query.add("rq", "{!ltr reRankDocs=4 model=6029760550880411648}");
+    query.add("fl", "*,score");
+    query.add("wt", "json");
+    final String expectedExplainNormalizer = "normalized using MinMaxNormalizer(min=0.0,max=10.0)";
+    final String expectedExplain = "\n3.5116758 = LinearModel(name=6029760550880411648,featureWeights=["
+        + "title=0.0,"
+        + "description=0.1,"
+        + "keywords=0.2,"
+        + "popularity=0.3,"
+        + "text=0.4,"
+        + "queryIntentPerson=0.1231231,"
+        + "queryIntentCompany=0.12121211"
+        + "]) model applied to features, sum of:\n  0.0 = prod of:\n    0.0 = weight on feature\n    1.0 = ValueFeature [name=title, params={value=1}]\n  0.2 = prod of:\n    0.1 = weight on feature\n    2.0 = ValueFeature [name=description, params={value=2}]\n  0.4 = prod of:\n    0.2 = weight on feature\n    2.0 = ValueFeature [name=keywords, params={value=2}]\n  0.09 = prod of:\n    0.3 = weight on feature\n    0.3 = "+expectedExplainNormalizer+"\n      3.0 = ValueFeature [name=popularity, params={value=3}]\n  1.6 = prod of:\n    0.4 = weight on feature\n    4.0 = ValueFeature [name=text, params={value=4}]\n  0.6156155 = prod of:\n    0.1231231 = weight on feature\n    5.0 = ValueFeature [name=queryIntentPerson, params={value=5}]\n  0.60606056 = prod of:\n    0.12121211 = weight on feature\n    5.0 = ValueFeature [name=queryIntentCompany, params={value=5}]\n";
+
+    assertJQ(
+        "/query" + query.toQueryString(),
+        "/debug/explain/7=='"+expectedExplain+"'}");
+    assertJQ(
+        "/query" + query.toQueryString(),
+        "/debug/explain/9=='"+expectedExplain+"'}");
+  }
+
+  @Test
+  public void LinearScoreExplainMissingEfiFeatureShouldReturnDefaultScore() throws Exception {
+    loadFeatures("features-linear-efi.json");
+    loadModels("linear-model-efi.json");
+
+    SolrQuery query = new SolrQuery();
+    query.setQuery("title:bloomberg");
+    query.setParam("debugQuery", "on");
+    query.add("rows", "4");
+    query.add("rq", "{!ltr reRankDocs=4 model=linear-efi}");
+    query.add("fl", "*,score");
+    query.add("wt", "xml");
+
+    final String linearModelEfiString = "LinearModel(name=linear-efi,featureWeights=["
+      + "sampleConstant=1.0,"
+      + "search_number_of_nights=2.0])";
+
+    query.remove("wt");
+    query.add("wt", "json");
+    assertJQ(
+        "/query" + query.toQueryString(),
+        "/debug/explain/7=='\n5.0 = "+linearModelEfiString+" model applied to features, sum of:\n  5.0 = prod of:\n    1.0 = weight on feature\n    5.0 = ValueFeature [name=sampleConstant, params={value=5}]\n" +
+            "  0.0 = prod of:\n" +
+            "    2.0 = weight on feature\n" +
+            "    0.0 = The feature has no value\n'}");
+    assertJQ(
+        "/query" + query.toQueryString(),
+        "/debug/explain/9=='\n5.0 = "+linearModelEfiString+" model applied to features, sum of:\n  5.0 = prod of:\n    1.0 = weight on feature\n    5.0 = ValueFeature [name=sampleConstant, params={value=5}]\n" +
+            "  0.0 = prod of:\n" +
+            "    2.0 = weight on feature\n" +
+            "    0.0 = The feature has no value\n'}");
+  }
+
+  @Test
+  public void multipleAdditiveTreesScoreExplainMissingEfiFeatureShouldReturnDefaultScore() throws Exception {
+    loadFeatures("external_features_for_sparse_processing.json");
+    loadModels("multipleadditivetreesmodel_external_binary_features.json");
+
+    SolrQuery query = new SolrQuery();
+    query.setQuery("title:bloomberg");
+    query.setParam("debugQuery", "on");
+    query.add("rows", "4");
+    query.add("rq", "{!ltr reRankDocs=4 model=external_model_binary_feature efi.user_device_tablet=1}");
+    query.add("fl", "*,score");
+
+    final String tree1 = "(weight=1.0,root=(feature=user_device_smartphone,threshold=0.5,left=0.0,right=50.0))";
+    final String tree2 = "(weight=1.0,root=(feature=user_device_tablet,threshold=0.5,left=0.0,right=65.0))";
+    final String trees = "["+tree1+","+tree2+"]";
+
+    query.add("wt", "json");
+    assertJQ(
+        "/query" + query.toQueryString(),
+        "/debug/explain/7=='\n" +
+            "65.0 = MultipleAdditiveTreesModel(name=external_model_binary_feature,trees="+trees+") model applied to features, sum of:\n" +
+            "  0.0 = tree 0 | \\'user_device_smartphone\\':0.0 <= 0.500001, Go Left | val: 0.0\n" +
+            "  65.0 = tree 1 | \\'user_device_tablet\\':1.0 > 0.500001, Go Right | val: 65.0\n'}");
+    assertJQ(
+        "/query" + query.toQueryString(),
+        "/debug/explain/9=='\n" +
+            "65.0 = MultipleAdditiveTreesModel(name=external_model_binary_feature,trees="+trees+") model applied to features, sum of:\n" +
+            "  0.0 = tree 0 | \\'user_device_smartphone\\':0.0 <= 0.500001, Go Left | val: 0.0\n" +
+            "  65.0 = tree 1 | \\'user_device_tablet\\':1.0 > 0.500001, Go Right | val: 65.0\n'}");
+  }
+
+}

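The long expected-explain strings above decompose into one "prod of" block per feature: the model weight times the (possibly normalized) feature value, summed by the linear model. A worked check of the popularity line, assuming MinMaxNormalizer computes (value - min) / (max - min):

    float normalized = (3.0f - 0.0f) / (10.0f - 0.0f); // 0.3 under MinMaxNormalizer(min=0.0,max=10.0)
    float contribution = 0.3f * normalized;            // 0.09, matching the "0.09 = prod of:" line above
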
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRQParserPlugin.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRQParserPlugin.java
new file mode 100644
index 0000000..f28ab0d
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRQParserPlugin.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestLTRQParserPlugin extends TestRerankBase {
+
+
+  @BeforeClass
+  public static void before() throws Exception {
+    setuptest("solrconfig-ltr.xml", "schema.xml");
+    bulkIndex();
+
+    loadFeatures("features-linear.json");
+    loadModels("linear-model.json");
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    aftertest();
+  }
+
+  @Test
+  public void ltrModelIdMissingTest() throws Exception {
+    final String solrQuery = "_query_:{!edismax qf='title' mm=100% v='bloomberg' tie=0.1}";
+    final SolrQuery query = new SolrQuery();
+    query.setQuery(solrQuery);
+    query.add("fl", "*, score");
+    query.add("rows", "4");
+    query.add("fv", "true");
+    query.add("rq", "{!ltr reRankDocs=100}");
+
+    final String res = restTestHarness.query("/query" + query.toQueryString());
+    assertTrue(res.contains("Must provide model in the request"));
+  }
+
+  @Test
+  public void ltrModelIdDoesNotExistTest() throws Exception {
+    final String solrQuery = "_query_:{!edismax qf='title' mm=100% v='bloomberg' tie=0.1}";
+    final SolrQuery query = new SolrQuery();
+    query.setQuery(solrQuery);
+    query.add("fl", "*, score");
+    query.add("rows", "4");
+    query.add("fv", "true");
+    query.add("rq", "{!ltr model=-1 reRankDocs=100}");
+
+    final String res = restTestHarness.query("/query" + query.toQueryString());
+    assertTrue(res.contains("cannot find model"));
+  }
+
+  @Test
+  public void ltrMoreResultsThanReRankedTest() throws Exception {
+    final String solrQuery = "_query_:{!edismax qf='title' mm=100% v='bloomberg' tie=0.1}";
+    final SolrQuery query = new SolrQuery();
+    query.setQuery(solrQuery);
+    query.add("fl", "*, score");
+    query.add("rows", "4");
+    query.add("fv", "true");
+
+    String nonRerankedScore = "0.09271725";
+
+    // Normal solr order
+    assertJQ("/query" + query.toQueryString(),
+        "/response/docs/[0]/id=='9'",
+        "/response/docs/[1]/id=='8'",
+        "/response/docs/[2]/id=='7'",
+        "/response/docs/[3]/id=='6'",
+        "/response/docs/[3]/score=="+nonRerankedScore
+    );
+
+    query.add("rq", "{!ltr model=6029760550880411648 reRankDocs=3}");
+
+    // Different order for the top 3 reranked docs; the 4th doc keeps its
+    // non-reranked position and score
+    assertJQ("/query" + query.toQueryString(),
+        "/response/docs/[0]/id=='7'",
+        "/response/docs/[1]/id=='8'",
+        "/response/docs/[2]/id=='9'",
+        "/response/docs/[3]/id=='6'",
+        "/response/docs/[3]/score=="+nonRerankedScore
+    );
+  }
+
+  @Test
+  public void ltrNoResultsTest() throws Exception {
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("title:bloomberg23");
+    query.add("fl", "*,[fv]");
+    query.add("rows", "3");
+    query.add("debugQuery", "on");
+    query.add("rq", "{!ltr reRankDocs=3 model=6029760550880411648}");
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==0");
+  }
+
+}

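ltrMoreResultsThanReRankedTest pins down the reRankDocs contract: only the top N first-pass hits are re-scored by the model, and every hit below the cut keeps its original position and score. In sketch form, with the values from the test above:

    query.add("rq", "{!ltr model=6029760550880411648 reRankDocs=3}");
    // first pass: 9, 8, 7, 6  ->  after re-ranking the top 3: 7, 8, 9, 6
    // doc 6 sits below the cut and keeps its first-pass score 0.09271725
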
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRReRankingPipeline.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRReRankingPipeline.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRReRankingPipeline.java
new file mode 100644
index 0000000..a98fc4f
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRReRankingPipeline.java
@@ -0,0 +1,300 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FloatDocValuesField;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.ltr.feature.Feature;
+import org.apache.solr.ltr.feature.FieldValueFeature;
+import org.apache.solr.ltr.model.LTRScoringModel;
+import org.apache.solr.ltr.model.TestLinearModel;
+import org.apache.solr.ltr.norm.IdentityNormalizer;
+import org.apache.solr.ltr.norm.Normalizer;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class TestLTRReRankingPipeline extends LuceneTestCase {
+
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  private static final SolrResourceLoader solrResourceLoader = new SolrResourceLoader();
+
+  private IndexSearcher getSearcher(IndexReader r) {
+    final IndexSearcher searcher = newSearcher(r);
+
+    return searcher;
+  }
+
+  private static List<Feature> makeFieldValueFeatures(int[] featureIds,
+      String field) {
+    final List<Feature> features = new ArrayList<>();
+    for (final int i : featureIds) {
+      final Map<String,Object> params = new HashMap<String,Object>();
+      params.put("field", field);
+      final Feature f = Feature.getInstance(solrResourceLoader,
+          FieldValueFeature.class.getCanonicalName(),
+          "f" + i, params);
+      f.setIndex(i);
+      features.add(f);
+    }
+    return features;
+  }
+
+  private class MockModel extends LTRScoringModel {
+
+    public MockModel(String name, List<Feature> features,
+        List<Normalizer> norms,
+        String featureStoreName, List<Feature> allFeatures,
+        Map<String,Object> params) {
+      super(name, features, norms, featureStoreName, allFeatures, params);
+    }
+
+    @Override
+    public float score(float[] modelFeatureValuesNormalized) {
+      return modelFeatureValuesNormalized[2];
+    }
+
+    @Override
+    public Explanation explain(LeafReaderContext context, int doc,
+        float finalScore, List<Explanation> featureExplanations) {
+      return null;
+    }
+
+  }
+
+  @Ignore
+  @Test
+  public void testRescorer() throws IOException {
+    final Directory dir = newDirectory();
+    final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+
+    Document doc = new Document();
+    doc.add(newStringField("id", "0", Field.Store.YES));
+    doc.add(newTextField("field", "wizard the the the the the oz",
+        Field.Store.NO));
+    doc.add(new FloatDocValuesField("final-score", 1.0f));
+
+    w.addDocument(doc);
+    doc = new Document();
+    doc.add(newStringField("id", "1", Field.Store.YES));
+    // 1 extra token, but wizard and oz are close.
+    doc.add(newTextField("field", "wizard oz the the the the the the",
+        Field.Store.NO));
+    doc.add(new FloatDocValuesField("final-score", 2.0f));
+    w.addDocument(doc);
+
+    final IndexReader r = w.getReader();
+    w.close();
+
+    // Do ordinary BooleanQuery:
+    final BooleanQuery.Builder bqBuilder = new BooleanQuery.Builder();
+    bqBuilder.add(new TermQuery(new Term("field", "wizard")), BooleanClause.Occur.SHOULD);
+    bqBuilder.add(new TermQuery(new Term("field", "oz")), BooleanClause.Occur.SHOULD);
+    final IndexSearcher searcher = getSearcher(r);
+    // first run the standard query
+    TopDocs hits = searcher.search(bqBuilder.build(), 10);
+    assertEquals(2, hits.totalHits);
+    assertEquals("0", searcher.doc(hits.scoreDocs[0].doc).get("id"));
+    assertEquals("1", searcher.doc(hits.scoreDocs[1].doc).get("id"));
+
+    final List<Feature> features = makeFieldValueFeatures(new int[] {0, 1, 2},
+        "final-score");
+    final List<Normalizer> norms =
+        new ArrayList<Normalizer>(
+            Collections.nCopies(features.size(),IdentityNormalizer.INSTANCE));
+    final List<Feature> allFeatures = makeFieldValueFeatures(new int[] {0, 1,
+        2, 3, 4, 5, 6, 7, 8, 9}, "final-score");
+    final LTRScoringModel ltrScoringModel = TestLinearModel.createLinearModel("test",
+        features, norms, "test", allFeatures, null);
+
+    final LTRRescorer rescorer = new LTRRescorer(new LTRScoringQuery(ltrScoringModel));
+    hits = rescorer.rescore(searcher, hits, 2);
+
+    // rerank using the field final-score
+    assertEquals("1", searcher.doc(hits.scoreDocs[0].doc).get("id"));
+    assertEquals("0", searcher.doc(hits.scoreDocs[1].doc).get("id"));
+
+    r.close();
+    dir.close();
+
+  }
+
+  @Ignore
+  @Test
+  public void testDifferentTopN() throws IOException {
+    final Directory dir = newDirectory();
+    final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+
+    Document doc = new Document();
+    doc.add(newStringField("id", "0", Field.Store.YES));
+    doc.add(newTextField("field", "wizard oz oz oz oz oz", Field.Store.NO));
+    doc.add(new FloatDocValuesField("final-score", 1.0f));
+    w.addDocument(doc);
+
+    doc = new Document();
+    doc.add(newStringField("id", "1", Field.Store.YES));
+    doc.add(newTextField("field", "wizard oz oz oz oz the", Field.Store.NO));
+    doc.add(new FloatDocValuesField("final-score", 2.0f));
+    w.addDocument(doc);
+    doc = new Document();
+    doc.add(newStringField("id", "2", Field.Store.YES));
+    doc.add(newTextField("field", "wizard oz oz oz the the ", Field.Store.NO));
+    doc.add(new FloatDocValuesField("final-score", 3.0f));
+    w.addDocument(doc);
+    doc = new Document();
+    doc.add(newStringField("id", "3", Field.Store.YES));
+    doc.add(newTextField("field", "wizard oz oz the the the the ",
+        Field.Store.NO));
+    doc.add(new FloatDocValuesField("final-score", 4.0f));
+    w.addDocument(doc);
+    doc = new Document();
+    doc.add(newStringField("id", "4", Field.Store.YES));
+    doc.add(newTextField("field", "wizard oz the the the the the the",
+        Field.Store.NO));
+    doc.add(new FloatDocValuesField("final-score", 5.0f));
+    w.addDocument(doc);
+
+    final IndexReader r = w.getReader();
+    w.close();
+
+    // Do ordinary BooleanQuery:
+    final BooleanQuery.Builder bqBuilder = new BooleanQuery.Builder();
+    bqBuilder.add(new TermQuery(new Term("field", "wizard")), BooleanClause.Occur.SHOULD);
+    bqBuilder.add(new TermQuery(new Term("field", "oz")), BooleanClause.Occur.SHOULD);
+    final IndexSearcher searcher = getSearcher(r);
+
+    // first run the standard query
+    TopDocs hits = searcher.search(bqBuilder.build(), 10);
+    assertEquals(5, hits.totalHits);
+
+    assertEquals("0", searcher.doc(hits.scoreDocs[0].doc).get("id"));
+    assertEquals("1", searcher.doc(hits.scoreDocs[1].doc).get("id"));
+    assertEquals("2", searcher.doc(hits.scoreDocs[2].doc).get("id"));
+    assertEquals("3", searcher.doc(hits.scoreDocs[3].doc).get("id"));
+    assertEquals("4", searcher.doc(hits.scoreDocs[4].doc).get("id"));
+
+    final List<Feature> features = makeFieldValueFeatures(new int[] {0, 1, 2},
+        "final-score");
+    final List<Normalizer> norms =
+        new ArrayList<Normalizer>(
+            Collections.nCopies(features.size(),IdentityNormalizer.INSTANCE));
+    final List<Feature> allFeatures = makeFieldValueFeatures(new int[] {0, 1,
+        2, 3, 4, 5, 6, 7, 8, 9}, "final-score");
+    final LTRScoringModel ltrScoringModel = TestLinearModel.createLinearModel("test",
+        features, norms, "test", allFeatures, null);
+
+    final LTRRescorer rescorer = new LTRRescorer(new LTRScoringQuery(ltrScoringModel));
+
+    // rerank @ 0 should not change the order
+    hits = rescorer.rescore(searcher, hits, 0);
+    assertEquals("0", searcher.doc(hits.scoreDocs[0].doc).get("id"));
+    assertEquals("1", searcher.doc(hits.scoreDocs[1].doc).get("id"));
+    assertEquals("2", searcher.doc(hits.scoreDocs[2].doc).get("id"));
+    assertEquals("3", searcher.doc(hits.scoreDocs[3].doc).get("id"));
+    assertEquals("4", searcher.doc(hits.scoreDocs[4].doc).get("id"));
+
+    // test rerank with different topN cuts
+
+    for (int topN = 1; topN <= 5; topN++) {
+      log.info("rerank {} documents ", topN);
+      hits = searcher.search(bqBuilder.build(), 10);
+
+      final ScoreDoc[] slice = new ScoreDoc[topN];
+      System.arraycopy(hits.scoreDocs, 0, slice, 0, topN);
+      hits = new TopDocs(hits.totalHits, slice, hits.getMaxScore());
+      hits = rescorer.rescore(searcher, hits, topN);
+      for (int i = topN - 1, j = 0; i >= 0; i--, j++) {
+        log.info("doc {} in pos {}", searcher.doc(hits.scoreDocs[j].doc)
+            .get("id"), j);
+
+        assertEquals(i,
+            Integer.parseInt(searcher.doc(hits.scoreDocs[j].doc).get("id")));
+        assertEquals(i + 1, hits.scoreDocs[j].score, 0.00001);
+
+      }
+    }
+
+    r.close();
+    dir.close();
+
+  }
+
+  @Test
+  public void testDocParam() throws Exception {
+    final Map<String,Object> test = new HashMap<String,Object>();
+    test.put("fake", 2);
+    List<Feature> features = makeFieldValueFeatures(new int[] {0},
+        "final-score");
+    List<Normalizer> norms =
+        new ArrayList<Normalizer>(
+            Collections.nCopies(features.size(),IdentityNormalizer.INSTANCE));
+    List<Feature> allFeatures = makeFieldValueFeatures(new int[] {0},
+        "final-score");
+    MockModel ltrScoringModel = new MockModel("test",
+        features, norms, "test", allFeatures, null);
+    LTRScoringQuery query = new LTRScoringQuery(ltrScoringModel);
+    LTRScoringQuery.ModelWeight wgt = query.createWeight(null, true, 1f);
+    LTRScoringQuery.ModelWeight.ModelScorer modelScr = wgt.scorer(null);
+    modelScr.getDocInfo().setOriginalDocScore(Float.valueOf(1f));
+    for (final Scorer.ChildScorer feat : modelScr.getChildren()) {
+      assertNotNull(((Feature.FeatureWeight.FeatureScorer) feat.child).getDocInfo().getOriginalDocScore());
+    }
+
+    features = makeFieldValueFeatures(new int[] {0, 1, 2}, "final-score");
+    norms =
+        new ArrayList<Normalizer>(
+            Collections.nCopies(features.size(),IdentityNormalizer.INSTANCE));
+    allFeatures = makeFieldValueFeatures(new int[] {0, 1, 2, 3, 4, 5, 6, 7, 8,
+        9}, "final-score");
+    ltrScoringModel = new MockModel("test", features, norms,
+        "test", allFeatures, null);
+    query = new LTRScoringQuery(ltrScoringModel);
+    wgt = query.createWeight(null, true, 1f);
+    modelScr = wgt.scorer(null);
+    modelScr.getDocInfo().setOriginalDocScore(Float.valueOf(1f));
+    for (final Scorer.ChildScorer feat : modelScr.getChildren()) {
+      assertNotNull(((Feature.FeatureWeight.FeatureScorer) feat.child).getDocInfo().getOriginalDocScore());
+    }
+  }
+
+}

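Stripped of the index setup, the pipeline the tests above exercise is a plain two-pass search. A minimal sketch, assuming a searcher, a first-pass query firstPassQuery, and an LTRScoringModel model built as in testRescorer:

    TopDocs firstPass = searcher.search(firstPassQuery, 10);      // pass 1: normal Lucene scoring
    LTRRescorer rescorer = new LTRRescorer(new LTRScoringQuery(model));
    TopDocs reranked = rescorer.rescore(searcher, firstPass, 5);  // pass 2: model re-scores the top 5
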
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRScoringQuery.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRScoringQuery.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRScoringQuery.java
new file mode 100644
index 0000000..0576c99
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRScoringQuery.java
@@ -0,0 +1,319 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FloatDocValuesField;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.index.ReaderUtil;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.Weight;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.ltr.feature.Feature;
+import org.apache.solr.ltr.feature.ValueFeature;
+import org.apache.solr.ltr.model.LTRScoringModel;
+import org.apache.solr.ltr.model.ModelException;
+import org.apache.solr.ltr.model.TestLinearModel;
+import org.apache.solr.ltr.norm.IdentityNormalizer;
+import org.apache.solr.ltr.norm.Normalizer;
+import org.apache.solr.ltr.norm.NormalizerException;
+import org.junit.Test;
+
+public class TestLTRScoringQuery extends LuceneTestCase {
+
+  public final static SolrResourceLoader solrResourceLoader = new SolrResourceLoader();
+
+  private IndexSearcher getSearcher(IndexReader r) {
+    final IndexSearcher searcher = newSearcher(r, false, false);
+    return searcher;
+  }
+
+  private static List<Feature> makeFeatures(int[] featureIds) {
+    final List<Feature> features = new ArrayList<>();
+    for (final int i : featureIds) {
+      Map<String,Object> params = new HashMap<String,Object>();
+      params.put("value", i);
+      final Feature f = Feature.getInstance(solrResourceLoader,
+          ValueFeature.class.getCanonicalName(),
+          "f" + i, params);
+      f.setIndex(i);
+      features.add(f);
+    }
+    return features;
+  }
+
+  private static List<Feature> makeFilterFeatures(int[] featureIds) {
+    final List<Feature> features = new ArrayList<>();
+    for (final int i : featureIds) {
+      Map<String,Object> params = new HashMap<String,Object>();
+      params.put("value", i);
+      final Feature f = Feature.getInstance(solrResourceLoader,
+          ValueFeature.class.getCanonicalName(),
+          "f" + i, params);
+      f.setIndex(i);
+      features.add(f);
+    }
+    return features;
+  }
+
+  private static Map<String,Object> makeFeatureWeights(List<Feature> features) {
+    final Map<String,Object> nameParams = new HashMap<String,Object>();
+    final HashMap<String,Double> modelWeights = new HashMap<String,Double>();
+    for (final Feature feat : features) {
+      modelWeights.put(feat.getName(), 0.1);
+    }
+    nameParams.put("weights", modelWeights);
+    return nameParams;
+  }
+
+  private LTRScoringQuery.ModelWeight performQuery(TopDocs hits,
+      IndexSearcher searcher, int docid, LTRScoringQuery model) throws IOException,
+      ModelException {
+    final List<LeafReaderContext> leafContexts = searcher.getTopReaderContext()
+        .leaves();
+    final int n = ReaderUtil.subIndex(hits.scoreDocs[0].doc, leafContexts);
+    final LeafReaderContext context = leafContexts.get(n);
+    final int deBasedDoc = hits.scoreDocs[0].doc - context.docBase;
+
+    final Weight weight = searcher.createNormalizedWeight(model, true);
+    final Scorer scorer = weight.scorer(context);
+
+    // advance to the requested doc and evaluate the model score
+    scorer.iterator().advance(deBasedDoc);
+    scorer.score();
+
+    assertTrue(weight instanceof LTRScoringQuery.ModelWeight);
+    final LTRScoringQuery.ModelWeight modelWeight = (LTRScoringQuery.ModelWeight) weight;
+    return modelWeight;
+
+  }
+
+  @Test
+  public void testLTRScoringQueryEquality() throws ModelException {
+    final List<Feature> features = makeFeatures(new int[] {0, 1, 2});
+    final List<Normalizer> norms =
+        new ArrayList<Normalizer>(
+            Collections.nCopies(features.size(),IdentityNormalizer.INSTANCE));
+    final List<Feature> allFeatures = makeFeatures(
+        new int[] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9});
+    final Map<String,Object> modelParams = makeFeatureWeights(features);
+
+    final LTRScoringModel algorithm1 = TestLinearModel.createLinearModel(
+        "testModelName",
+        features, norms, "testStoreName", allFeatures, modelParams);
+
+    final LTRScoringQuery m0 = new LTRScoringQuery(algorithm1);
+
+    final HashMap<String,String[]> externalFeatureInfo = new HashMap<>();
+    externalFeatureInfo.put("queryIntent", new String[] {"company"});
+    externalFeatureInfo.put("user_query", new String[] {"abc"});
+    final LTRScoringQuery m1 = new LTRScoringQuery(algorithm1, externalFeatureInfo, false, null);
+
+    final HashMap<String,String[]> externalFeatureInfo2 = new HashMap<>();
+    externalFeatureInfo2.put("user_query", new String[] {"abc"});
+    externalFeatureInfo2.put("queryIntent", new String[] {"company"});
+    int totalPoolThreads = 10, numThreadsPerRequest = 10;
+    LTRThreadModule threadManager = new LTRThreadModule(totalPoolThreads, numThreadsPerRequest);
+    final LTRScoringQuery m2 = new LTRScoringQuery(algorithm1, externalFeatureInfo2, false, threadManager);
+
+
+    // Models with same algorithm and efis, just in different order should be the same
+    assertEquals(m1, m2);
+    assertEquals(m1.hashCode(), m2.hashCode());
+
+    // Models with same algorithm, but different efi content should not match
+    assertFalse(m1.equals(m0));
+    assertFalse(m1.hashCode() == m0.hashCode());
+
+
+    final LTRScoringModel algorithm2 = TestLinearModel.createLinearModel(
+        "testModelName2",
+        features, norms, "testStoreName", allFeatures, modelParams);
+    final LTRScoringQuery m3 = new LTRScoringQuery(algorithm2);
+
+    assertFalse(m1.equals(m3));
+    assertFalse(m1.hashCode() == m3.hashCode());
+
+    final LTRScoringModel algorithm3 = TestLinearModel.createLinearModel(
+        "testModelName",
+        features, norms, "testStoreName3", allFeatures, modelParams);
+    final LTRScoringQuery m4 = new LTRScoringQuery(algorithm3);
+
+    assertFalse(m1.equals(m4));
+    assertFalse(m1.hashCode() == m4.hashCode());
+  }
+
+
+  @Test
+  public void testLTRScoringQuery() throws IOException, ModelException {
+    final Directory dir = newDirectory();
+    final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+
+    Document doc = new Document();
+    doc.add(newStringField("id", "0", Field.Store.YES));
+    doc.add(newTextField("field", "wizard the the the the the oz",
+        Field.Store.NO));
+    doc.add(new FloatDocValuesField("final-score", 1.0f));
+
+    w.addDocument(doc);
+    doc = new Document();
+    doc.add(newStringField("id", "1", Field.Store.YES));
+    // 1 extra token, but wizard and oz are close.
+    doc.add(newTextField("field", "wizard oz the the the the the the",
+        Field.Store.NO));
+    doc.add(new FloatDocValuesField("final-score", 2.0f));
+    w.addDocument(doc);
+
+    final IndexReader r = w.getReader();
+    w.close();
+
+    // Do ordinary BooleanQuery:
+    final BooleanQuery.Builder bqBuilder = new BooleanQuery.Builder();
+    bqBuilder.add(new TermQuery(new Term("field", "wizard")), BooleanClause.Occur.SHOULD);
+    bqBuilder.add(new TermQuery(new Term("field", "oz")), BooleanClause.Occur.SHOULD);
+    final IndexSearcher searcher = getSearcher(r);
+    // first run the standard query
+    final TopDocs hits = searcher.search(bqBuilder.build(), 10);
+    assertEquals(2, hits.totalHits);
+    assertEquals("0", searcher.doc(hits.scoreDocs[0].doc).get("id"));
+    assertEquals("1", searcher.doc(hits.scoreDocs[1].doc).get("id"));
+
+    List<Feature> features = makeFeatures(new int[] {0, 1, 2});
+    final List<Feature> allFeatures = makeFeatures(new int[] {0, 1, 2, 3, 4, 5,
+        6, 7, 8, 9});
+    List<Normalizer> norms =
+        new ArrayList<Normalizer>(
+            Collections.nCopies(features.size(),IdentityNormalizer.INSTANCE));
+    LTRScoringModel ltrScoringModel = TestLinearModel.createLinearModel("test",
+        features, norms, "test", allFeatures,
+        makeFeatureWeights(features));
+
+    LTRScoringQuery.ModelWeight modelWeight = performQuery(hits, searcher,
+        hits.scoreDocs[0].doc, new LTRScoringQuery(ltrScoringModel));
+    assertEquals(3, modelWeight.getModelFeatureValuesNormalized().length);
+
+    for (int i = 0; i < 3; i++) {
+      assertEquals(i, modelWeight.getModelFeatureValuesNormalized()[i], 0.0001);
+    }
+    int[] posVals = new int[] {0, 1, 2};
+    int pos = 0;
+    for (LTRScoringQuery.FeatureInfo fInfo:modelWeight.getFeaturesInfo()) {
+        if (fInfo == null){
+          continue;
+        }
+        assertEquals(posVals[pos], fInfo.getValue(), 0.0001);
+        assertEquals("f"+posVals[pos], fInfo.getName());
+        pos++;
+    }
+
+    final int[] mixPositions = new int[] {8, 2, 4, 9, 0};
+    features = makeFeatures(mixPositions);
+    norms =
+        new ArrayList<Normalizer>(
+            Collections.nCopies(features.size(),IdentityNormalizer.INSTANCE));
+    ltrScoringModel = TestLinearModel.createLinearModel("test",
+        features, norms, "test", allFeatures, makeFeatureWeights(features));
+
+    modelWeight = performQuery(hits, searcher, hits.scoreDocs[0].doc,
+        new LTRScoringQuery(ltrScoringModel));
+    assertEquals(mixPositions.length,
+        modelWeight.getModelFeatureWeights().length);
+
+    for (int i = 0; i < mixPositions.length; i++) {
+      assertEquals(mixPositions[i],
+          modelWeight.getModelFeatureValuesNormalized()[i], 0.0001);
+    }
+
+    final ModelException expectedModelException = new ModelException("no features declared for model test");
+    final int[] noPositions = new int[] {};
+    features = makeFeatures(noPositions);
+    norms =
+        new ArrayList<Normalizer>(
+            Collections.nCopies(features.size(),IdentityNormalizer.INSTANCE));
+    try {
+      ltrScoringModel = TestLinearModel.createLinearModel("test",
+          features, norms, "test", allFeatures, makeFeatureWeights(features));
+      fail("unexpectedly got here instead of catching "+expectedModelException);
+      modelWeight = performQuery(hits, searcher, hits.scoreDocs[0].doc,
+          new LTRScoringQuery(ltrScoringModel));
+      assertEquals(0, modelWeight.getModelFeatureWeights().length);
+    } catch (ModelException actualModelException) {
+      assertEquals(expectedModelException.toString(), actualModelException.toString());
+    }
+
+    // test normalizers
+    features = makeFilterFeatures(mixPositions);
+    final Normalizer norm = new Normalizer() {
+
+      @Override
+      public float normalize(float value) {
+        return 42.42f;
+      }
+
+      @Override
+      public LinkedHashMap<String,Object> paramsToMap() {
+        return null;
+      }
+
+      @Override
+      protected void validate() throws NormalizerException {
+      }
+
+    };
+    norms =
+        new ArrayList<Normalizer>(
+            Collections.nCopies(features.size(),norm));
+    final LTRScoringModel normMeta = TestLinearModel.createLinearModel("test",
+        features, norms, "test", allFeatures,
+        makeFeatureWeights(features));
+
+    modelWeight = performQuery(hits, searcher, hits.scoreDocs[0].doc,
+        new LTRScoringQuery(normMeta));
+    normMeta.normalizeFeaturesInPlace(modelWeight.getModelFeatureValuesNormalized());
+    assertEquals(mixPositions.length,
+        modelWeight.getModelFeatureWeights().length);
+    for (int i = 0; i < mixPositions.length; i++) {
+      assertEquals(42.42f, modelWeight.getModelFeatureValuesNormalized()[i], 0.0001);
+    }
+    r.close();
+    dir.close();
+
+  }
+
+}

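testLTRScoringQueryEquality pins the equals/hashCode contract that makes LTRScoringQuery usable as a cache key: two instances are equal only when they share the scoring model (name and feature store) and the same external feature info entries, independent of the efi map's insertion order or of the thread module. Compressed, reusing the objects built in the test:

    assertEquals(m1, m2);        // same model, same efi entries in a different order, different threading
    assertFalse(m1.equals(m0));  // same model but m0 carries no efi
    assertFalse(m1.equals(m3));  // different model name
    assertFalse(m1.equals(m4));  // different feature store name
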
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRWithFacet.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRWithFacet.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRWithFacet.java
new file mode 100644
index 0000000..ab519ec
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRWithFacet.java
@@ -0,0 +1,103 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.ltr;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.ltr.feature.SolrFeature;
+import org.apache.solr.ltr.model.LinearModel;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestLTRWithFacet extends TestRerankBase {
+
+  @BeforeClass
+  public static void before() throws Exception {
+    setuptest("solrconfig-ltr.xml", "schema.xml");
+
+    assertU(adoc("id", "1", "title", "a1", "description", "E", "popularity",
+        "1"));
+    assertU(adoc("id", "2", "title", "a1 b1", "description",
+        "B", "popularity", "2"));
+    assertU(adoc("id", "3", "title", "a1 b1 c1", "description", "B", "popularity",
+        "3"));
+    assertU(adoc("id", "4", "title", "a1 b1 c1 d1", "description", "B", "popularity",
+        "4"));
+    assertU(adoc("id", "5", "title", "a1 b1 c1 d1 e1", "description", "E", "popularity",
+        "5"));
+    assertU(adoc("id", "6", "title", "a1 b1 c1 d1 e1 f1", "description", "B",
+        "popularity", "6"));
+    assertU(adoc("id", "7", "title", "a1 b1 c1 d1 e1 f1 g1", "description",
+        "C", "popularity", "7"));
+    assertU(adoc("id", "8", "title", "a1 b1 c1 d1 e1 f1 g1 h1", "description",
+        "D", "popularity", "8"));
+    assertU(commit());
+  }
+
+  @Test
+  public void testRankingSolrFacet() throws Exception {
+    // before();
+    loadFeature("powpularityS", SolrFeature.class.getCanonicalName(),
+        "{\"q\":\"{!func}pow(popularity,2)\"}");
+
+    loadModel("powpularityS-model", LinearModel.class.getCanonicalName(),
+        new String[] {"powpularityS"}, "{\"weights\":{\"powpularityS\":1.0}}");
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("title:a1");
+    query.add("fl", "*, score");
+    query.add("rows", "4");
+    query.add("facet", "true");
+    query.add("facet.field", "description");
+
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==8");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='1'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='2'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='3'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='4'");
+    // Normal term match
+    assertJQ("/query" + query.toQueryString(), ""
+        + "/facet_counts/facet_fields/description=="
+        + "['b', 4, 'e', 2, 'c', 1, 'd', 1]");
+
+    query.add("rq", "{!ltr model=powpularityS-model reRankDocs=4}");
+    query.set("debugQuery", "on");
+
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==8");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='4'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==16.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='3'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/score==9.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='2'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/score==4.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='1'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/score==1.0");
+
+    assertJQ("/query" + query.toQueryString(), ""
+        + "/facet_counts/facet_fields/description=="
+        + "['b', 4, 'e', 2, 'c', 1, 'd', 1]");
+    // aftertest();
+
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    aftertest();
+  }
+
+}

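The take-away of testRankingSolrFacet is that facet counts are computed over the full first-pass match set, so re-ranking the top hits must leave them untouched. In sketch form, with the values from the test:

    query.add("rq", "{!ltr model=powpularityS-model reRankDocs=4}");
    // doc order and scores change, but the facet counts stay
    // ['b', 4, 'e', 2, 'c', 1, 'd', 1] before and after re-ranking
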
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRWithSort.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRWithSort.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRWithSort.java
new file mode 100644
index 0000000..1fbe1d5
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRWithSort.java
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.ltr;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.ltr.feature.SolrFeature;
+import org.apache.solr.ltr.model.LinearModel;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestLTRWithSort extends TestRerankBase {
+
+  @BeforeClass
+  public static void before() throws Exception {
+    setuptest("solrconfig-ltr.xml", "schema.xml");
+    assertU(adoc("id", "1", "title", "a1", "description", "E", "popularity",
+        "1"));
+    assertU(adoc("id", "2", "title", "a1 b1", "description",
+        "B", "popularity", "2"));
+    assertU(adoc("id", "3", "title", "a1 b1 c1", "description", "B", "popularity",
+        "3"));
+    assertU(adoc("id", "4", "title", "a1 b1 c1 d1", "description", "B", "popularity",
+        "4"));
+    assertU(adoc("id", "5", "title", "a1 b1 c1 d1 e1", "description", "E", "popularity",
+        "5"));
+    assertU(adoc("id", "6", "title", "a1 b1 c1 d1 e1 f1", "description", "B",
+        "popularity", "6"));
+    assertU(adoc("id", "7", "title", "a1 b1 c1 d1 e1 f1 g1", "description",
+        "C", "popularity", "7"));
+    assertU(adoc("id", "8", "title", "a1 b1 c1 d1 e1 f1 g1 h1", "description",
+        "D", "popularity", "8"));
+    assertU(commit());
+  }
+
+  @Test
+  public void testRankingSolrSort() throws Exception {
+    // before();
+    loadFeature("powpularityS", SolrFeature.class.getCanonicalName(),
+        "{\"q\":\"{!func}pow(popularity,2)\"}");
+
+    loadModel("powpularityS-model", LinearModel.class.getCanonicalName(),
+        new String[] {"powpularityS"}, "{\"weights\":{\"powpularityS\":1.0}}");
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("title:a1");
+    query.add("fl", "*, score");
+    query.add("rows", "4");
+
+    // Normal term match
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==8");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='1'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='2'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='3'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='4'");
+
+    //Add sort
+    query.add("sort", "description desc");
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==8");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='1'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='5'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='8'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='7'");
+
+    query.add("rq", "{!ltr model=powpularityS-model reRankDocs=4}");
+    query.set("debugQuery", "on");
+
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==8");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='8'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==64.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='7'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/score==49.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='5'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/score==25.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='1'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/score==1.0");
+
+    // aftertest();
+
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    aftertest();
+  }
+
+}

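testRankingSolrSort shows that an explicit sort only governs the first pass: rq then re-orders the top reRankDocs by model score. With sort=description desc the first pass yields docs 1, 5, 8, 7, and the pow(popularity,2) model re-ranks those four:

    // pow(8,2)=64.0 > pow(7,2)=49.0 > pow(5,2)=25.0 > pow(1,2)=1.0
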
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestParallelWeightCreation.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestParallelWeightCreation.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestParallelWeightCreation.java
new file mode 100644
index 0000000..f4c21fd
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestParallelWeightCreation.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.junit.Test;
+
+public class TestParallelWeightCreation extends TestRerankBase{
+
+  @Test
+  public void testLTRScoringQueryParallelWeightCreationResultOrder() throws Exception {
+    setuptest("solrconfig-ltr_Th10_10.xml", "schema.xml");
+
+    assertU(adoc("id", "1", "title", "w1 w3", "description", "w1", "popularity",
+        "1"));
+    assertU(adoc("id", "2", "title", "w2", "description", "w2", "popularity",
+        "2"));
+    assertU(adoc("id", "3", "title", "w3", "description", "w3", "popularity",
+        "3"));
+    assertU(adoc("id", "4", "title", "w4 w3", "description", "w4", "popularity",
+        "4"));
+    assertU(adoc("id", "5", "title", "w5", "description", "w5", "popularity",
+        "5"));
+    assertU(commit());
+
+    loadFeatures("external_features.json");
+    loadModels("external_model.json");
+    loadModels("external_model_store.json");
+
+    // check to make sure that the order of results will be the same when using parallel weight creation
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("*:*");
+    query.add("fl", "*,score");
+    query.add("rows", "4");
+
+    query.add("rq", "{!ltr reRankDocs=4 model=externalmodel efi.user_query=w3}");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='1'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='3'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='4'");
+    aftertest();
+  }
+
+  @Test
+  public void testLTRQParserThreadInitialization() throws Exception {
+    // a negative numThreadsPerRequest should throw an exception
+    String msg1 = null;
+    try {
+      new LTRThreadModule(1, -1);
+    } catch (IllegalArgumentException iae) {
+      msg1 = iae.getMessage();
+    }
+    assertEquals("numThreadsPerRequest cannot be less than 1", msg1);
+
+    // numThreadsPerRequest greater than totalPoolThreads should also throw an exception
+    String msg2 = null;
+    try {
+      new LTRThreadModule(1, 2);
+    } catch (IllegalArgumentException iae) {
+      msg2 = iae.getMessage();
+    }
+    assertEquals("numThreadsPerRequest cannot be greater than totalPoolThreads", msg2);
+  }
+}

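testLTRQParserThreadInitialization pins the constructor validation of LTRThreadModule(totalPoolThreads, numThreadsPerRequest). A usage sketch of the combinations it allows and rejects, semantics as exercised above:

    new LTRThreadModule(10, 10); // valid: numThreadsPerRequest <= totalPoolThreads
    new LTRThreadModule(1, -1);  // IllegalArgumentException: "numThreadsPerRequest cannot be less than 1"
    new LTRThreadModule(1, 2);   // IllegalArgumentException: "numThreadsPerRequest cannot be greater than totalPoolThreads"
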
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestRerankBase.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestRerankBase.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestRerankBase.java
new file mode 100644
index 0000000..4914d28
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestRerankBase.java
@@ -0,0 +1,429 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.net.URL;
+import java.nio.file.Files;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Scanner;
+import java.util.SortedMap;
+import java.util.TreeMap;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.solr.common.params.CommonParams;
+import org.apache.solr.common.util.ContentStream;
+import org.apache.solr.common.util.ContentStreamBase;
+import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.ltr.feature.Feature;
+import org.apache.solr.ltr.feature.FeatureException;
+import org.apache.solr.ltr.feature.ValueFeature;
+import org.apache.solr.ltr.model.LTRScoringModel;
+import org.apache.solr.ltr.model.LinearModel;
+import org.apache.solr.ltr.model.ModelException;
+import org.apache.solr.ltr.store.rest.ManagedFeatureStore;
+import org.apache.solr.ltr.store.rest.ManagedModelStore;
+import org.apache.solr.request.SolrQueryRequestBase;
+import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.rest.ManagedResourceStorage;
+import org.apache.solr.rest.SolrSchemaRestApi;
+import org.apache.solr.util.RestTestBase;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.noggit.ObjectBuilder;
+import org.restlet.ext.servlet.ServerServlet;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class TestRerankBase extends RestTestBase {
+
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  protected static final SolrResourceLoader solrResourceLoader = new SolrResourceLoader();
+
+  protected static File tmpSolrHome;
+  protected static File tmpConfDir;
+
+  public static final String FEATURE_FILE_NAME = "_schema_feature-store.json";
+  public static final String MODEL_FILE_NAME = "_schema_model-store.json";
+  public static final String PARENT_ENDPOINT = "/schema/*";
+
+  protected static final String COLLECTION = "collection1";
+  protected static final String CONF_DIR = COLLECTION + "/conf";
+
+  protected static File fstorefile = null;
+  protected static File mstorefile = null;
+
+  public static void setuptest() throws Exception {
+    setuptest("solrconfig-ltr.xml", "schema.xml");
+    bulkIndex();
+  }
+
+  public static void setupPersistenttest() throws Exception {
+    setupPersistentTest("solrconfig-ltr.xml", "schema.xml");
+    bulkIndex();
+  }
+
+  public static ManagedFeatureStore getManagedFeatureStore() {
+    return ManagedFeatureStore.getManagedFeatureStore(h.getCore());
+  }
+
+  public static ManagedModelStore getManagedModelStore() {
+    return ManagedModelStore.getManagedModelStore(h.getCore());
+  }
+
+  protected static SortedMap<ServletHolder,String>  setupTestInit(
+      String solrconfig, String schema,
+      boolean isPersistent) throws Exception {
+    tmpSolrHome = createTempDir().toFile();
+    tmpConfDir = new File(tmpSolrHome, CONF_DIR);
+    tmpConfDir.deleteOnExit();
+    FileUtils.copyDirectory(new File(TEST_HOME()),
+        tmpSolrHome.getAbsoluteFile());
+
+    final File fstore = new File(tmpConfDir, FEATURE_FILE_NAME);
+    final File mstore = new File(tmpConfDir, MODEL_FILE_NAME);
+
+    if (isPersistent) {
+      fstorefile = fstore;
+      mstorefile = mstore;
+    }
+
+    if (fstore.exists()) {
+      log.info("remove feature store config file in {}",
+          fstore.getAbsolutePath());
+      Files.delete(fstore.toPath());
+    }
+    if (mstore.exists()) {
+      log.info("remove model store config file in {}",
+          mstore.getAbsolutePath());
+      Files.delete(mstore.toPath());
+    }
+    if (!solrconfig.equals("solrconfig.xml")) {
+      FileUtils.copyFile(new File(tmpSolrHome.getAbsolutePath()
+          + "/collection1/conf/" + solrconfig),
+          new File(tmpSolrHome.getAbsolutePath()
+              + "/collection1/conf/solrconfig.xml"));
+    }
+    if (!schema.equals("schema.xml")) {
+      FileUtils.copyFile(new File(tmpSolrHome.getAbsolutePath()
+          + "/collection1/conf/" + schema),
+          new File(tmpSolrHome.getAbsolutePath()
+              + "/collection1/conf/schema.xml"));
+    }
+
+    final SortedMap<ServletHolder,String> extraServlets = new TreeMap<>();
+    final ServletHolder solrRestApi = new ServletHolder("SolrSchemaRestApi",
+        ServerServlet.class);
+    solrRestApi.setInitParameter("org.restlet.application",
+        SolrSchemaRestApi.class.getCanonicalName());
+    solrRestApi.setInitParameter("storageIO",
+        ManagedResourceStorage.InMemoryStorageIO.class.getCanonicalName());
+    extraServlets.put(solrRestApi, PARENT_ENDPOINT);
+
+    System.setProperty("managed.schema.mutable", "true");
+
+    return extraServlets;
+  }
+
+  public static void setuptest(String solrconfig, String schema)
+      throws Exception {
+    initCore(solrconfig, schema);
+
+    SortedMap<ServletHolder,String> extraServlets =
+        setupTestInit(solrconfig,schema,false);
+    System.setProperty("enable.update.log", "false");
+
+    createJettyAndHarness(tmpSolrHome.getAbsolutePath(), solrconfig, schema,
+        "/solr", true, extraServlets);
+  }
+
+  public static void setupPersistentTest(String solrconfig, String schema)
+      throws Exception {
+    initCore(solrconfig, schema);
+
+    SortedMap<ServletHolder,String> extraServlets =
+        setupTestInit(solrconfig,schema,true);
+
+    createJettyAndHarness(tmpSolrHome.getAbsolutePath(), solrconfig, schema,
+        "/solr", true, extraServlets);
+  }
+
+  protected static void aftertest() throws Exception {
+    restTestHarness.close();
+    restTestHarness = null;
+    jetty.stop();
+    jetty = null;
+    FileUtils.deleteDirectory(tmpSolrHome);
+    System.clearProperty("managed.schema.mutable");
+    // System.clearProperty("enable.update.log");
+
+
+  }
+
+  public static void makeRestTestHarnessNull() {
+    restTestHarness = null;
+  }
+
+  /** produces a model encoded in json **/
+  public static String getModelInJson(String name, String type,
+      String[] features, String fstore, String params) {
+    final StringBuilder sb = new StringBuilder();
+    sb.append("{\n");
+    sb.append("\"name\":").append('"').append(name).append('"').append(",\n");
+    sb.append("\"store\":").append('"').append(fstore).append('"')
+        .append(",\n");
+    sb.append("\"class\":").append('"').append(type).append('"').append(",\n");
+    sb.append("\"features\":").append('[');
+    for (final String feature : features) {
+      sb.append("\n\t{ ");
+      sb.append("\"name\":").append('"').append(feature).append('"')
+          .append("},");
+    }
+    sb.deleteCharAt(sb.length() - 1);
+    sb.append("\n]\n");
+    if (params != null) {
+      sb.append(",\n");
+      sb.append("\"params\":").append(params);
+    }
+    sb.append("\n}\n");
+    return sb.toString();
+  }
+
+  /** produces a feature encoded in json **/
+  public static String getFeatureInJson(String name, String type,
+      String fstore, String params) {
+    final StringBuilder sb = new StringBuilder();
+    sb.append("{\n");
+    sb.append("\"name\":").append('"').append(name).append('"').append(",\n");
+    sb.append("\"store\":").append('"').append(fstore).append('"')
+        .append(",\n");
+    sb.append("\"class\":").append('"').append(type).append('"');
+    if (params != null) {
+      sb.append(",\n");
+      sb.append("\"params\":").append(params);
+    }
+    sb.append("\n}\n");
+    return sb.toString();
+  }
+
+  protected static void loadFeature(String name, String type, String params)
+      throws Exception {
+    final String feature = getFeatureInJson(name, type, "test", params);
+    log.info("loading feauture \n{} ", feature);
+    assertJPut(ManagedFeatureStore.REST_END_POINT, feature,
+        "/responseHeader/status==0");
+  }
+
+  protected static void loadFeature(String name, String type, String fstore,
+      String params) throws Exception {
+    final String feature = getFeatureInJson(name, type, fstore, params);
+    log.info("loading feauture \n{} ", feature);
+    assertJPut(ManagedFeatureStore.REST_END_POINT, feature,
+        "/responseHeader/status==0");
+  }
+
+  protected static void loadModel(String name, String type, String[] features,
+      String params) throws Exception {
+    loadModel(name, type, features, "test", params);
+  }
+
+  protected static void loadModel(String name, String type, String[] features,
+      String fstore, String params) throws Exception {
+    final String model = getModelInJson(name, type, features, fstore, params);
+    log.info("loading model \n{} ", model);
+    assertJPut(ManagedModelStore.REST_END_POINT, model,
+        "/responseHeader/status==0");
+  }
+
+  public static void loadModels(String fileName) throws Exception {
+    final URL url = TestRerankBase.class.getResource("/modelExamples/"
+        + fileName);
+    final String multipleModels = FileUtils.readFileToString(
+        new File(url.toURI()), "UTF-8");
+
+    assertJPut(ManagedModelStore.REST_END_POINT, multipleModels,
+        "/responseHeader/status==0");
+  }
+
+  public static LTRScoringModel createModelFromFiles(String modelFileName,
+      String featureFileName) throws ModelException, Exception {
+    URL url = TestRerankBase.class.getResource("/modelExamples/"
+        + modelFileName);
+    final String modelJson = FileUtils.readFileToString(new File(url.toURI()),
+        "UTF-8");
+    final ManagedModelStore ms = getManagedModelStore();
+
+    url = TestRerankBase.class.getResource("/featureExamples/"
+        + featureFileName);
+    final String featureJson = FileUtils.readFileToString(
+        new File(url.toURI()), "UTF-8");
+
+    Object parsedFeatureJson = null;
+    try {
+      parsedFeatureJson = ObjectBuilder.fromJSON(featureJson);
+    } catch (final IOException ioExc) {
+      throw new ModelException("ObjectBuilder failed parsing json", ioExc);
+    }
+
+    final ManagedFeatureStore fs = getManagedFeatureStore();
+    // fs.getFeatureStore(null).clear();
+    fs.doDeleteChild(null, "*"); // is this safe?
+    // based on the need to call this, it doesn't look like
+    // "getNewManagedFeatureStore()"
+    // actually returns a new feature store each time
+    fs.applyUpdatesToManagedData(parsedFeatureJson);
+    ms.setManagedFeatureStore(fs); // can we skip this and just use fs directly below?
+
+    final LTRScoringModel ltrScoringModel = ManagedModelStore.fromLTRScoringModelMap(
+        solrResourceLoader, mapFromJson(modelJson), ms.getManagedFeatureStore());
+    ms.addModel(ltrScoringModel);
+    return ltrScoringModel;
+  }
+
+  @SuppressWarnings("unchecked")
+  private static Map<String,Object> mapFromJson(String json) throws ModelException {
+    Object parsedJson = null;
+    try {
+      parsedJson = ObjectBuilder.fromJSON(json);
+    } catch (final IOException ioExc) {
+      throw new ModelException("ObjectBuilder failed parsing json", ioExc);
+    }
+    return (Map<String,Object>) parsedJson;
+  }
+
+  public static void loadFeatures(String fileName) throws Exception {
+    final URL url = TestRerankBase.class.getResource("/featureExamples/"
+        + fileName);
+    final String multipleFeatures = FileUtils.readFileToString(
+        new File(url.toURI()), "UTF-8");
+    log.info("send \n{}", multipleFeatures);
+
+    assertJPut(ManagedFeatureStore.REST_END_POINT, multipleFeatures,
+        "/responseHeader/status==0");
+  }
+
+  protected List<Feature> getFeatures(List<String> names)
+      throws FeatureException {
+    final List<Feature> features = new ArrayList<>();
+    int pos = 0;
+    for (final String name : names) {
+      final Map<String,Object> params = new HashMap<String,Object>();
+      params.put("value", 10);
+      final Feature f = Feature.getInstance(solrResourceLoader,
+          ValueFeature.class.getCanonicalName(),
+          name, params);
+      f.setIndex(pos);
+      features.add(f);
+      ++pos;
+    }
+    return features;
+  }
+
+  protected List<Feature> getFeatures(String[] names) throws FeatureException {
+    return getFeatures(Arrays.asList(names));
+  }
+
+  protected static void loadModelAndFeatures(String name, int allFeatureCount,
+      int modelFeatureCount) throws Exception {
+    final String[] features = new String[modelFeatureCount];
+    final String[] weights = new String[modelFeatureCount];
+    for (int i = 0; i < allFeatureCount; i++) {
+      final String featureName = "c" + i;
+      if (i < modelFeatureCount) {
+        features[i] = featureName;
+        weights[i] = "\"" + featureName + "\":1.0";
+      }
+      loadFeature(featureName, ValueFeature.ValueFeatureWeight.class.getCanonicalName(),
+          "{\"value\":" + i + "}");
+    }
+
+    loadModel(name, LinearModel.class.getCanonicalName(), features,
+        "{\"weights\":{" + StringUtils.join(weights, ",") + "}}");
+  }
+
+  protected static void bulkIndex() throws Exception {
+    assertU(adoc("title", "bloomberg different bla", "description",
+        "bloomberg", "id", "6", "popularity", "1"));
+    assertU(adoc("title", "bloomberg bloomberg ", "description", "bloomberg",
+        "id", "7", "popularity", "2"));
+    assertU(adoc("title", "bloomberg bloomberg bloomberg", "description",
+        "bloomberg", "id", "8", "popularity", "3"));
+    assertU(adoc("title", "bloomberg bloomberg bloomberg bloomberg",
+        "description", "bloomberg", "id", "9", "popularity", "5"));
+    assertU(commit());
+  }
+
+  protected static void bulkIndex(String filePath) throws Exception {
+    final SolrQueryRequestBase req = lrf.makeRequest(
+        CommonParams.STREAM_CONTENTTYPE, "application/xml");
+
+    final List<ContentStream> streams = new ArrayList<ContentStream>();
+    final File file = new File(filePath);
+    streams.add(new ContentStreamBase.FileStream(file));
+    req.setContentStreams(streams);
+
+    try {
+      final SolrQueryResponse res = new SolrQueryResponse();
+      h.updater.handleRequest(req, res);
+    } catch (final Throwable ex) {
+      // Ignore: just log the exception and continue
+      log.error(ex.getMessage(), ex);
+    }
+    assertU(commit());
+
+  }
+
+  protected static void buildIndexUsingAdoc(String filepath)
+      throws FileNotFoundException {
+    final Scanner scn = new Scanner(new File(filepath), "UTF-8");
+    StringBuffer buff = new StringBuffer();
+    scn.nextLine();
+    scn.nextLine();
+    scn.nextLine(); // Skip the first 3 lines then add everything else
+    final ArrayList<String> docsToAdd = new ArrayList<String>();
+    while (scn.hasNext()) {
+      String curLine = scn.nextLine();
+      if (curLine.contains("</doc>")) {
+        buff.append(curLine + "\n");
+        docsToAdd.add(buff.toString().replace("</add>", "")
+            .replace("<doc>", "<add>\n<doc>")
+            .replace("</doc>", "</doc>\n</add>"));
+        if (!scn.hasNext()) {
+          break;
+        } else {
+          curLine = scn.nextLine();
+        }
+        buff = new StringBuffer();
+      }
+      buff.append(curLine + "\n");
+    }
+    for (final String doc : docsToAdd) {
+      assertU(doc.trim());
+    }
+    assertU(commit());
+    scn.close();
+  }
+
+}
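
Taken together, these helpers give LTR tests a compact lifecycle: stand up a jetty
harness against the LTR solrconfig, PUT features and models into the managed stores,
and tear everything down again. A minimal sketch of a subclass driving them (the
class name is illustrative; the helper signatures and resource file names are the
ones defined above):

    import org.junit.AfterClass;
    import org.junit.BeforeClass;

    public class MyLtrTest extends TestRerankBase {

      @BeforeClass
      public static void before() throws Exception {
        setuptest();                            // solrconfig-ltr.xml + schema.xml + bulk index
        loadFeatures("external_features.json"); // PUT to the managed feature store
        loadModels("external_model.json");      // PUT to the managed model store
      }

      @AfterClass
      public static void after() throws Exception {
        aftertest(); // close harness, stop jetty, delete the temp solr home
      }
    }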


[07/50] [abbrv] lucene-solr:apiv2: SOLR-8146: removing the unused class

Posted by sa...@apache.org.
SOLR-8146: removing the unused class


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/e6ce903a
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/e6ce903a
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/e6ce903a

Branch: refs/heads/apiv2
Commit: e6ce903a76b2fd6bb28dc76805add6b37a7814eb
Parents: 0feca1a
Author: Noble Paul <no...@apache.org>
Authored: Fri Oct 28 08:19:55 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Fri Oct 28 08:19:55 2016 +0530

----------------------------------------------------------------------
 .../java/org/apache/solr/cloud/rule/Snitch.java | 34 --------------------
 1 file changed, 34 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e6ce903a/solr/core/src/java/org/apache/solr/cloud/rule/Snitch.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/rule/Snitch.java b/solr/core/src/java/org/apache/solr/cloud/rule/Snitch.java
deleted file mode 100644
index e47184f..0000000
--- a/solr/core/src/java/org/apache/solr/cloud/rule/Snitch.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.cloud.rule;
-
-import java.util.Set;
-
-import com.google.common.collect.ImmutableSet;
-
-/**
- *
- */
-public abstract class Snitch {
-  static Set<Class> WELL_KNOWN_SNITCHES = ImmutableSet.of(ImplicitSnitch.class);
-
-
-  public abstract void getTags(String solrNode, Set<String> requestedTags, org.apache.solr.common.cloud.rule.SnitchContext ctx);
-
-  public abstract boolean isKnownTag(String tag);
-
-}
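
With this duplicate gone, the org.apache.solr.common.cloud.rule variant (which the
SnitchContext parameter above already referenced) is the one abstraction left to
extend. A rough sketch of what an implementation of those two methods looks like
against it; the class, package, and tag name are illustrative, and it assumes
SnitchContext exposes its tag map via getTags():

    package org.example.rules; // hypothetical package

    import java.util.Set;

    import org.apache.solr.common.cloud.rule.Snitch;
    import org.apache.solr.common.cloud.rule.SnitchContext;

    public class RackSnitch extends Snitch {
      private static final String RACK = "rack";

      @Override
      public void getTags(String solrNode, Set<String> requestedTags, SnitchContext ctx) {
        if (requestedTags.contains(RACK)) {
          // a real snitch would consult topology metadata; this just
          // derives a pseudo-rack from the node's host part
          ctx.getTags().put(RACK, solrNode.split(":")[0]);
        }
      }

      @Override
      public boolean isKnownTag(String tag) {
        return RACK.equals(tag);
      }
    }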


[18/50] [abbrv] lucene-solr:apiv2: SOLR-9681: tests: add filter after block join test

Posted by sa...@apache.org.
SOLR-9681: tests: add filter after block join test


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d8d3a8b9
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d8d3a8b9
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d8d3a8b9

Branch: refs/heads/apiv2
Commit: d8d3a8b9b8e7345c4a02a62f7e321c4e9a2440bf
Parents: 650276e
Author: yonik <yo...@apache.org>
Authored: Sat Oct 29 17:34:05 2016 -0400
Committer: yonik <yo...@apache.org>
Committed: Sat Oct 29 17:34:05 2016 -0400

----------------------------------------------------------------------
 .../apache/solr/search/facet/TestJsonFacets.java    | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8d3a8b9/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
index eafa134..57e3ed1 100644
--- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
+++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
@@ -1416,6 +1416,22 @@ public class TestJsonFacets extends SolrTestCaseHS {
             "}"
     );
 
+    // test filter after block join
+    client.testJQ(params(p, "q", "*:*"
+        , "json.facet", "{ " +
+            "pages1:{type:terms, field:v_t, domain:{blockChildren:'type_s:book'}, filter:'*:*' }" +
+            ",pages2:{type:terms, field:v_t, domain:{blockChildren:'type_s:book'}, filter:'-id:3.1' }" +
+            ",books:{type:terms, field:v_t, domain:{blockParent:'type_s:book'}, filter:'*:*' }" +
+            ",books2:{type:terms, field:v_t, domain:{blockParent:'type_s:book'}, filter:'id:1' }" +
+            "}"
+        )
+        , "facets=={ count:10" +
+            ", pages1:{ buckets:[ {val:y,count:4},{val:x,count:3},{val:z,count:3} ] }" +
+            ", pages2:{ buckets:[ {val:y,count:4},{val:z,count:3},{val:x,count:2} ] }" +
+            ", books:{ buckets:[ {val:q,count:3},{val:e,count:2},{val:w,count:2} ] }" +
+            ", books2:{ buckets:[ {val:q,count:1} ] }" +
+            "}"
+    );
 
   }
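
The interesting bit in the new assertions is that the facet's filter is applied to
the domain produced by the block-join transform, not to the original query. A
standalone sketch of the "pages2" case, mirroring the SolrQuery idiom used elsewhere
in this branch (values copied from the test; executing the query is left out):

    import org.apache.solr.client.solrj.SolrQuery;

    // After mapping each 'type_s:book' parent to its child pages,
    // drop the page with id 3.1, then facet the survivors on v_t.
    final SolrQuery q = new SolrQuery("*:*");
    q.add("json.facet",
        "{ pages2:{ type:terms, field:v_t," +
        "  domain:{ blockChildren:'type_s:book' }, filter:'-id:3.1' } }");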
 


[50/50] [abbrv] lucene-solr:apiv2: merge trunk

Posted by sa...@apache.org.
merge trunk


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/27baf3fb
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/27baf3fb
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/27baf3fb

Branch: refs/heads/apiv2
Commit: 27baf3fb4f3a905b761e504e4caafe0e193b8d6a
Parents: c482b33 be772db
Author: Steve Rowe <sa...@apache.org>
Authored: Wed Nov 2 19:58:12 2016 -0400
Committer: Steve Rowe <sa...@apache.org>
Committed: Wed Nov 2 19:58:12 2016 -0400

----------------------------------------------------------------------
 build.xml                                       |    2 +
 dev-tools/idea/.idea/modules.xml                |    3 +-
 dev-tools/idea/.idea/workspace.xml              |   72 +-
 dev-tools/idea/solr/contrib/langid/langid.iml   |    1 +
 dev-tools/idea/solr/contrib/ltr/ltr.iml         |   37 +
 .../solr/test-framework/solr-test-framework.iml |    2 +
 .../maven/solr/contrib/ltr/pom.xml.template     |   80 +
 dev-tools/maven/solr/contrib/pom.xml.template   |    1 +
 .../maven/solr/test-framework/pom.xml.template  |    5 +
 dev-tools/scripts/addVersion.py                 |   39 +-
 dev-tools/scripts/checkJavadocLinks.py          |    3 +
 dev-tools/scripts/poll-mirrors.py               |   41 +-
 lucene/CHANGES.txt                              |  140 +-
 .../analysis/core/DecimalDigitFilter.java       |    2 +-
 .../lucene/analysis/core/LowerCaseFilter.java   |   41 +
 .../analysis/core/LowerCaseFilterFactory.java   |    1 -
 .../apache/lucene/analysis/core/StopFilter.java |   47 +
 .../lucene/analysis/core/StopFilterFactory.java |    1 -
 .../lucene/analysis/custom/CustomAnalyzer.java  |    2 +-
 .../lucene/analysis/hunspell/Stemmer.java       |    2 +-
 .../lucene/analysis/minhash/MinHashFilter.java  |   17 +-
 .../miscellaneous/ASCIIFoldingFilter.java       |   24 +-
 .../lucene/analysis/standard/package.html       |    4 +
 .../lucene/collation/CollationKeyAnalyzer.java  |    2 +-
 .../core/TestAllAnalyzersHaveFactories.java     |    4 +-
 .../analysis/core/TestDecimalDigitFilter.java   |  149 +-
 .../lucene/analysis/core/TestRandomChains.java  |    5 +-
 .../miscellaneous/TestASCIIFoldingFilter.java   |   13 +
 .../collation/TestCollationDocValuesField.java  |    3 +-
 .../TestICUCollationDocValuesField.java         |    3 +-
 .../analysis/ja/JapaneseNumberFilter.java       |   11 +
 .../lucene/analysis/ja/TestFactories.java       |  203 ++
 .../codecs/lucene53/Lucene53NormsFormat.java    |   91 +
 .../codecs/lucene53/Lucene53NormsProducer.java  |  236 +++
 .../lucene/codecs/lucene53/package-info.java    |   23 +
 .../lucene54/Lucene54DocValuesConsumer.java     |  797 ++++++++
 .../lucene54/Lucene54DocValuesFormat.java       |  186 ++
 .../lucene54/Lucene54DocValuesProducer.java     | 1803 ++++++++++++++++++
 .../lucene/codecs/lucene54/package-info.java    |  403 ++++
 .../lucene/codecs/lucene60/Lucene60Codec.java   |    4 +-
 .../lucene/codecs/lucene62/Lucene62Codec.java   |  176 ++
 .../apache/lucene/codecs/lucene62/package.html  |   25 +
 .../services/org.apache.lucene.codecs.Codec     |    1 +
 .../org.apache.lucene.codecs.DocValuesFormat    |    1 +
 .../lucene50/TestLucene50SegmentInfoFormat.java |   40 +
 .../codecs/lucene53/Lucene53NormsConsumer.java  |  159 ++
 .../codecs/lucene53/Lucene53RWNormsFormat.java  |   31 +
 .../lucene53/TestLucene53NormsFormat.java       |   38 +
 .../lucene54/TestLucene54DocValuesFormat.java   |  640 +++++++
 .../lucene/codecs/lucene60/Lucene60RWCodec.java |   38 +
 .../lucene/codecs/lucene62/Lucene62RWCodec.java |   32 +
 .../index/TestBackwardsCompatibility.java       |   70 +-
 .../lucene/index/TestManyPointsInOldIndex.java  |   74 +
 .../org/apache/lucene/index/index.6.2.1-cfs.zip |  Bin 0 -> 15851 bytes
 .../apache/lucene/index/index.6.2.1-nocfs.zip   |  Bin 0 -> 15845 bytes
 .../org/apache/lucene/index/manypointsindex.zip |  Bin 0 -> 3739 bytes
 .../lucene/index/unsupported.5.5.3-cfs.zip      |  Bin 0 -> 13724 bytes
 .../lucene/index/unsupported.5.5.3-nocfs.zip    |  Bin 0 -> 13725 bytes
 lucene/benchmark/.gitignore                     |    4 +-
 lucene/benchmark/README.enwiki                  |   11 +-
 lucene/benchmark/conf/highlight-profile.alg     |   68 -
 .../conf/highlight-vs-vector-highlight.alg      |   80 -
 lucene/benchmark/conf/highlighters-postings.alg |   65 +
 lucene/benchmark/conf/highlighters-tv.alg       |   64 +
 lucene/benchmark/conf/highlights.alg            |   69 +
 lucene/benchmark/conf/query-phrases.txt         |   10 +
 lucene/benchmark/conf/query-terms.txt           |   10 +
 lucene/benchmark/conf/query-wildcards.txt       |    7 +
 .../benchmark/conf/standard-highlights-notv.alg |   69 -
 .../benchmark/conf/standard-highlights-tv.alg   |   69 -
 .../benchmark/conf/vector-highlight-profile.alg |   68 -
 .../lucene/benchmark/byTask/PerfRunData.java    |    2 +
 .../lucene/benchmark/byTask/feeds/DocMaker.java |    7 +
 .../byTask/tasks/BenchmarkHighlighter.java      |   30 -
 .../benchmark/byTask/tasks/CreateIndexTask.java |    4 +-
 .../lucene/benchmark/byTask/tasks/ReadTask.java |  120 +-
 .../tasks/SearchTravRetHighlightTask.java       |  283 ++-
 .../tasks/SearchTravRetVectorHighlightTask.java |  147 --
 .../benchmark/byTask/TestPerfTasksLogic.java    |  106 -
 .../tasks/CountingHighlighterTestTask.java      |   68 -
 .../blocktreeords/OrdsIntersectTermsEnum.java   |    2 +-
 .../codecs/memory/DirectDocValuesConsumer.java  |   33 +-
 .../codecs/memory/DirectDocValuesProducer.java  |  100 +-
 .../codecs/memory/DirectPostingsFormat.java     |    2 +-
 .../lucene/codecs/memory/FSTOrdTermsReader.java |    2 +-
 .../lucene/codecs/memory/FSTTermsReader.java    |    2 +-
 .../codecs/memory/MemoryDocValuesConsumer.java  |   45 +-
 .../codecs/memory/MemoryDocValuesProducer.java  |  165 +-
 .../codecs/simpletext/SimpleTextBKDReader.java  |    1 -
 .../simpletext/SimpleTextDocValuesReader.java   |  471 +++--
 .../simpletext/SimpleTextDocValuesWriter.java   |  190 +-
 .../simpletext/SimpleTextNormsFormat.java       |   10 +-
 .../simpletext/SimpleTextPointsReader.java      |   98 +-
 .../simpletext/SimpleTextPointsWriter.java      |   10 +-
 lucene/common-build.xml                         |    4 +-
 .../org/apache/lucene/analysis/Analyzer.java    |    9 +-
 .../apache/lucene/analysis/AnalyzerWrapper.java |   50 +-
 .../analysis/DelegatingAnalyzerWrapper.java     |   14 +-
 .../apache/lucene/analysis/LowerCaseFilter.java |    2 +-
 .../org/apache/lucene/analysis/StopFilter.java  |    2 +-
 .../java/org/apache/lucene/codecs/Codec.java    |    2 +-
 .../apache/lucene/codecs/DocValuesConsumer.java | 1073 +++++------
 .../apache/lucene/codecs/DocValuesProducer.java |    7 -
 .../lucene/codecs/LegacyDocValuesIterables.java |  534 ++++++
 .../lucene/codecs/MultiLevelSkipListReader.java |    9 +-
 .../lucene/codecs/MutablePointValues.java       |   42 +
 .../lucene/codecs/MutablePointsReader.java      |   41 -
 .../org/apache/lucene/codecs/NormsConsumer.java |  138 +-
 .../org/apache/lucene/codecs/PointsFormat.java  |   35 +-
 .../org/apache/lucene/codecs/PointsReader.java  |    5 +-
 .../org/apache/lucene/codecs/PointsWriter.java  |  204 +-
 .../lucene/codecs/blocktree/FieldReader.java    |    2 +-
 .../codecs/blocktree/IntersectTermsEnum.java    |    2 +-
 .../lucene/codecs/lucene50/package-info.java    |    2 +-
 .../codecs/lucene53/Lucene53NormsConsumer.java  |  153 --
 .../codecs/lucene53/Lucene53NormsFormat.java    |   91 -
 .../codecs/lucene53/Lucene53NormsProducer.java  |  208 --
 .../lucene/codecs/lucene53/package-info.java    |   23 -
 .../lucene54/Lucene54DocValuesConsumer.java     |  776 --------
 .../lucene54/Lucene54DocValuesFormat.java       |  183 --
 .../lucene54/Lucene54DocValuesProducer.java     | 1488 ---------------
 .../lucene/codecs/lucene54/package-info.java    |  403 ----
 .../codecs/lucene60/Lucene60PointsReader.java   |   84 +-
 .../codecs/lucene60/Lucene60PointsWriter.java   |   28 +-
 .../lucene/codecs/lucene62/Lucene62Codec.java   |  176 --
 .../lucene/codecs/lucene70/IndexedDISI.java     |  309 +++
 .../lucene/codecs/lucene70/Lucene70Codec.java   |  176 ++
 .../lucene70/Lucene70DocValuesConsumer.java     |  522 +++++
 .../lucene70/Lucene70DocValuesFormat.java       |  160 ++
 .../lucene70/Lucene70DocValuesProducer.java     | 1295 +++++++++++++
 .../codecs/lucene70/Lucene70NormsConsumer.java  |  159 ++
 .../codecs/lucene70/Lucene70NormsFormat.java    |   99 +
 .../codecs/lucene70/Lucene70NormsProducer.java  |  313 +++
 .../lucene/codecs/lucene70/package-info.java    |  392 ++++
 .../perfield/PerFieldDocValuesFormat.java       |   56 +-
 .../codecs/perfield/PerFieldMergeState.java     |  274 +++
 .../codecs/perfield/PerFieldPostingsFormat.java |   85 +-
 .../lucene/document/BinaryDocValuesField.java   |    1 -
 .../org/apache/lucene/document/BinaryPoint.java |    3 +-
 .../java/org/apache/lucene/document/Field.java  |   18 +-
 .../apache/lucene/index/AutomatonTermsEnum.java |    6 +-
 .../apache/lucene/index/BinaryDocValues.java    |   20 +-
 .../lucene/index/BinaryDocValuesWriter.java     |  107 +-
 .../org/apache/lucene/index/CheckIndex.java     |  297 +--
 .../org/apache/lucene/index/CodecReader.java    |  161 +-
 .../lucene/index/DefaultIndexingChain.java      |   19 +-
 .../org/apache/lucene/index/DocIDMerger.java    |   11 +-
 .../java/org/apache/lucene/index/DocValues.java |  332 ++--
 .../apache/lucene/index/DocValuesIterator.java  |   33 +
 .../lucene/index/EmptyDocValuesProducer.java    |   71 +
 .../java/org/apache/lucene/index/FieldInfo.java |    4 +
 .../lucene/index/FilterBinaryDocValues.java     |   66 +
 .../apache/lucene/index/FilterCodecReader.java  |    5 -
 .../apache/lucene/index/FilterLeafReader.java   |   10 +-
 .../lucene/index/FilterNumericDocValues.java    |   64 +
 .../org/apache/lucene/index/IndexWriter.java    |   16 +-
 .../org/apache/lucene/index/LeafReader.java     |   20 +-
 .../lucene/index/LegacyBinaryDocValues.java     |   39 +
 .../index/LegacyBinaryDocValuesWrapper.java     |   90 +
 .../lucene/index/LegacyNumericDocValues.java    |   38 +
 .../index/LegacyNumericDocValuesWrapper.java    |   96 +
 .../lucene/index/LegacySortedDocValues.java     |  110 ++
 .../index/LegacySortedDocValuesWrapper.java     |  101 +
 .../index/LegacySortedNumericDocValues.java     |   49 +
 .../LegacySortedNumericDocValuesWrapper.java    |   98 +
 .../lucene/index/LegacySortedSetDocValues.java  |  111 ++
 .../index/LegacySortedSetDocValuesWrapper.java  |  112 ++
 .../apache/lucene/index/MergeReaderWrapper.java |   19 +-
 .../org/apache/lucene/index/MultiDocValues.java |  767 ++++++--
 .../org/apache/lucene/index/MultiSorter.java    |  339 ++--
 .../apache/lucene/index/NormValuesWriter.java   |  103 +-
 .../apache/lucene/index/NumericDocValues.java   |   12 +-
 .../index/NumericDocValuesFieldUpdates.java     |    3 +-
 .../lucene/index/NumericDocValuesWriter.java    |   98 +-
 .../apache/lucene/index/ParallelLeafReader.java |  106 +-
 .../org/apache/lucene/index/PointValues.java    |   64 +-
 .../apache/lucene/index/PointValuesWriter.java  |   66 +-
 .../apache/lucene/index/RandomAccessOrds.java   |   53 -
 .../apache/lucene/index/ReadersAndUpdates.java  |  226 ++-
 .../lucene/index/SegmentDocValuesProducer.java  |    8 -
 .../org/apache/lucene/index/SegmentReader.java  |   20 +-
 .../index/SingletonSortedNumericDocValues.java  |   53 +-
 .../index/SingletonSortedSetDocValues.java      |   65 +-
 .../lucene/index/SlowCodecReaderWrapper.java    |   53 +-
 .../apache/lucene/index/SortedDocValues.java    |   44 +-
 .../lucene/index/SortedDocValuesWriter.java     |  134 +-
 .../lucene/index/SortedNumericDocValues.java    |   25 +-
 .../index/SortedNumericDocValuesWriter.java     |  124 +-
 .../apache/lucene/index/SortedSetDocValues.java |   29 +-
 .../lucene/index/SortedSetDocValuesWriter.java  |  208 +-
 .../java/org/apache/lucene/index/Sorter.java    |  204 +-
 .../apache/lucene/index/SortingLeafReader.java  |  525 ++++-
 .../lucene/search/DisjunctionMaxQuery.java      |    2 +-
 .../lucene/search/DisjunctionMaxScorer.java     |    2 +-
 .../lucene/search/DocValuesRewriteMethod.java   |   20 +-
 .../apache/lucene/search/ExactPhraseScorer.java |    2 +-
 .../apache/lucene/search/FieldComparator.java   |  266 +--
 .../apache/lucene/search/FieldValueQuery.java   |   45 +-
 .../org/apache/lucene/search/FuzzyQuery.java    |    2 +-
 .../apache/lucene/search/FuzzyTermsEnum.java    |  367 ++--
 .../org/apache/lucene/search/LRUQueryCache.java |   22 +-
 .../lucene/search/LeafFieldComparator.java      |    2 +-
 .../apache/lucene/search/PointInSetQuery.java   |   26 +-
 .../apache/lucene/search/PointRangeQuery.java   |   28 +-
 .../lucene/search/SloppyPhraseScorer.java       |    2 +-
 .../lucene/search/SortedNumericSelector.java    |  119 +-
 .../lucene/search/SortedNumericSortField.java   |    2 +-
 .../apache/lucene/search/SortedSetSelector.java |  295 ++-
 .../lucene/search/SortedSetSortField.java       |    5 +-
 .../apache/lucene/search/TopFieldCollector.java |    2 +-
 .../apache/lucene/search/TopTermsRewrite.java   |    4 +-
 .../org/apache/lucene/search/package-info.java  |    2 +-
 .../search/similarities/BM25Similarity.java     |   27 +-
 .../search/similarities/MultiSimilarity.java    |    4 +-
 .../lucene/search/similarities/Similarity.java  |    4 +-
 .../search/similarities/SimilarityBase.java     |   22 +-
 .../search/similarities/TFIDFSimilarity.java    |   31 +-
 .../src/java/org/apache/lucene/util/Bits.java   |    4 +-
 .../org/apache/lucene/util/ByteBlockPool.java   |    2 +-
 .../java/org/apache/lucene/util/Constants.java  |   12 +-
 .../org/apache/lucene/util/DocIdSetBuilder.java |    2 +-
 .../java/org/apache/lucene/util/LongValues.java |   14 +-
 .../java/org/apache/lucene/util/Version.java    |   14 +
 .../lucene/util/automaton/ByteRunAutomaton.java |    2 +-
 .../util/automaton/CharacterRunAutomaton.java   |    4 +-
 .../util/automaton/CompiledAutomaton.java       |    2 +-
 .../lucene/util/automaton/RunAutomaton.java     |   15 +-
 .../org/apache/lucene/util/bkd/BKDReader.java   |   87 +-
 .../org/apache/lucene/util/bkd/BKDWriter.java   |   38 +-
 .../util/bkd/MutablePointsReaderUtils.java      |   10 +-
 .../util/packed/DirectMonotonicReader.java      |    2 -
 .../apache/lucene/util/packed/PackedInts.java   |    4 +-
 .../services/org.apache.lucene.codecs.Codec     |    2 +-
 .../org.apache.lucene.codecs.DocValuesFormat    |    2 +-
 .../analysis/TestDelegatingAnalyzerWrapper.java |  107 ++
 .../AbstractTestCompressionMode.java            |    8 +-
 .../AbstractTestLZ4CompressionMode.java         |   10 +-
 .../lucene/codecs/lucene50/TestForUtil.java     |    8 +-
 .../lucene50/TestLucene50FieldInfoFormat.java   |   33 -
 ...cene50StoredFieldsFormatHighCompression.java |    8 +-
 .../lucene50/TestLucene60FieldInfoFormat.java   |   33 +
 .../lucene53/TestLucene53NormsFormat.java       |   34 -
 .../lucene54/TestLucene54DocValuesFormat.java   |  607 ------
 .../lucene/codecs/lucene70/TestIndexedDISI.java |  249 +++
 .../lucene70/TestLucene70DocValuesFormat.java   |  537 ++++++
 .../lucene70/TestLucene70NormsFormat.java       |   34 +
 .../perfield/TestPerFieldDocValuesFormat.java   |  132 +-
 .../perfield/TestPerFieldPostingsFormat2.java   |  110 ++
 .../lucene/index/Test2BBinaryDocValues.java     |    6 +-
 .../org/apache/lucene/index/Test2BDocs.java     |  135 ++
 .../lucene/index/Test2BNumericDocValues.java    |    3 +-
 .../org/apache/lucene/index/Test2BPoints.java   |    6 +-
 .../index/Test2BSortedDocValuesFixedSorted.java |    5 +-
 .../lucene/index/Test2BSortedDocValuesOrds.java |    5 +-
 .../lucene/index/Test4GBStoredFields.java       |    4 +-
 .../apache/lucene/index/TestBagOfPositions.java |    4 +-
 .../index/TestBinaryDocValuesUpdates.java       |  139 +-
 .../apache/lucene/index/TestCustomNorms.java    |    3 +-
 .../index/TestDemoParallelLeafReader.java       |   58 +-
 .../lucene/index/TestDirectoryReader.java       |   11 +-
 .../lucene/index/TestDirectoryReaderReopen.java |   23 +-
 .../org/apache/lucene/index/TestDocValues.java  |   11 +-
 .../lucene/index/TestDocValuesIndexing.java     |   28 +-
 .../index/TestExitableDirectoryReader.java      |    5 +-
 .../apache/lucene/index/TestIndexSorting.java   |  171 +-
 .../apache/lucene/index/TestIndexWriter.java    |    4 +-
 .../lucene/index/TestIndexWriterExceptions.java |    8 +-
 .../index/TestIndexingSequenceNumbers.java      |    6 +-
 .../lucene/index/TestMaxTermFrequency.java      |    3 +-
 .../lucene/index/TestMixedDocValuesUpdates.java |   26 +-
 .../apache/lucene/index/TestMultiDocValues.java |  157 +-
 .../test/org/apache/lucene/index/TestNorms.java |   30 +-
 .../index/TestNumericDocValuesUpdates.java      |  139 +-
 .../org/apache/lucene/index/TestOmitNorms.java  |   19 +-
 .../org/apache/lucene/index/TestOrdinalMap.java |   10 +-
 .../apache/lucene/index/TestPointValues.java    |   33 +-
 .../lucene/index/TestPostingsOffsets.java       |   20 +-
 .../org/apache/lucene/index/TestTermsEnum.java  |   13 +-
 .../lucene/index/TestUniqueTermCount.java       |    3 +-
 .../search/FuzzyTermOnShortTermsTest.java       |   15 +-
 .../org/apache/lucene/search/TestBoolean2.java  |   50 +-
 .../org/apache/lucene/search/TestBooleanOr.java |    4 +-
 .../lucene/search/TestBooleanRewrites.java      |   19 +-
 .../lucene/search/TestDisjunctionMaxQuery.java  |   16 +
 .../lucene/search/TestDocValuesScoring.java     |   26 +-
 .../lucene/search/TestElevationComparator.java  |   20 +-
 .../apache/lucene/search/TestFuzzyQuery.java    |  213 +++
 .../lucene/search/TestMinShouldMatch2.java      |    7 +-
 .../apache/lucene/search/TestPointQueries.java  |   17 +-
 .../lucene/search/TestSearcherManager.java      |    6 +-
 .../lucene/search/TestSimilarityProvider.java   |    4 +-
 .../apache/lucene/search/TestSortRandom.java    |    3 +-
 .../apache/lucene/util/TestDocIdSetBuilder.java |   14 +-
 .../lucene/util/TestTimSorterWorstCase.java     |    6 +-
 .../lucene/util/automaton/TestOperations.java   |    4 +-
 .../lucene/util/automaton/TestUTF32ToUTF8.java  |    4 +-
 .../util/bkd/TestMutablePointsReaderUtils.java  |   33 +-
 .../lucene/util/packed/TestDirectPacked.java    |    6 +-
 .../lucene/util/packed/TestPackedInts.java      |   14 +-
 .../lucene/expressions/SimpleBindings.java      |    2 +-
 .../expressions/js/JavascriptCompiler.java      |    2 +-
 .../expressions/TestExpressionValueSource.java  |    1 +
 .../org/apache/lucene/facet/DrillSideways.java  |    2 +-
 .../apache/lucene/facet/FacetsCollector.java    |   42 +-
 .../DefaultSortedSetDocValuesReaderState.java   |   11 +-
 .../SortedSetDocValuesFacetCounts.java          |  115 +-
 .../facet/taxonomy/DocValuesOrdinalsReader.java |   19 +-
 .../facet/taxonomy/FastTaxonomyFacetCounts.java |   61 +-
 .../taxonomy/OrdinalMappingLeafReader.java      |   18 +-
 .../facet/taxonomy/TaxonomyFacetCounts.java     |    2 +-
 .../TaxonomyFacetSumFloatAssociations.java      |   39 +-
 .../TaxonomyFacetSumIntAssociations.java        |   39 +-
 .../lucene/facet/taxonomy/TaxonomyFacets.java   |    4 +-
 .../apache/lucene/facet/TestDrillDownQuery.java |   11 +
 .../lucene/facet/TestMultipleIndexFields.java   |    2 +-
 .../sortedset/TestSortedSetDocValuesFacets.java |   25 +-
 .../taxonomy/TestOrdinalMappingLeafReader.java  |    6 +-
 .../facet/taxonomy/TestTaxonomyFacetCounts.java |   84 +-
 .../AbstractFirstPassGroupingCollector.java     |    6 +-
 .../search/grouping/BlockGroupingCollector.java |    2 +-
 .../FunctionFirstPassGroupingCollector.java     |    2 +-
 .../term/TermAllGroupHeadsCollector.java        |  170 +-
 .../grouping/term/TermAllGroupsCollector.java   |   22 +-
 .../term/TermDistinctValuesCollector.java       |   37 +-
 .../term/TermFirstPassGroupingCollector.java    |   16 +-
 .../grouping/term/TermGroupFacetCollector.java  |   63 +-
 .../term/TermSecondPassGroupingCollector.java   |   17 +-
 .../grouping/AllGroupHeadsCollectorTest.java    |   11 +-
 .../grouping/DistinctValuesCollectorTest.java   |    2 +-
 .../lucene/search/grouping/TestGrouping.java    |   43 +-
 .../search/highlight/TermVectorLeafReader.java  |    7 +-
 .../highlight/WeightedSpanTermExtractor.java    |   46 +-
 .../uhighlight/AnalysisOffsetStrategy.java      |  190 ++
 .../uhighlight/DefaultPassageFormatter.java     |  138 ++
 .../search/uhighlight/FieldHighlighter.java     |  276 +++
 .../search/uhighlight/FieldOffsetStrategy.java  |  122 ++
 .../uhighlight/MultiTermHighlighting.java       |  379 ++++
 .../uhighlight/MultiValueTokenStream.java       |  148 ++
 .../search/uhighlight/NoOpOffsetStrategy.java   |   50 +
 .../lucene/search/uhighlight/OffsetsEnum.java   |   97 +
 .../lucene/search/uhighlight/Passage.java       |  161 ++
 .../search/uhighlight/PassageFormatter.java     |   40 +
 .../lucene/search/uhighlight/PassageScorer.java |  113 ++
 .../lucene/search/uhighlight/PhraseHelper.java  |  581 ++++++
 .../uhighlight/PostingsOffsetStrategy.java      |   61 +
 .../PostingsWithTermVectorsOffsetStrategy.java  |   71 +
 .../uhighlight/SplittingBreakIterator.java      |  244 +++
 .../TermVectorFilteredLeafReader.java           |  130 ++
 .../uhighlight/TermVectorOffsetStrategy.java    |   68 +
 .../uhighlight/TokenStreamFromTermVector.java   |  395 ++++
 .../search/uhighlight/UnifiedHighlighter.java   | 1021 ++++++++++
 .../lucene/search/uhighlight/package-info.java  |   22 +
 .../search/vectorhighlight/FieldQuery.java      |    7 +
 .../search/highlight/HighlighterTest.java       |   47 +-
 .../lucene/search/uhighlight/CambridgeMA.utf8   |    1 +
 .../uhighlight/TestSplittingBreakIterator.java  |  192 ++
 .../uhighlight/TestUnifiedHighlighter.java      |  962 ++++++++++
 .../uhighlight/TestUnifiedHighlighterMTQ.java   |  936 +++++++++
 .../TestUnifiedHighlighterRanking.java          |  339 ++++
 .../TestUnifiedHighlighterReanalysis.java       |   74 +
 .../TestUnifiedHighlighterStrictPhrases.java    |  404 ++++
 .../TestUnifiedHighlighterTermVec.java          |  182 ++
 .../lucene/search/uhighlight/UHTestHelper.java  |   69 +
 .../TestUnifiedHighlighterExtensibility.java    |  182 ++
 .../FastVectorHighlighterTest.java              |   38 +
 lucene/ivy-versions.properties                  |    4 +-
 .../search/join/BaseGlobalOrdinalScorer.java    |    6 +-
 .../lucene/search/join/BlockJoinSelector.java   |  290 ++-
 .../search/join/DocValuesTermsCollector.java    |    5 +-
 .../search/join/GenericTermsCollector.java      |   39 +-
 .../search/join/GlobalOrdinalsCollector.java    |   21 +-
 .../lucene/search/join/GlobalOrdinalsQuery.java |   21 +-
 .../join/GlobalOrdinalsWithScoreCollector.java  |   36 +-
 .../join/GlobalOrdinalsWithScoreQuery.java      |   30 +-
 .../org/apache/lucene/search/join/JoinUtil.java |   44 +-
 .../join/PointInSetIncludingScoreQuery.java     |   10 +-
 .../lucene/search/join/TermsCollector.java      |   23 +-
 .../search/join/TermsWithScoreCollector.java    |   89 +-
 .../search/join/ToChildBlockJoinQuery.java      |    5 +-
 .../search/join/ToParentBlockJoinQuery.java     |   23 +-
 .../search/join/ToParentBlockJoinSortField.java |   54 +-
 .../lucene/search/join/TestBlockJoin.java       |   66 +
 .../search/join/TestBlockJoinSelector.java      |  209 +-
 .../search/join/TestBlockJoinSorting.java       |    9 +-
 .../search/join/TestBlockJoinValidation.java    |    4 +-
 .../apache/lucene/search/join/TestJoinUtil.java |   69 +-
 lucene/licenses/javax.servlet-LICENSE-CDDL.txt  |  139 --
 .../jcl-over-slf4j-LICENSE-BSD_LIKE.txt         |   21 -
 lucene/licenses/jcl-over-slf4j-LICENSE-MIT.txt  |   21 +
 lucene/licenses/morfologik-fsa-2.1.0.jar.sha1   |    1 -
 lucene/licenses/morfologik-fsa-2.1.1.jar.sha1   |    1 +
 .../licenses/morfologik-polish-2.1.0.jar.sha1   |    1 -
 .../licenses/morfologik-polish-2.1.1.jar.sha1   |    1 +
 .../licenses/morfologik-stemming-2.1.0.jar.sha1 |    1 -
 .../licenses/morfologik-stemming-2.1.1.jar.sha1 |    1 +
 .../randomizedtesting-runner-2.3.4.jar.sha1     |    1 -
 .../randomizedtesting-runner-2.4.0.jar.sha1     |    1 +
 lucene/licenses/servlet-api-LICENSE-CDDL.txt    |  137 --
 lucene/licenses/slf4j-LICENSE-BSD_LIKE.txt      |   21 -
 lucene/licenses/slf4j-LICENSE-MIT.txt           |   21 +
 .../apache/lucene/index/memory/MemoryIndex.java |  181 +-
 .../lucene/index/memory/TestMemoryIndex.java    |   48 +-
 .../memory/TestMemoryIndexAgainstRAMDir.java    |   58 +-
 .../apache/lucene/misc/SweetSpotSimilarity.java |   16 +
 .../search/DiversifiedTopDocsCollector.java     |   14 +-
 .../search/TestDiversifiedTopDocsCollector.java |   79 +-
 .../lucene/queries/function/FunctionValues.java |   60 +-
 .../lucene/queries/function/ValueSource.java    |   10 +-
 .../queries/function/ValueSourceScorer.java     |    2 +-
 .../function/docvalues/BoolDocValues.java       |   24 +-
 .../docvalues/DocTermsIndexDocValues.java       |   78 +-
 .../function/docvalues/DoubleDocValues.java     |   32 +-
 .../function/docvalues/FloatDocValues.java      |   22 +-
 .../function/docvalues/IntDocValues.java        |   24 +-
 .../function/docvalues/LongDocValues.java       |   26 +-
 .../function/docvalues/StrDocValues.java        |   12 +-
 .../valuesource/BytesRefFieldSource.java        |   56 +-
 .../valuesource/ComparisonBoolFunction.java     |    8 +-
 .../function/valuesource/DefFunction.java       |   26 +-
 .../function/valuesource/DivFloatFunction.java  |    4 +-
 .../function/valuesource/DoubleFieldSource.java |   43 +-
 .../function/valuesource/DualFloatFunction.java |    8 +-
 .../function/valuesource/EnumFieldSource.java   |   37 +-
 .../function/valuesource/FloatFieldSource.java  |   40 +-
 .../function/valuesource/IfFunction.java        |   24 +-
 .../function/valuesource/IntFieldSource.java    |   40 +-
 .../valuesource/JoinDocFreqValueSource.java     |   27 +-
 .../valuesource/LinearFloatFunction.java        |    6 +-
 .../function/valuesource/LongFieldSource.java   |   56 +-
 .../function/valuesource/MaxFloatFunction.java  |    6 +-
 .../function/valuesource/MinFloatFunction.java  |    6 +-
 .../function/valuesource/MultiBoolFunction.java |    6 +-
 .../valuesource/MultiFloatFunction.java         |   10 +-
 .../function/valuesource/MultiFunction.java     |   13 +-
 .../function/valuesource/NormValueSource.java   |   23 +-
 .../function/valuesource/PowFloatFunction.java  |    4 +-
 .../valuesource/ProductFloatFunction.java       |    4 +-
 .../valuesource/RangeMapFloatFunction.java      |    4 +-
 .../valuesource/ReciprocalFloatFunction.java    |    6 +-
 .../valuesource/ScaleFloatFunction.java         |    6 +-
 .../valuesource/SimpleBoolFunction.java         |    6 +-
 .../valuesource/SimpleFloatFunction.java        |    6 +-
 .../valuesource/SortedSetFieldSource.java       |    6 +-
 .../function/valuesource/SumFloatFunction.java  |    6 +-
 .../function/valuesource/VectorValueSource.java |   33 +-
 .../lucene/queries/TestCustomScoreQuery.java    |   11 +-
 .../function/TestDocValuesFieldSources.java     |    4 +-
 .../queries/function/TestValueSources.java      |    4 +-
 .../classic/MultiFieldQueryParser.java          |    7 +-
 .../queryparser/simple/SimpleQueryParser.java   |    4 +
 .../xml/builders/FuzzyLikeThisQueryBuilder.java |    4 +-
 .../classic/TestMultiFieldQueryParser.java      |   21 +
 .../simple/TestSimpleQueryParser.java           |    7 +
 .../org/apache/lucene/document/LatLonPoint.java |    9 +-
 .../document/LatLonPointDistanceComparator.java |   58 +-
 .../document/LatLonPointDistanceQuery.java      |    4 +-
 .../document/LatLonPointInPolygonQuery.java     |    4 +-
 .../apache/lucene/document/RangeFieldQuery.java |    8 +-
 .../sandbox/queries/FuzzyLikeThisQuery.java     |  312 +--
 .../lucene/sandbox/queries/SlowFuzzyQuery.java  |  201 --
 .../sandbox/queries/SlowFuzzyTermsEnum.java     |  263 ---
 .../lucene/search/DocValuesNumbersQuery.java    |   31 +-
 .../lucene/search/DocValuesRangeQuery.java      |   36 +-
 .../lucene/search/DocValuesTermsQuery.java      |   16 +-
 .../lucene/search/TermAutomatonScorer.java      |    2 +-
 .../org/apache/lucene/document/TestNearest.java |    2 +-
 .../sandbox/queries/FuzzyLikeThisQueryTest.java |   14 +-
 .../sandbox/queries/TestSlowFuzzyQuery.java     |  487 -----
 .../search/BaseRangeFieldQueryTestCase.java     |    5 +-
 .../search/TestDoubleRangeFieldQueries.java     |   34 +-
 .../search/TestFloatRangeFieldQueries.java      |   34 +-
 .../lucene/search/TestIntRangeFieldQueries.java |   34 +-
 .../search/TestLongRangeFieldQueries.java       |   34 +-
 .../spatial/bbox/BBoxSimilarityValueSource.java |    9 +-
 .../lucene/spatial/bbox/BBoxValueSource.java    |   54 +-
 .../serialized/SerializedDVStrategy.java        |   35 +-
 .../spatial/util/CachingDoubleValueSource.java  |    6 +-
 .../util/DistanceToShapeValueSource.java        |    4 +-
 .../spatial/util/ShapeAreaValueSource.java      |    6 +-
 .../spatial/util/ShapePredicateValueSource.java |    4 +-
 .../spatial/vector/DistanceValueSource.java     |   33 +-
 .../GeoPointTermQueryConstantScoreWrapper.java  |   16 +-
 .../lucene/spatial3d/Geo3DDocValuesField.java   |   60 +
 .../spatial3d/Geo3DPointDistanceComparator.java |   43 +-
 .../Geo3DPointOutsideDistanceComparator.java    |   41 +-
 .../spatial3d/PointInGeo3DShapeQuery.java       |    4 +-
 .../spatial3d/PointInShapeIntersectVisitor.java |   30 +-
 .../apache/lucene/spatial3d/TestGeo3DPoint.java |   29 +-
 .../lucene/search/spell/DirectSpellChecker.java |   20 +-
 .../search/suggest/DocumentDictionary.java      |   12 +-
 .../suggest/DocumentValueSourceDictionary.java  |    8 +-
 .../analyzing/AnalyzingInfixSuggester.java      |   43 +-
 .../analyzing/BlendedInfixSuggester.java        |   26 +-
 .../analyzing/AnalyzingInfixSuggesterTest.java  |   76 +
 .../document/TestPrefixCompletionQuery.java     |   33 +-
 .../suggest/document/TestSuggestField.java      |    4 +-
 .../analysis/BaseTokenStreamTestCase.java       |    3 +-
 .../lucene/analysis/MockBytesAnalyzer.java      |    2 +-
 .../apache/lucene/analysis/MockTokenizer.java   |    6 +-
 .../asserting/AssertingDocValuesFormat.java     |  173 +-
 .../asserting/AssertingLiveDocsFormat.java      |    2 +-
 .../codecs/asserting/AssertingNormsFormat.java  |   22 +-
 .../codecs/asserting/AssertingPointsFormat.java |  139 +-
 .../codecs/compressing/CompressingCodec.java    |   14 +-
 .../codecs/cranky/CrankyDocValuesFormat.java    |   21 +-
 .../lucene/codecs/cranky/CrankyNormsFormat.java |    4 +-
 .../codecs/cranky/CrankyPointsFormat.java       |  110 +-
 .../apache/lucene/geo/BaseGeoPointTestCase.java |   23 +-
 .../lucene/index/AssertingLeafReader.java       |  602 ++++--
 .../index/BaseDocValuesFormatTestCase.java      | 1035 +++++-----
 .../index/BaseIndexFileFormatTestCase.java      |  113 +-
 .../lucene/index/BaseNormsFormatTestCase.java   |  440 ++++-
 .../lucene/index/BasePointsFormatTestCase.java  |   80 +-
 .../index/BaseStoredFieldsFormatTestCase.java   |   25 +-
 .../lucene/index/FieldFilterLeafReader.java     |   13 -
 .../org/apache/lucene/index/RandomCodec.java    |    9 +-
 .../lucene/search/AssertingBulkScorer.java      |    4 +-
 .../org/apache/lucene/search/QueryUtils.java    |   13 +-
 .../lucene/search/RandomApproximationQuery.java |    4 +-
 .../org/apache/lucene/util/LuceneTestCase.java  |   81 +-
 .../util/RunListenerPrintReproduceInfo.java     |    4 +-
 .../util/TestRuleSetupAndRestoreClassEnv.java   |   19 +-
 .../java/org/apache/lucene/util/TestUtil.java   |   17 +-
 .../TestCompressingStoredFieldsFormat.java      |    4 +-
 lucene/tools/forbiddenApis/solr.txt             |   20 +
 solr/CHANGES.txt                                |  406 +++-
 solr/NOTICE.txt                                 |    2 +-
 solr/bin/install_solr_service.sh                |   90 +-
 solr/bin/post                                   |   23 +-
 solr/bin/solr                                   |  228 ++-
 solr/bin/solr.cmd                               |  213 ++-
 solr/bin/solr.in.cmd                            |   53 +-
 solr/bin/solr.in.sh                             |   48 +-
 solr/build.xml                                  |   21 +-
 .../analytics/accumulator/BasicAccumulator.java |    2 +-
 .../facet/FieldFacetAccumulator.java            |   44 +-
 .../AbstractDelegatingStatsCollector.java       |    2 +-
 .../statistics/MedianStatsCollector.java        |    3 +-
 .../statistics/MinMaxStatsCollector.java        |    2 +-
 .../statistics/NumericStatsCollector.java       |    3 +-
 .../statistics/PercentileStatsCollector.java    |    3 +-
 .../analytics/statistics/StatsCollector.java    |    2 +-
 .../StatsCollectorSupplierFactory.java          |    2 +-
 .../statistics/UniqueStatsCollector.java        |    3 +-
 .../AbsoluteValueDoubleFunction.java            |    4 +-
 .../util/valuesource/AddDoubleFunction.java     |    4 +-
 .../util/valuesource/ConcatStringFunction.java  |    4 +-
 .../util/valuesource/DateFieldSource.java       |   36 +-
 .../util/valuesource/DateMathFunction.java      |    3 +-
 .../util/valuesource/DivDoubleFunction.java     |    4 +-
 .../util/valuesource/DualDoubleFunction.java    |    8 +-
 .../util/valuesource/FilterFieldSource.java     |   22 +-
 .../util/valuesource/LogDoubleFunction.java     |    4 +-
 .../util/valuesource/MultiDateFunction.java     |   10 +-
 .../util/valuesource/MultiDoubleFunction.java   |    8 +-
 .../util/valuesource/MultiStringFunction.java   |   12 +-
 .../valuesource/MultiplyDoubleFunction.java     |    4 +-
 .../util/valuesource/NegateDoubleFunction.java  |    4 +-
 .../util/valuesource/PowDoubleFunction.java     |    4 +-
 .../util/valuesource/ReverseStringFunction.java |    4 +-
 .../util/valuesource/SingleDoubleFunction.java  |    8 +-
 .../util/valuesource/SingleStringFunction.java  |   14 +-
 .../solr/collection1/conf/solrconfig.xml        |    8 -
 .../solr/handler/dataimport/DIHCache.java       |   16 +-
 .../solr/handler/dataimport/DocBuilder.java     |    2 +
 .../solr/handler/dataimport/EventListener.java  |    2 +-
 .../handler/dataimport/TemplateTransformer.java |   30 +-
 .../handler/dataimport/VariableResolver.java    |   70 +-
 .../dataimport/XPathEntityProcessor.java        |   52 +-
 .../handler/dataimport/XPathRecordReader.java   |   15 +-
 .../handler/dataimport/ZKPropertiesWriter.java  |   20 +-
 .../AbstractDataImportHandlerTestCase.java      |   14 +-
 .../dataimport/MockStringDataSource.java        |   54 +
 .../solr/handler/dataimport/TestDocBuilder.java |  129 +-
 .../handler/dataimport/TestDocBuilder2.java     |   23 +
 .../dataimport/TestTemplateTransformer.java     |   38 +
 .../dataimport/TestXPathRecordReader.java       |   15 +-
 solr/contrib/ltr/README.md                      |  406 ++++
 solr/contrib/ltr/README.txt                     |    1 +
 solr/contrib/ltr/build.xml                      |   30 +
 solr/contrib/ltr/example/config.json            |   14 +
 solr/contrib/ltr/example/libsvm_formatter.py    |  124 ++
 solr/contrib/ltr/example/solrconfig.xml         | 1722 +++++++++++++++++
 .../ltr/example/techproducts-features.json      |   26 +
 .../contrib/ltr/example/techproducts-model.json |   18 +
 .../ltr/example/train_and_upload_demo_model.py  |  163 ++
 solr/contrib/ltr/example/user_queries.txt       |    8 +
 solr/contrib/ltr/ivy.xml                        |   32 +
 .../src/java/org/apache/solr/ltr/DocInfo.java   |   42 +
 .../java/org/apache/solr/ltr/FeatureLogger.java |  193 ++
 .../java/org/apache/solr/ltr/LTRRescorer.java   |  249 +++
 .../org/apache/solr/ltr/LTRScoringQuery.java    |  738 +++++++
 .../org/apache/solr/ltr/LTRThreadModule.java    |  163 ++
 .../solr/ltr/SolrQueryRequestContextUtils.java  |   83 +
 .../org/apache/solr/ltr/feature/Feature.java    |  335 ++++
 .../solr/ltr/feature/FeatureException.java      |   31 +
 .../solr/ltr/feature/FieldLengthFeature.java    |  152 ++
 .../solr/ltr/feature/FieldValueFeature.java     |  141 ++
 .../solr/ltr/feature/OriginalScoreFeature.java  |  118 ++
 .../apache/solr/ltr/feature/SolrFeature.java    |  320 ++++
 .../apache/solr/ltr/feature/ValueFeature.java   |  148 ++
 .../apache/solr/ltr/feature/package-info.java   |   21 +
 .../apache/solr/ltr/model/LTRScoringModel.java  |  298 +++
 .../org/apache/solr/ltr/model/LinearModel.java  |  147 ++
 .../apache/solr/ltr/model/ModelException.java   |   31 +
 .../ltr/model/MultipleAdditiveTreesModel.java   |  377 ++++
 .../org/apache/solr/ltr/model/package-info.java |   21 +
 .../solr/ltr/norm/IdentityNormalizer.java       |   53 +
 .../apache/solr/ltr/norm/MinMaxNormalizer.java  |  107 ++
 .../org/apache/solr/ltr/norm/Normalizer.java    |   64 +
 .../solr/ltr/norm/NormalizerException.java      |   31 +
 .../solr/ltr/norm/StandardNormalizer.java       |   99 +
 .../org/apache/solr/ltr/norm/package-info.java  |   23 +
 .../java/org/apache/solr/ltr/package-info.java  |   45 +
 .../org/apache/solr/ltr/store/FeatureStore.java |   67 +
 .../org/apache/solr/ltr/store/ModelStore.java   |   74 +
 .../org/apache/solr/ltr/store/package-info.java |   21 +
 .../ltr/store/rest/ManagedFeatureStore.java     |  215 +++
 .../solr/ltr/store/rest/ManagedModelStore.java  |  319 ++++
 .../solr/ltr/store/rest/package-info.java       |   22 +
 .../LTRFeatureLoggerTransformerFactory.java     |  254 +++
 .../solr/response/transform/package-info.java   |   23 +
 .../apache/solr/search/LTRQParserPlugin.java    |  233 +++
 .../org/apache/solr/search/package-info.java    |   23 +
 solr/contrib/ltr/src/java/overview.html         |   91 +
 .../featureExamples/comp_features.json          |   37 +
 .../featureExamples/external_features.json      |   51 +
 ...external_features_for_sparse_processing.json |   18 +
 .../featureExamples/features-linear-efi.json    |   17 +
 .../featureExamples/features-linear.json        |   51 +
 .../features-store-test-model.json              |   51 +
 .../test-files/featureExamples/fq_features.json |   16 +
 .../multipleadditivetreesmodel_features.json    |   16 +
 .../contrib/ltr/src/test-files/log4j.properties |   32 +
 .../modelExamples/external_model.json           |   12 +
 .../modelExamples/external_model_store.json     |   13 +
 .../src/test-files/modelExamples/fq-model.json  |   20 +
 .../modelExamples/linear-model-efi.json         |   14 +
 .../test-files/modelExamples/linear-model.json  |   30 +
 .../multipleadditivetreesmodel.json             |   38 +
 ...tivetreesmodel_external_binary_features.json |   38 +
 .../multipleadditivetreesmodel_no_feature.json  |   24 +
 .../multipleadditivetreesmodel_no_features.json |   14 +
 .../multipleadditivetreesmodel_no_left.json     |   22 +
 .../multipleadditivetreesmodel_no_params.json   |    8 +
 .../multipleadditivetreesmodel_no_right.json    |   22 +
 ...multipleadditivetreesmodel_no_threshold.json |   24 +
 .../multipleadditivetreesmodel_no_tree.json     |   15 +
 .../multipleadditivetreesmodel_no_trees.json    |   10 +
 .../multipleadditivetreesmodel_no_weight.json   |   24 +
 .../test-files/solr/collection1/conf/schema.xml |   88 +
 .../solr/collection1/conf/solrconfig-ltr.xml    |   65 +
 .../collection1/conf/solrconfig-ltr_Th10_10.xml |   69 +
 .../collection1/conf/solrconfig-multiseg.xml    |   62 +
 .../solr/collection1/conf/stopwords.txt         |   16 +
 .../solr/collection1/conf/synonyms.txt          |   28 +
 solr/contrib/ltr/src/test-files/solr/solr.xml   |   42 +
 .../org/apache/solr/ltr/TestLTROnSolrCloud.java |  211 ++
 .../apache/solr/ltr/TestLTRQParserExplain.java  |  152 ++
 .../apache/solr/ltr/TestLTRQParserPlugin.java   |  114 ++
 .../solr/ltr/TestLTRReRankingPipeline.java      |  300 +++
 .../apache/solr/ltr/TestLTRScoringQuery.java    |  319 ++++
 .../org/apache/solr/ltr/TestLTRWithFacet.java   |  103 +
 .../org/apache/solr/ltr/TestLTRWithSort.java    |  102 +
 .../solr/ltr/TestParallelWeightCreation.java    |   77 +
 .../org/apache/solr/ltr/TestRerankBase.java     |  429 +++++
 .../solr/ltr/TestSelectiveWeightCreation.java   |  251 +++
 .../ltr/feature/TestEdisMaxSolrFeature.java     |   76 +
 .../solr/ltr/feature/TestExternalFeatures.java  |  157 ++
 .../ltr/feature/TestExternalValueFeatures.java  |   86 +
 ...stFeatureExtractionFromMultipleSegments.java |  105 +
 .../solr/ltr/feature/TestFeatureLogging.java    |  254 +++
 .../ltr/feature/TestFeatureLtrScoringModel.java |   71 +
 .../solr/ltr/feature/TestFeatureStore.java      |  106 +
 .../ltr/feature/TestFieldLengthFeature.java     |  156 ++
 .../solr/ltr/feature/TestFieldValueFeature.java |  173 ++
 .../solr/ltr/feature/TestFilterSolrFeature.java |  105 +
 .../ltr/feature/TestNoMatchSolrFeature.java     |  192 ++
 .../ltr/feature/TestOriginalScoreFeature.java   |  148 ++
 .../solr/ltr/feature/TestRankingFeature.java    |  123 ++
 .../ltr/feature/TestUserTermScoreWithQ.java     |   74 +
 .../ltr/feature/TestUserTermScorerQuery.java    |   74 +
 .../ltr/feature/TestUserTermScorereQDF.java     |   75 +
 .../solr/ltr/feature/TestValueFeature.java      |  165 ++
 .../apache/solr/ltr/model/TestLinearModel.java  |  207 ++
 .../model/TestMultipleAdditiveTreesModel.java   |  246 +++
 .../solr/ltr/norm/TestMinMaxNormalizer.java     |  120 ++
 .../solr/ltr/norm/TestStandardNormalizer.java   |  132 ++
 .../ltr/store/rest/TestManagedFeatureStore.java |   36 +
 .../solr/ltr/store/rest/TestModelManager.java   |  163 ++
 .../store/rest/TestModelManagerPersistence.java |  121 ++
 .../solr/hadoop/TreeMergeOutputFormat.java      |   10 +-
 .../hadoop/morphline/MorphlineMapRunner.java    |    5 +-
 .../solr/morphlines/cell/SolrCellBuilder.java   |   14 +-
 .../solr/SanitizeUnknownSolrFieldsBuilder.java  |   17 +-
 .../solr/morphlines/solr/SolrLocator.java       |    5 +-
 .../morphlines/solr/TokenizeTextBuilder.java    |    8 +-
 .../solr/collection1/conf/solrconfig.xml        |   65 -
 .../test-files/solr/minimr/conf/solrconfig.xml  |   65 -
 .../test-files/solr/mrunit/conf/solrconfig.xml  |   65 -
 .../collection1/conf/solrconfig.xml             |   65 -
 .../solr/solrcloud/conf/solrconfig.xml          |   66 -
 .../solr/AbstractSolrMorphlineTestBase.java     |    8 +-
 .../solr/SolrMorphlineZkAvroTest.java           |    9 +-
 .../uima/solr/collection1/conf/solrconfig.xml   |   55 -
 .../uima/uima-tokenizers-solrconfig.xml         |   53 -
 solr/core/ivy.xml                               |    4 +-
 .../apache/solr/analysis/TokenizerChain.java    |    2 +-
 .../client/solrj/embedded/JettySolrRunner.java  |   28 +-
 .../java/org/apache/solr/cloud/BackupCmd.java   |   75 +-
 .../apache/solr/cloud/CreateCollectionCmd.java  |   12 +-
 .../apache/solr/cloud/CreateSnapshotCmd.java    |  179 ++
 .../org/apache/solr/cloud/DeleteReplicaCmd.java |    4 +-
 .../org/apache/solr/cloud/DeleteShardCmd.java   |    2 +-
 .../apache/solr/cloud/DeleteSnapshotCmd.java    |  160 ++
 .../org/apache/solr/cloud/DistributedQueue.java |    4 +-
 .../org/apache/solr/cloud/ElectionContext.java  |   68 +-
 .../solr/cloud/ExclusiveSliceProperty.java      |  345 ++++
 .../org/apache/solr/cloud/LeaderElector.java    |   14 +-
 .../java/org/apache/solr/cloud/Overseer.java    |  335 +---
 .../OverseerAutoReplicaFailoverThread.java      |    3 +-
 .../cloud/OverseerCollectionMessageHandler.java |   32 +-
 .../solr/cloud/OverseerTaskProcessor.java       |   14 +-
 .../apache/solr/cloud/OverseerTaskQueue.java    |    2 +-
 .../org/apache/solr/cloud/RecoveryStrategy.java |    6 +-
 .../org/apache/solr/cloud/SplitShardCmd.java    |   36 +
 .../org/apache/solr/cloud/SyncStrategy.java     |   34 +-
 .../org/apache/solr/cloud/ZkController.java     |  141 +-
 .../cloud/overseer/ClusterStateMutator.java     |    2 +-
 .../solr/cloud/overseer/CollectionMutator.java  |    8 +
 .../apache/solr/cloud/overseer/NodeMutator.java |    4 +-
 .../solr/cloud/overseer/ReplicaMutator.java     |   77 +-
 .../solr/cloud/overseer/SliceMutator.java       |    6 +-
 .../solr/cloud/overseer/ZkStateWriter.java      |    6 +-
 .../apache/solr/cloud/rule/ImplicitSnitch.java  |  145 +-
 .../apache/solr/cloud/rule/RemoteCallback.java  |   23 -
 .../apache/solr/cloud/rule/ReplicaAssigner.java |   17 +-
 .../java/org/apache/solr/cloud/rule/Rule.java   |    2 +-
 .../solr/cloud/rule/ServerSnitchContext.java    |  108 ++
 .../java/org/apache/solr/cloud/rule/Snitch.java |   34 -
 .../apache/solr/cloud/rule/SnitchContext.java   |  168 --
 .../solr/core/CachingDirectoryFactory.java      |   18 +-
 .../src/java/org/apache/solr/core/Config.java   |    2 +-
 .../org/apache/solr/core/ConfigOverlay.java     |   10 +-
 .../apache/solr/core/ConfigSetProperties.java   |    4 +-
 .../org/apache/solr/core/CoreContainer.java     |   83 +-
 .../org/apache/solr/core/CoreDescriptor.java    |    2 +-
 .../apache/solr/core/CorePropertiesLocator.java |   12 +-
 .../org/apache/solr/core/DirectoryFactory.java  |    2 +-
 .../org/apache/solr/core/JmxMonitoredMap.java   |   20 +-
 .../org/apache/solr/core/MapSerializable.java   |   23 -
 .../java/org/apache/solr/core/PluginBag.java    |    9 +-
 .../java/org/apache/solr/core/PluginInfo.java   |   15 +-
 .../org/apache/solr/core/RequestHandlers.java   |    2 +-
 .../org/apache/solr/core/RequestParams.java     |    8 +-
 .../apache/solr/core/SchemaCodecFactory.java    |    8 +-
 .../java/org/apache/solr/core/SolrConfig.java   |   71 +-
 .../src/java/org/apache/solr/core/SolrCore.java |  168 +-
 .../apache/solr/core/SolrDeletionPolicy.java    |   17 +-
 .../apache/solr/core/SolrResourceLoader.java    |   44 +-
 .../java/org/apache/solr/core/ZkContainer.java  |    4 +-
 .../apache/solr/core/backup/BackupManager.java  |   11 +-
 .../repository/BackupRepositoryFactory.java     |    9 +-
 .../backup/repository/HdfsBackupRepository.java |   17 +-
 .../repository/LocalFileSystemRepository.java   |    3 +-
 .../snapshots/CollectionSnapshotMetaData.java   |  242 +++
 .../core/snapshots/SolrSnapshotManager.java     |  298 ++-
 .../snapshots/SolrSnapshotMetaDataManager.java  |    8 +-
 .../org/apache/solr/handler/ClassifyStream.java |  229 +++
 .../apache/solr/handler/DumpRequestHandler.java |    6 +-
 .../org/apache/solr/handler/GraphHandler.java   |    7 +-
 .../org/apache/solr/handler/IndexFetcher.java   |   28 +-
 .../solr/handler/MoreLikeThisHandler.java       |    2 +-
 .../apache/solr/handler/OldBackupDirectory.java |    9 +-
 .../apache/solr/handler/ReplicationHandler.java |    2 +-
 .../apache/solr/handler/RequestHandlerBase.java |   23 +-
 .../org/apache/solr/handler/RestoreCore.java    |   16 +-
 .../org/apache/solr/handler/SQLHandler.java     |   66 +-
 .../org/apache/solr/handler/SnapShooter.java    |    8 +-
 .../apache/solr/handler/SolrConfigHandler.java  |  110 +-
 .../org/apache/solr/handler/StreamHandler.java  |   45 +-
 .../apache/solr/handler/admin/BackupCoreOp.java |   84 +
 .../solr/handler/admin/CollectionsHandler.java  |   54 +-
 .../solr/handler/admin/CoreAdminOperation.java  |  713 +------
 .../solr/handler/admin/CreateSnapshotOp.java    |   64 +
 .../solr/handler/admin/DeleteSnapshotOp.java    |   51 +
 .../org/apache/solr/handler/admin/InvokeOp.java |   58 +
 .../solr/handler/admin/LukeRequestHandler.java  |    3 +-
 .../solr/handler/admin/MergeIndexesOp.java      |  142 ++
 .../solr/handler/admin/PrepRecoveryOp.java      |  217 +++
 .../handler/admin/RequestApplyUpdatesOp.java    |   71 +
 .../solr/handler/admin/RequestSyncShardOp.java  |  100 +
 .../solr/handler/admin/RestoreCoreOp.java       |   71 +
 .../solr/handler/admin/SecurityConfHandler.java |  162 +-
 .../handler/admin/SecurityConfHandlerLocal.java |  104 +
 .../handler/admin/SecurityConfHandlerZk.java    |   92 +
 .../org/apache/solr/handler/admin/SplitOp.java  |  142 ++
 .../org/apache/solr/handler/admin/StatusOp.java |   64 +
 .../solr/handler/component/ExpandComponent.java |   72 +-
 .../solr/handler/component/FieldFacetStats.java |   24 +-
 .../component/HttpShardHandlerFactory.java      |    2 +-
 .../component/MoreLikeThisComponent.java        |    2 +-
 .../solr/handler/component/QueryComponent.java  |   13 +-
 .../handler/component/RealTimeGetComponent.java |   23 +-
 .../solr/handler/component/ResponseBuilder.java |    4 +-
 .../handler/component/SpellCheckComponent.java  |    2 +-
 .../solr/handler/component/StatsValues.java     |    2 +-
 .../handler/component/StatsValuesFactory.java   |    8 +-
 .../solr/handler/component/TermsComponent.java  |    2 +-
 .../apache/solr/handler/loader/XMLLoader.java   |    2 +-
 .../solr/highlight/DefaultSolrHighlighter.java  |    8 +-
 .../apache/solr/index/NoMergePolicyFactory.java |   34 +
 .../solr/index/SlowCompositeReaderWrapper.java  |   41 +-
 .../org/apache/solr/logging/LogWatcher.java     |    8 +-
 .../apache/solr/request/DocValuesFacets.java    |   62 +-
 .../org/apache/solr/request/DocValuesStats.java |   52 +-
 .../org/apache/solr/request/IntervalFacets.java |   74 +-
 .../org/apache/solr/request/NumericFacets.java  |   37 +-
 .../request/PerSegmentSingleValuedFaceting.java |   34 +-
 .../org/apache/solr/request/SimpleFacets.java   |    2 +-
 .../solr/request/macro/MacroExpander.java       |    1 -
 .../solr/response/BinaryResponseWriter.java     |    3 +-
 .../solr/response/JSONResponseWriter.java       |  203 +-
 .../solr/response/SortingResponseWriter.java    |  186 +-
 .../solr/response/TextResponseWriter.java       |    8 +-
 .../transform/SubQueryAugmenterFactory.java     |    1 -
 .../response/transform/TransformerFactory.java  |    2 +-
 .../transform/ValueSourceAugmenter.java         |    8 +-
 .../solr/rest/ManagedResourceStorage.java       |   10 +-
 .../java/org/apache/solr/rest/RestManager.java  |    4 +-
 .../analysis/ManagedStopFilterFactory.java      |    2 +-
 .../java/org/apache/solr/schema/BoolField.java  |   34 +-
 .../org/apache/solr/schema/CurrencyField.java   |   40 +-
 .../java/org/apache/solr/schema/EnumField.java  |    4 +-
 .../java/org/apache/solr/schema/FieldType.java  |   38 +-
 .../org/apache/solr/schema/IndexSchema.java     |   36 +-
 .../java/org/apache/solr/schema/LatLonType.java |    8 +-
 .../solr/schema/ManagedIndexSchemaFactory.java  |    1 +
 .../schema/OpenExchangeRatesOrgProvider.java    |    4 +-
 .../java/org/apache/solr/schema/PointType.java  |    6 +-
 .../apache/solr/schema/PreAnalyzedField.java    |    4 +-
 .../schema/RptWithGeometrySpatialField.java     |   10 +-
 .../org/apache/solr/schema/SchemaField.java     |   63 +-
 .../org/apache/solr/schema/SchemaManager.java   |   14 +-
 .../org/apache/solr/schema/StrFieldSource.java  |    8 +-
 .../org/apache/solr/schema/TrieDoubleField.java |   49 +-
 .../java/org/apache/solr/schema/TrieField.java  |    3 +-
 .../org/apache/solr/schema/TrieFloatField.java  |   49 +-
 .../org/apache/solr/schema/TrieIntField.java    |   50 +-
 .../org/apache/solr/schema/TrieLongField.java   |   50 +-
 .../apache/solr/schema/ZkIndexSchemaReader.java |   10 +-
 .../apache/solr/search/AbstractReRankQuery.java |   83 +
 .../org/apache/solr/search/CacheConfig.java     |    4 +-
 .../solr/search/CollapsingQParserPlugin.java    |  247 ++-
 .../solr/search/ExtendedDismaxQParser.java      |   25 +-
 .../java/org/apache/solr/search/Grouping.java   |    4 +-
 .../apache/solr/search/HashQParserPlugin.java   |   33 +-
 .../solr/search/IGainTermsQParserPlugin.java    |   13 +-
 .../apache/solr/search/JoinQParserPlugin.java   |    8 +-
 .../apache/solr/search/LuceneQParserPlugin.java |    8 +-
 .../java/org/apache/solr/search/QParser.java    |   10 +
 .../org/apache/solr/search/ReRankCollector.java |  175 ++
 .../apache/solr/search/ReRankQParserPlugin.java |  245 +--
 .../apache/solr/search/SolrIndexSearcher.java   |  162 +-
 .../TextLogisticRegressionQParserPlugin.java    |   13 +-
 .../apache/solr/search/ValueSourceParser.java   |   78 +-
 .../apache/solr/search/facet/FacetBucket.java   |  189 ++
 .../apache/solr/search/facet/FacetField.java    |    1 +
 .../solr/search/facet/FacetFieldMerger.java     |    8 +-
 .../solr/search/facet/FacetFieldProcessor.java  |   22 +-
 .../facet/FacetFieldProcessorByArrayDV.java     |   60 +-
 .../facet/FacetFieldProcessorByArrayUIF.java    |    2 +-
 .../FacetFieldProcessorByEnumTermsStream.java   |    4 +-
 .../facet/FacetFieldProcessorByHashDV.java      |   29 +-
 .../apache/solr/search/facet/FacetModule.java   |  166 --
 .../solr/search/facet/FacetProcessor.java       |   59 +-
 .../apache/solr/search/facet/FacetRequest.java  |   33 +-
 .../search/facet/FacetRequestSortedMerger.java  |   25 +-
 .../org/apache/solr/search/facet/FieldUtil.java |    4 +-
 .../org/apache/solr/search/facet/HLLAgg.java    |   13 +-
 .../apache/solr/search/facet/PercentileAgg.java |    2 +-
 .../org/apache/solr/search/facet/SlotAcc.java   |   12 +-
 .../solr/search/facet/UnInvertedField.java      |    4 +-
 .../org/apache/solr/search/facet/UniqueAgg.java |   12 +-
 .../solr/search/facet/UniqueMultiDvSlotAcc.java |   37 +-
 .../search/facet/UniqueSinglevaluedSlotAcc.java |   26 +-
 .../solr/search/function/OrdFieldSource.java    |   35 +-
 .../search/function/ReverseOrdFieldSource.java  |   15 +-
 .../function/SolrComparisonBoolFunction.java    |    4 +-
 .../function/distance/GeohashFunction.java      |    4 +-
 .../distance/GeohashHaversineFunction.java      |    6 +-
 .../distance/HaversineConstFunction.java        |    4 +-
 .../function/distance/HaversineFunction.java    |    6 +-
 .../distance/SquaredEuclideanFunction.java      |    4 +-
 .../distance/StringDistanceFunction.java        |    6 +-
 .../distance/VectorDistanceFunction.java        |    6 +-
 .../apache/solr/search/grouping/Command.java    |    2 +-
 .../command/SearchGroupsFieldCommand.java       |    2 +-
 .../TopGroupsShardResponseProcessor.java        |    9 +-
 .../search/join/BlockJoinFacetAccsHolder.java   |    2 +-
 .../join/BlockJoinFieldFacetAccumulator.java    |   37 +-
 .../solr/search/join/GraphTermsCollector.java   |   20 +-
 .../solr/search/mlt/SimpleMLTQParser.java       |   33 +-
 .../similarities/SchemaSimilarityFactory.java   |   39 +-
 .../apache/solr/security/BasicAuthPlugin.java   |   17 +-
 .../security/DelegationTokenKerberosFilter.java |   11 +-
 .../apache/solr/security/KerberosPlugin.java    |    9 +-
 .../solr/security/PKIAuthenticationPlugin.java  |   18 +-
 .../security/Sha256AuthenticationProvider.java  |    2 +-
 .../org/apache/solr/servlet/HttpSolrCall.java   |    3 +-
 .../apache/solr/servlet/SolrDispatchFilter.java |   74 +-
 .../solr/servlet/StartupLoggingUtils.java       |  116 ++
 .../apache/solr/uninverting/DocTermOrds.java    |    6 +-
 .../org/apache/solr/uninverting/FieldCache.java |   15 +-
 .../apache/solr/uninverting/FieldCacheImpl.java |  363 +++-
 .../solr/uninverting/UninvertingReader.java     |   64 +-
 .../apache/solr/update/AddUpdateCommand.java    |    1 -
 .../solr/update/DefaultSolrCoreState.java       |   15 +-
 .../solr/update/DirectUpdateHandler2.java       |   22 +-
 .../apache/solr/update/HdfsTransactionLog.java  |    3 +-
 .../apache/solr/update/IndexFingerprint.java    |  126 +-
 .../apache/solr/update/MergeIndexesCommand.java |   12 +-
 .../java/org/apache/solr/update/PeerSync.java   |  105 +-
 .../org/apache/solr/update/SolrCoreState.java   |    2 +-
 .../org/apache/solr/update/SolrIndexConfig.java |   12 +-
 .../apache/solr/update/SolrIndexSplitter.java   |   11 +-
 .../org/apache/solr/update/SolrIndexWriter.java |   30 +-
 .../java/org/apache/solr/update/UpdateLog.java  |    4 +-
 .../apache/solr/update/UpdateShardHandler.java  |    2 +-
 .../org/apache/solr/update/VersionInfo.java     |    6 +-
 .../AddSchemaFieldsUpdateProcessorFactory.java  |   35 +-
 .../ConcatFieldUpdateProcessorFactory.java      |   66 +-
 .../CountFieldValuesUpdateProcessorFactory.java |   17 +-
 .../FieldLengthUpdateProcessorFactory.java      |   29 +-
 .../processor/FieldMutatingUpdateProcessor.java |   72 +-
 .../FieldMutatingUpdateProcessorFactory.java    |   16 +-
 .../FieldValueMutatingUpdateProcessor.java      |   12 +
 .../FieldValueSubsetUpdateProcessorFactory.java |   23 +-
 .../FirstFieldValueUpdateProcessorFactory.java  |   13 +-
 .../HTMLStripFieldUpdateProcessorFactory.java   |   51 +-
 .../IgnoreFieldUpdateProcessorFactory.java      |   33 +-
 .../LastFieldValueUpdateProcessorFactory.java   |    9 +-
 .../MaxFieldValueUpdateProcessorFactory.java    |   15 +-
 .../MinFieldValueUpdateProcessorFactory.java    |   15 +-
 ...ParseBooleanFieldUpdateProcessorFactory.java |   28 +-
 .../ParseDateFieldUpdateProcessorFactory.java   |   25 +-
 ...ParseNumericFieldUpdateProcessorFactory.java |   16 +-
 .../processor/RegexReplaceProcessorFactory.java |   25 +-
 .../RemoveBlankFieldUpdateProcessorFactory.java |   18 +-
 .../processor/SimpleUpdateProcessorFactory.java |   46 +-
 .../TemplateUpdateProcessorFactory.java         |  120 ++
 .../TrimFieldUpdateProcessorFactory.java        |   17 +-
 .../TruncateFieldUpdateProcessorFactory.java    |   28 +-
 .../UniqFieldsUpdateProcessorFactory.java       |    9 +-
 .../processor/UpdateRequestProcessorChain.java  |   49 +-
 .../org/apache/solr/util/SimplePostTool.java    |   59 +-
 .../src/java/org/apache/solr/util/SolrCLI.java  |  673 ++++++-
 .../org/apache/solr/util/SolrPluginUtils.java   |    5 +-
 solr/core/src/resources/ImplicitPlugins.json    |   61 +-
 .../resources/SystemCollectionSolrConfig.xml    |    1 -
 .../solr/collection1/conf/schema_codec.xml      |    2 +-
 .../conf/solrconfig-analytics-query.xml         |    8 -
 .../conf/solrconfig-collapseqparser.xml         |    8 -
 .../conf/solrconfig-nomergepolicyfactory.xml    |   32 +
 .../conf/solrconfig-plugcollector.xml           |    9 -
 .../conf/solrconfig-spellcheckcomponent.xml     |   10 +-
 .../solr/collection1/conf/solrconfig.xml        |    8 -
 .../cdcr-source-disabled/conf/schema.xml        |   29 +
 .../cdcr-source-disabled/conf/solrconfig.xml    |   60 +
 .../configsets/cdcr-source-disabled/schema.xml  |   29 -
 .../cdcr-source-disabled/solrconfig.xml         |   60 -
 .../solr/configsets/cdcr-source/conf/schema.xml |   29 +
 .../configsets/cdcr-source/conf/solrconfig.xml  |   76 +
 .../solr/configsets/cdcr-source/schema.xml      |   29 -
 .../solr/configsets/cdcr-source/solrconfig.xml  |   76 -
 .../solr/configsets/cdcr-target/conf/schema.xml |   29 +
 .../configsets/cdcr-target/conf/solrconfig.xml  |   63 +
 .../solr/configsets/cdcr-target/schema.xml      |   29 -
 .../solr/configsets/cdcr-target/solrconfig.xml  |   63 -
 .../configsets/cloud-dynamic/conf/schema.xml    |    2 +
 .../solr/configsets/cloud-hdfs/conf/schema.xml  |   28 +
 .../configsets/cloud-hdfs/conf/solrconfig.xml   |   50 +
 .../cloud-managed-upgrade/conf/schema.xml       |   27 +
 .../cloud-managed-upgrade/conf/solrconfig.xml   |   50 +
 .../cloud-minimal-jmx/conf/schema.xml           |   28 +
 .../cloud-minimal-jmx/conf/solrconfig.xml       |   50 +
 .../solr/HelloWorldSolrCloudTestCase.java       |   94 +
 .../test/org/apache/solr/MinimalSchemaTest.java |    5 +-
 .../apache/solr/TestDistributedGrouping.java    |   59 +-
 .../org/apache/solr/TestDistributedSearch.java  |    4 +-
 .../solr/client/solrj/ConnectionReuseTest.java  |  198 --
 .../solrj/embedded/TestJettySolrRunner.java     |    4 +-
 .../client/solrj/impl/ConnectionReuseTest.java  |  196 ++
 .../apache/solr/cloud/AliasIntegrationTest.java |  251 +--
 .../AsyncCallRequestStatusResponseTest.java     |   44 +-
 .../solr/cloud/AsyncMigrateRouteKeyTest.java    |  121 --
 .../solr/cloud/BaseCdcrDistributedZkTest.java   |    1 +
 .../solr/cloud/BasicDistributedZkTest.java      |    5 -
 .../apache/solr/cloud/CdcrBootstrapTest.java    |   76 +-
 .../solr/cloud/ClusterStateUpdateTest.java      |    5 -
 .../apache/solr/cloud/CollectionReloadTest.java |   81 +-
 .../solr/cloud/CollectionStateFormat2Test.java  |   73 +-
 .../cloud/CollectionTooManyReplicasTest.java    |  301 ++-
 .../cloud/CollectionsAPIDistributedZkTest.java  | 1303 ++++---------
 ...ConcurrentDeleteAndCreateCollectionTest.java |   12 +-
 .../solr/cloud/CreateCollectionCleanupTest.java |    6 +-
 .../apache/solr/cloud/CustomCollectionTest.java |  496 ++---
 .../solr/cloud/DeleteInactiveReplicaTest.java   |  185 +-
 .../apache/solr/cloud/DeleteReplicaTest.java    |  337 +---
 .../cloud/DistribJoinFromCollectionTest.java    |   25 +-
 .../apache/solr/cloud/DistributedQueueTest.java |   13 +-
 .../apache/solr/cloud/HttpPartitionTest.java    |   19 +-
 .../apache/solr/cloud/KerberosTestServices.java |   14 +-
 .../cloud/LeaderElectionContextKeyTest.java     |  114 ++
 .../cloud/LeaderElectionIntegrationTest.java    |   10 +-
 .../cloud/LeaderFailureAfterFreshStartTest.java |  263 +++
 .../apache/solr/cloud/MigrateRouteKeyTest.java  |  159 +-
 .../org/apache/solr/cloud/OverseerTest.java     |   18 +-
 .../solr/cloud/PeerSyncReplicationTest.java     |    9 +-
 .../org/apache/solr/cloud/RecoveryZkTest.java   |  166 +-
 .../org/apache/solr/cloud/ShardSplitTest.java   |  373 +++-
 .../solr/cloud/TestAuthenticationFramework.java |   45 +-
 .../solr/cloud/TestCloudDeleteByQuery.java      |   15 +-
 .../solr/cloud/TestCloudPseudoReturnFields.java |   21 +-
 .../apache/solr/cloud/TestCloudRecovery.java    |  154 ++
 .../solr/cloud/TestClusterProperties.java       |   45 +
 .../apache/solr/cloud/TestConfigSetsAPI.java    |   13 +-
 .../cloud/TestConfigSetsAPIExclusivity.java     |   10 +-
 .../solr/cloud/TestConfigSetsAPIZkFailure.java  |    2 +-
 .../cloud/TestDeleteCollectionOnDownNodes.java  |   65 +
 .../TestLeaderElectionWithEmptyReplica.java     |  125 ++
 .../solr/cloud/TestMiniSolrCloudCluster.java    |  121 +-
 .../cloud/TestMiniSolrCloudClusterBase.java     |  207 --
 .../cloud/TestMiniSolrCloudClusterKerberos.java |   14 -
 .../solr/cloud/TestMiniSolrCloudClusterSSL.java |   50 +-
 .../apache/solr/cloud/TestRandomFlRTGCloud.java |   40 +-
 .../solr/cloud/TestRequestForwarding.java       |    4 +-
 .../TestSolrCloudWithDelegationTokens.java      |   26 +-
 .../cloud/TestSolrCloudWithKerberosAlt.java     |    3 +-
 .../TestSolrCloudWithSecureImpersonation.java   |    8 +-
 .../TestStressCloudBlindAtomicUpdates.java      |   35 +-
 .../cloud/TestTolerantUpdateProcessorCloud.java |   23 +-
 .../TestTolerantUpdateProcessorRandomCloud.java |   29 +-
 .../solr/cloud/UnloadDistributedZkTest.java     |    4 -
 .../test/org/apache/solr/cloud/ZkCLITest.java   |    7 +-
 .../org/apache/solr/cloud/ZkControllerTest.java |    5 -
 .../org/apache/solr/cloud/ZkSolrClientTest.java |   16 -
 .../HdfsCollectionsAPIDistributedZkTest.java    |   33 +-
 .../solr/cloud/hdfs/HdfsRecoveryZkTest.java     |   24 +-
 .../solr/cloud/rule/ImplicitSnitchTest.java     |   16 +-
 .../apache/solr/cloud/rule/RuleEngineTest.java  |    9 +-
 .../org/apache/solr/cloud/rule/RulesTest.java   |  254 +--
 .../solr/core/BlobRepositoryCloudTest.java      |   12 +-
 .../test/org/apache/solr/core/HelloStream.java  |  100 +
 .../test/org/apache/solr/core/SolrCoreTest.java |    3 +
 .../apache/solr/core/TestArbitraryIndexDir.java |  113 --
 .../org/apache/solr/core/TestCodecSupport.java  |    2 +-
 .../test/org/apache/solr/core/TestConfig.java   |    5 +-
 .../solr/core/TestCorePropertiesReload.java     |   74 +
 .../org/apache/solr/core/TestCustomStream.java  |   94 +
 .../org/apache/solr/core/TestLazyCores.java     |   16 +-
 .../apache/solr/core/TestMergePolicyConfig.java |   20 +
 .../core/snapshots/TestSolrCloudSnapshots.java  |  285 +++
 .../core/snapshots/TestSolrCoreSnapshots.java   |  127 +-
 .../DocumentAnalysisRequestHandlerTest.java     |    8 +-
 .../FieldAnalysisRequestHandlerTest.java        |   10 +-
 .../solr/handler/PingRequestHandlerTest.java    |   12 +-
 .../apache/solr/handler/TestBlobHandler.java    |   26 +-
 .../solr/handler/TestHdfsBackupRestoreCore.java |   11 +
 .../apache/solr/handler/TestReqParamsAPI.java   |   19 +-
 .../org/apache/solr/handler/TestSQLHandler.java |  235 ++-
 .../solr/handler/TestSQLHandlerNonCloud.java    |   92 +
 .../handler/admin/CoreAdminHandlerTest.java     |   59 +-
 .../SecurityConfHandlerLocalForTesting.java     |   39 +
 .../handler/admin/SecurityConfHandlerTest.java  |   66 +-
 .../solr/handler/admin/StatsReloadRaceTest.java |   99 +
 .../solr/handler/admin/TestCoreAdminApis.java   |    2 +-
 .../handler/component/SearchHandlerTest.java    |   15 +-
 .../org/apache/solr/request/JSONWriterTest.java |  138 --
 .../apache/solr/request/SmileWriterTest.java    |  253 ---
 .../solr/request/TestBinaryResponseWriter.java  |  105 -
 .../org/apache/solr/request/TestFaceting.java   |   22 +-
 .../solr/request/TestIntervalFaceting.java      |    7 +-
 .../apache/solr/response/JSONWriterTest.java    |  261 +++
 .../apache/solr/response/SmileWriterTest.java   |  253 +++
 .../solr/response/TestBinaryResponseWriter.java |  104 +
 .../response/TestSortingResponseWriter.java     |   16 +-
 .../TestSubQueryTransformerDistrib.java         |   18 +-
 .../solr/rest/schema/TestBulkSchemaAPI.java     |   69 +-
 .../solr/schema/ChangedSchemaMergeTest.java     |    8 +-
 .../apache/solr/schema/DocValuesMultiTest.java  |   12 +-
 .../org/apache/solr/schema/DocValuesTest.java   |   60 +-
 .../org/apache/solr/schema/TestBinaryField.java |    4 +-
 .../solr/schema/TestManagedSchemaAPI.java       |    3 +-
 .../schema/TestManagedSchemaThreadSafety.java   |  164 ++
 .../solr/search/TestCollapseQParserPlugin.java  |   17 +
 .../test/org/apache/solr/search/TestDocSet.java |    7 +-
 .../apache/solr/search/TestIndexSearcher.java   |    6 +-
 .../apache/solr/search/TestRankQueryPlugin.java |   14 +-
 .../apache/solr/search/TestSmileRequest.java    |    2 +-
 .../search/facet/TestJsonFacetRefinement.java   |  214 +++
 .../solr/search/facet/TestJsonFacets.java       |   96 +
 .../search/function/NvlValueSourceParser.java   |    6 +-
 .../search/join/BlockJoinFacetDistribTest.java  |   13 +-
 .../solr/search/join/TestScoreJoinQPScore.java  |    2 +-
 .../solr/search/stats/TestDistribIDF.java       |    7 +-
 .../solr/security/BasicAuthIntegrationTest.java |   83 +-
 .../solr/security/BasicAuthStandaloneTest.java  |  221 +++
 .../security/TestAuthorizationFramework.java    |    4 +-
 .../solr/uninverting/TestDocTermOrds.java       |   44 +-
 .../apache/solr/uninverting/TestFieldCache.java |  211 +-
 .../solr/uninverting/TestFieldCacheReopen.java  |   10 +-
 .../TestFieldCacheSanityChecker.java            |   18 +-
 .../uninverting/TestFieldCacheSortRandom.java   |    3 +-
 .../uninverting/TestFieldCacheVsDocValues.java  |   55 +-
 .../uninverting/TestFieldCacheWithThreads.java  |   64 +-
 .../solr/uninverting/TestLegacyFieldCache.java  |  147 +-
 .../solr/uninverting/TestUninvertingReader.java |   16 +-
 .../org/apache/solr/update/AutoCommitTest.java  |   29 +
 .../apache/solr/update/DocumentBuilderTest.java |    9 +-
 .../apache/solr/update/HardAutoCommitTest.java  |    2 +
 .../org/apache/solr/update/PeerSyncTest.java    |    4 +-
 ...PeerSyncWithIndexFingerprintCachingTest.java |  108 ++
 .../apache/solr/update/SolrIndexConfigTest.java |   12 +-
 .../processor/TemplateUpdateProcessorTest.java  |   48 +
 .../UpdateRequestProcessorFactoryTest.java      |   15 +
 .../solr/util/TestObjectReleaseTracker.java     |   12 +-
 .../org/apache/solr/util/UtilsToolTest.java     |  189 ++
 .../example-DIH/solr/db/conf/solrconfig.xml     |   84 +-
 .../example-DIH/solr/mail/conf/solrconfig.xml   |   84 +-
 .../example-DIH/solr/rss/conf/solrconfig.xml    |   84 +-
 .../example-DIH/solr/solr/conf/solrconfig.xml   |   85 +-
 .../example-DIH/solr/tika/conf/solrconfig.xml   |   85 +-
 solr/example/files/conf/solrconfig.xml          |   66 -
 solr/licenses/gimap-LICENSE-CDDL.txt            |  262 +--
 solr/licenses/javax.mail-LICENSE-CDDL.txt       |  262 +--
 .../licenses/javax.servlet-api-LICENSE-CDDL.txt |  137 --
 solr/licenses/jaxb-impl-LICENSE-CDDL.txt        |  262 +--
 .../jcl-over-slf4j-LICENSE-BSD_LIKE.txt         |   21 -
 solr/licenses/jcl-over-slf4j-LICENSE-MIT.txt    |   21 +
 solr/licenses/jul-to-slf4j-LICENSE-BSD_LIKE.txt |   21 -
 solr/licenses/jul-to-slf4j-LICENSE-MIT.txt      |   21 +
 solr/licenses/junit4-ant-2.3.4.jar.sha1         |    1 -
 solr/licenses/junit4-ant-2.4.0.jar.sha1         |    1 +
 solr/licenses/mail-LICENSE-CDDL.txt             |  263 ---
 solr/licenses/morfologik-fsa-2.1.0.jar.sha1     |    1 -
 solr/licenses/morfologik-fsa-2.1.1.jar.sha1     |    1 +
 solr/licenses/morfologik-polish-2.1.0.jar.sha1  |    1 -
 solr/licenses/morfologik-polish-2.1.1.jar.sha1  |    1 +
 .../licenses/morfologik-stemming-2.1.0.jar.sha1 |    1 -
 .../licenses/morfologik-stemming-2.1.1.jar.sha1 |    1 +
 .../randomizedtesting-runner-2.3.4.jar.sha1     |    1 -
 .../randomizedtesting-runner-2.4.0.jar.sha1     |    1 +
 solr/licenses/servlet-api-LICENSE-CDDL.txt      |  137 --
 solr/licenses/slf4j-LICENSE-BSD_LIKE.txt        |   21 -
 solr/licenses/slf4j-LICENSE-MIT.txt             |   21 +
 solr/server/resources/log4j.properties          |   13 +-
 .../basic_configs/conf/solrconfig.xml           |   66 -
 .../conf/solrconfig.xml                         |   65 -
 .../conf/params.json                            |    7 +-
 .../conf/solrconfig.xml                         |   66 -
 .../quickstart-admin-ui-facet-options.png       |  Bin 8787 -> 10646 bytes
 .../assets/images/quickstart-query-screen.png   |  Bin 400716 -> 278938 bytes
 solr/site/quickstart.mdtext                     |  279 +--
 solr/solrj/ivy.xml                              |    4 -
 .../org/apache/solr/client/solrj/SolrQuery.java |  248 +++
 .../solr/client/solrj/impl/CloudSolrClient.java |   50 +-
 .../solrj/impl/ConcurrentUpdateSolrClient.java  |    3 +-
 .../impl/DelegationTokenHttpSolrClient.java     |   73 +
 .../solr/client/solrj/impl/HttpClientUtil.java  |   15 +-
 .../solr/client/solrj/impl/HttpSolrClient.java  |   65 +-
 .../solrj/impl/Krb5HttpClientBuilder.java       |    3 +-
 .../client/solrj/impl/LBHttpSolrClient.java     |   94 +-
 .../impl/SolrHttpClientContextBuilder.java      |   12 +-
 .../apache/solr/client/solrj/io/ModelCache.java |  143 ++
 .../client/solrj/io/stream/CommitStream.java    |  260 +++
 .../client/solrj/io/stream/DaemonStream.java    |   44 +-
 .../client/solrj/io/stream/ExecutorStream.java  |  224 +++
 .../client/solrj/io/stream/FacetStream.java     |    8 +
 .../client/solrj/io/stream/FetchStream.java     |  314 +++
 .../client/solrj/io/stream/ModelStream.java     |  200 ++
 .../solrj/io/stream/ScoreNodesStream.java       |   25 +-
 .../client/solrj/io/stream/StreamContext.java   |   11 +
 .../client/solrj/io/stream/TopicStream.java     |   10 +-
 .../client/solrj/io/stream/UpdateStream.java    |    3 +-
 .../solrj/io/stream/expr/StreamFactory.java     |   79 +-
 .../solrj/request/CollectionAdminRequest.java   |  256 ++-
 .../client/solrj/request/CoreAdminRequest.java  |   12 +
 .../solr/client/solrj/request/CoreStatus.java   |   48 +
 .../client/solrj/request/UpdateRequest.java     |    8 +
 .../solrj/response/DelegationTokenResponse.java |   13 +-
 .../client/solrj/response/LukeResponse.java     |   15 +-
 .../client/solrj/response/QueryResponse.java    |    8 +
 .../org/apache/solr/common/MapSerializable.java |   30 +
 .../solr/common/cloud/ConnectionManager.java    |   16 +-
 .../org/apache/solr/common/cloud/Slice.java     |   21 +-
 .../apache/solr/common/cloud/SolrZkClient.java  |   25 +-
 ...ParamsAllAndReadonlyDigestZkACLProvider.java |    5 +-
 .../apache/solr/common/cloud/ZkStateReader.java |   53 +-
 .../solr/common/cloud/rule/ImplicitSnitch.java  |  160 ++
 .../solr/common/cloud/rule/RemoteCallback.java  |   23 +
 .../apache/solr/common/cloud/rule/Snitch.java   |   36 +
 .../solr/common/cloud/rule/SnitchContext.java   |  103 +
 .../solr/common/cloud/rule/package-info.java    |   23 +
 .../solr/common/params/CollectionParams.java    |    3 +
 .../apache/solr/common/params/CommonParams.java |  140 +-
 .../solr/common/params/CoreAdminParams.java     |    5 +
 .../solr/common/params/MoreLikeThisParams.java  |    3 +
 .../apache/solr/common/params/SolrParams.java   |  146 +-
 .../java/org/apache/solr/common/util/Cache.java |   26 +-
 .../apache/solr/common/util/JavaBinCodec.java   |    7 +
 .../apache/solr/common/util/MapBackedCache.java |   57 +
 .../org/apache/solr/common/util/NamedList.java  |   31 +-
 .../solr/common/util/ObjectReleaseTracker.java  |   21 +-
 .../org/apache/solr/common/util/RetryUtil.java  |   11 +-
 .../solrj/solr/configsets/ml/conf/schema.xml    |    2 +-
 .../solr/multicore/core0/conf/solrconfig.xml    |    1 -
 .../solr/multicore/core1/conf/solrconfig.xml    |    2 -
 .../solr/client/solrj/SolrExampleTests.java     |   60 +-
 .../apache/solr/client/solrj/SolrQueryTest.java |   25 +
 .../client/solrj/SolrSchemalessExampleTest.java |   16 +-
 .../solrj/embedded/SolrExampleJettyTest.java    |    6 +-
 .../solrj/impl/HttpSolrClientConPoolTest.java   |  189 ++
 .../impl/HttpSolrClientSSLAuthConPoolTest.java  |   40 +
 .../solrj/io/graph/GraphExpressionTest.java     |   75 +
 .../solrj/io/stream/StreamExpressionTest.java   | 1217 ++++++++++--
 .../solr/client/solrj/request/SchemaTest.java   |   11 +-
 .../cloud/TestCollectionStateWatchers.java      |    6 +-
 .../solr/common/cloud/TestZkConfigManager.java  |   10 +-
 .../apache/solr/common/util/NamedListTest.java  |   18 +
 .../solr/common/util/TestJavaBinCodec.java      |   52 +-
 solr/test-framework/build.xml                   |    3 -
 .../java/org/apache/solr/SolrTestCaseHS.java    |    5 +-
 .../java/org/apache/solr/SolrTestCaseJ4.java    |   68 +-
 .../solr/cloud/AbstractDistribZkTestBase.java   |   35 +-
 .../apache/solr/cloud/AbstractZkTestCase.java   |    6 -
 .../apache/solr/cloud/MiniSolrCloudCluster.java |  141 +-
 .../apache/solr/cloud/SolrCloudTestCase.java    |  150 +-
 .../org/apache/solr/cloud/ZkTestServer.java     |   11 +-
 .../src/java/org/apache/solr/util/LogLevel.java |   91 +
 .../org/apache/solr/util/RestTestHarness.java   |   14 +-
 .../src/test-files/log4j.properties             |   11 +
 .../apache/solr/TestLogLevelAnnotations.java    |   47 +
 .../apache/solr/cloud/JettySolrRunnerTest.java  |   66 +
 .../solr/cloud/MiniSolrCloudClusterTest.java    |  105 +
 solr/webapp/web/WEB-INF/web.xml                 |   17 +
 solr/webapp/web/css/angular/dashboard.css       |    8 +-
 solr/webapp/web/css/angular/schema.css          |   23 +
 .../web/js/angular/controllers/core-overview.js |   83 -
 .../webapp/web/js/angular/controllers/schema.js |   60 +-
 solr/webapp/web/partials/core_overview.html     |   17 -
 solr/webapp/web/partials/schema.html            |   15 +-
 1252 files changed, 67552 insertions(+), 26304 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27baf3fb/solr/core/src/java/org/apache/solr/core/CoreContainer.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27baf3fb/solr/core/src/java/org/apache/solr/core/PluginBag.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27baf3fb/solr/core/src/java/org/apache/solr/handler/DumpRequestHandler.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27baf3fb/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27baf3fb/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
----------------------------------------------------------------------
diff --cc solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
index a21a2c9,b6cb596..37e7258
--- a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
@@@ -75,9 -74,8 +74,10 @@@ import org.apache.solr.security.Permiss
  import org.apache.solr.util.CommandOperation;
  import org.apache.solr.util.DefaultSolrThreadFactory;
  import org.apache.solr.util.RTimer;
+ import org.apache.solr.util.SolrPluginUtils;
  import org.apache.solr.util.plugin.SolrCoreAware;
 +import org.apache.solr.api.Api;
 +import org.apache.solr.api.ApiBag;
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
  

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27baf3fb/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27baf3fb/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandler.java
----------------------------------------------------------------------
diff --cc solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandler.java
index 238e7aa,88e4b01..494dced
--- a/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/SecurityConfHandler.java
@@@ -17,18 -17,16 +17,19 @@@
  package org.apache.solr.handler.admin;
  
  import java.io.IOException;
+ import java.io.InputStream;
+ import java.lang.invoke.MethodHandles;
  import java.util.ArrayList;
 +import java.util.Collection;
  import java.util.Collections;
  import java.util.LinkedHashMap;
  import java.util.List;
  import java.util.Map;
  import java.util.Objects;
  
 +import com.google.common.collect.ImmutableList;
- import org.apache.solr.api.ApiBag.ReqHandlerToApi;
++import org.apache.solr.api.ApiBag;
  import org.apache.solr.common.SolrException;
- import org.apache.solr.common.cloud.ZkStateReader.ConfigData;
  import org.apache.solr.common.params.CommonParams;
  import org.apache.solr.common.util.Utils;
  import org.apache.solr.core.CoreContainer;
@@@ -36,20 -34,18 +37,24 @@@ import org.apache.solr.handler.RequestH
  import org.apache.solr.handler.SolrConfigHandler;
  import org.apache.solr.request.SolrQueryRequest;
  import org.apache.solr.response.SolrQueryResponse;
- import org.apache.solr.security.AuthorizationContext;
 +import org.apache.solr.security.AuthenticationPlugin;
+ import org.apache.solr.security.AuthorizationContext;
 +import org.apache.solr.security.AuthorizationPlugin;
  import org.apache.solr.security.ConfigEditablePlugin;
  import org.apache.solr.security.PermissionNameProvider;
  import org.apache.solr.util.CommandOperation;
 +import org.apache.solr.api.Api;
- import org.apache.solr.api.ApiBag;
++import org.apache.solr.api.ApiBag.ReqHandlerToApi;
 +import org.apache.solr.api.SpecProvider;
 +import org.apache.solr.util.JsonSchemaValidator;
- import org.apache.zookeeper.KeeperException;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
  
- public class SecurityConfHandler extends RequestHandlerBase implements PermissionNameProvider {
-   private CoreContainer cores;
+ import static org.apache.solr.common.SolrException.ErrorCode.SERVER_ERROR;
+ 
+ public abstract class SecurityConfHandler extends RequestHandlerBase implements PermissionNameProvider {
+   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+   protected CoreContainer cores;
  
    public SecurityConfHandler(CoreContainer coreContainer) {
      this.cores = coreContainer;
@@@ -184,66 -163,81 +172,142 @@@
      return "Edit or read security configuration";
    }
  
+   /**
+    * Gets security.json from source
+    */
+   public abstract SecurityConfig getSecurityConfig(boolean getFresh);
+ 
+   /**
+    * Persist security.json to the source, optionally with a version
+    */
+   protected abstract boolean persistConf(SecurityConfig securityConfig) throws IOException;
+ 
+   /**
+    * Object to hold security.json as nested <code>Map&lt;String,Object&gt;</code> and optionally its version.
+    * The version property is optional and defaults to -1 if not initialized.
+    * The data object defaults to EMPTY_MAP if not set
+    */
+   public static class SecurityConfig {
+     private Map<String, Object> data = Collections.EMPTY_MAP;
+     private int version = -1;
+ 
+     public SecurityConfig() {}
+ 
+     /**
+      * Sets the data as a Map
+      * @param data a Map
+      * @return SecurityConf object (builder pattern)
+      */
+     public SecurityConfig setData(Map<String, Object> data) {
+       this.data = data;
+       return this;
+     }
+ 
+     /**
+      * Sets the data as an Object, but the object needs to be of type Map
+      * @param data an Object of type Map&lt;String,Object&gt;
+      * @return SecurityConf object (builder pattern)
+      */
+     public SecurityConfig setData(Object data) {
+       if (data instanceof Map) {
+         this.data = (Map<String, Object>) data;
+         return this;
+       } else {
+         throw new SolrException(SERVER_ERROR, "Illegal format when parsing security.json, not object");
+       }
+     }
+ 
+     /**
+      * Sets version
+      * @param version integer for version. Depends on underlying storage
+      * @return SecurityConf object (builder pattern)
+      */
+     public SecurityConfig setVersion(int version) {
+       this.version = version;
+       return this;
+     }
+ 
+     public Map<String, Object> getData() {
+       return data;
+     }
+ 
+     public int getVersion() {
+       return version;
+     }
+ 
+     /**
+      * Set data from input stream
+      * @param securityJsonInputStream an input stream for security.json
+      * @return this (builder pattern)
+      */
+     public SecurityConfig setData(InputStream securityJsonInputStream) {
+       return setData(Utils.fromJSON(securityJsonInputStream));
+     }
+ 
+     public String toString() {
+       return "SecurityConfig: version=" + version + ", data=" + Utils.toJSONString(data);
+     } 
+   }
 +
 +  private Collection<Api> apis;
 +  private AuthenticationPlugin authcPlugin;
 +  private AuthorizationPlugin authzPlugin;
 +
 +  @Override
 +  public Collection<Api> getApis() {
 +    if (apis == null) {
 +      synchronized (this) {
 +        if (apis == null) {
 +          Collection<Api> apis = new ArrayList<>();
 +          final SpecProvider authcCommands = ApiBag.getSpec("cluster.security.authentication.Commands");
 +          final SpecProvider authzCommands = ApiBag.getSpec("cluster.security.authorization.Commands");
 +          apis.add(new ReqHandlerToApi(this, ApiBag.getSpec("cluster.security.authentication")));
 +          apis.add(new ReqHandlerToApi(this, ApiBag.getSpec("cluster.security.authorization")));
 +          SpecProvider authcSpecProvider = () -> {
 +            AuthenticationPlugin authcPlugin = cores.getAuthenticationPlugin();
 +            return authcPlugin != null && authcPlugin instanceof SpecProvider ?
 +                ((SpecProvider) authcPlugin).getSpec() :
 +                authcCommands.getSpec();
 +          };
 +
-           apis.add(new ApiBag.ReqHandlerToApi(this, authcSpecProvider) {
++          apis.add(new ReqHandlerToApi(this, authcSpecProvider) {
 +            @Override
 +            public synchronized Map<String, JsonSchemaValidator> getCommandSchema() {
-               //it is possible that the Auhentication plugin is modified since the last call. invalidate the
++              //it is possible that the Authentication plugin is modified since the last call. invalidate
 +              // the cached commandSchema
 +              if(SecurityConfHandler.this.authcPlugin != cores.getAuthenticationPlugin()) commandSchema = null;
 +              SecurityConfHandler.this.authcPlugin = cores.getAuthenticationPlugin();
 +              return super.getCommandSchema();
 +            }
 +          });
 +
 +          SpecProvider authzSpecProvider = () -> {
 +            AuthorizationPlugin authzPlugin = cores.getAuthorizationPlugin();
 +            return authzPlugin != null && authzPlugin instanceof SpecProvider ?
 +                ((SpecProvider) authzPlugin).getSpec() :
 +                authzCommands.getSpec();
 +          };
 +          apis.add(new ApiBag.ReqHandlerToApi(this, authzSpecProvider) {
 +            @Override
 +            public synchronized Map<String, JsonSchemaValidator> getCommandSchema() {
 +              //it is possible that the Authorization plugin is modified since the last call. invalidate
 +              // the cached commandSchema
 +              if(SecurityConfHandler.this.authzPlugin != cores.getAuthorizationPlugin()) commandSchema = null;
 +              SecurityConfHandler.this.authzPlugin = cores.getAuthorizationPlugin();
 +              return super.getCommandSchema();
 +            }
 +          });
 +
 +          this.apis = ImmutableList.copyOf(apis);
 +        }
 +      }
 +    }
 +    return this.apis;
 +  }
 +
 +  @Override
 +  public Boolean registerV2() {
 +    return Boolean.TRUE;
 +  }
  }
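
A minimal sketch (not part of this commit) of how a concrete subclass can
satisfy the two new abstract hooks against a plain local file. The class name
FileSecurityConfHandler and the securityJsonPath field are hypothetical; the
implementations actually added by this change are SecurityConfHandlerLocal and
SecurityConfHandlerZk (see the file list above).

    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;

    import org.apache.solr.common.SolrException;
    import org.apache.solr.common.util.Utils;
    import org.apache.solr.core.CoreContainer;

    public class FileSecurityConfHandler extends SecurityConfHandler {
      private final Path securityJsonPath; // hypothetical location of security.json

      public FileSecurityConfHandler(CoreContainer cores, Path securityJsonPath) {
        super(cores);
        this.securityJsonPath = securityJsonPath;
      }

      @Override
      public SecurityConfig getSecurityConfig(boolean getFresh) {
        // A plain file has no stale cached copy, so getFresh is ignored here.
        if (!Files.exists(securityJsonPath)) {
          return new SecurityConfig(); // empty data, version -1
        }
        try (InputStream in = Files.newInputStream(securityJsonPath)) {
          return new SecurityConfig().setData(in); // parses JSON via Utils.fromJSON
        } catch (IOException e) {
          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
        }
      }

      @Override
      protected boolean persistConf(SecurityConfig securityConfig) throws IOException {
        // No optimistic concurrency for a local file; a ZooKeeper-backed store
        // would pass securityConfig.getVersion() when writing the znode.
        Files.write(securityJsonPath, Utils.toJSON(securityConfig.getData()));
        return true;
      }
    }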
  

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27baf3fb/solr/core/src/java/org/apache/solr/schema/SchemaManager.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27baf3fb/solr/core/src/java/org/apache/solr/security/BasicAuthPlugin.java
----------------------------------------------------------------------
diff --cc solr/core/src/java/org/apache/solr/security/BasicAuthPlugin.java
index 833c345,5c0717b..f1665c7
--- a/solr/core/src/java/org/apache/solr/security/BasicAuthPlugin.java
+++ b/solr/core/src/java/org/apache/solr/security/BasicAuthPlugin.java
@@@ -43,9 -41,9 +43,9 @@@ import org.apache.solr.api.SpecProvider
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
  
 -public class BasicAuthPlugin extends AuthenticationPlugin implements ConfigEditablePlugin {
 +public class BasicAuthPlugin extends AuthenticationPlugin implements ConfigEditablePlugin , SpecProvider {
    private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-   private AuthenticationProvider zkAuthentication;
+   private AuthenticationProvider authenticationProvider;
    private final static ThreadLocal<Header> authHeader = new ThreadLocal<>();
    private boolean blockUnknown = false;
  
@@@ -171,10 -170,6 +172,10 @@@
      Map<String, String> getPromptHeaders();
    }
  
 +  @Override
 +  public ValidatingJsonMap getSpec() {
-     return zkAuthentication.getSpec();
++    return authenticationProvider.getSpec();
 +  }
    public boolean getBlockUnknown(){
      return blockUnknown;
    }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27baf3fb/solr/core/src/java/org/apache/solr/security/Sha256AuthenticationProvider.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27baf3fb/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27baf3fb/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
----------------------------------------------------------------------
diff --cc solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
index c405bb9,5a4cfb6..ed81466
--- a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
+++ b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
@@@ -111,10 -114,25 +115,25 @@@ public class SolrDispatchFilter extend
  
    public static final String SOLRHOME_ATTRIBUTE = "solr.solr.home";
  
+   public static final String SOLR_LOG_MUTECONSOLE = "solr.log.muteconsole";
+ 
+   public static final String SOLR_LOG_LEVEL = "solr.log.level";
+ 
    @Override
 -  public void init(FilterConfig config) throws ServletException
 +  public void init(final FilterConfig config) throws ServletException
    {
-     log.info("SolrDispatchFilter.init(): {}", this.getClass().getClassLoader());
+     log.trace("SolrDispatchFilter.init(): {}", this.getClass().getClassLoader());
+ 
+     StartupLoggingUtils.checkLogDir();
+     logWelcomeBanner();
+     String muteConsole = System.getProperty(SOLR_LOG_MUTECONSOLE);
+     if (muteConsole != null && !Arrays.asList("false","0","off","no").contains(muteConsole.toLowerCase(Locale.ROOT))) {
+       StartupLoggingUtils.muteConsole();
+     }
+     String logLevel = System.getProperty(SOLR_LOG_LEVEL);
+     if (logLevel != null) {
+       StartupLoggingUtils.changeLogLevel(logLevel);
+     }
  
      String exclude = config.getInitParameter("excludePatterns");
      if(exclude != null) {
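
Both properties are ordinary JVM system properties, so (as a usage sketch, not
taken from this commit) a Jetty-style launch such as

    java -Dsolr.log.level=WARN -Dsolr.log.muteconsole=true -jar start.jar

raises the startup log level and silences the console appender. Per the check
above, any value of solr.log.muteconsole other than a case-insensitive "false",
"0", "off" or "no" enables muting.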

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27baf3fb/solr/core/src/resources/ImplicitPlugins.json
----------------------------------------------------------------------
diff --cc solr/core/src/resources/ImplicitPlugins.json
index ee89fb8,fdc6b3a..5d655c7
--- a/solr/core/src/resources/ImplicitPlugins.json
+++ b/solr/core/src/resources/ImplicitPlugins.json
@@@ -16,18 -19,15 +19,20 @@@
        }
      },
      "/update/json/docs": {
+       "useParams":"_UPDATE_JSON_DOCS",
        "class": "solr.UpdateRequestHandler",
 +      "useParams": "update_json_docs",
        "invariants": {
          "update.contentType": "application/json",
          "json.command": "false"
        }
      },
 +    "update":{
 +      "class":"solr.UpdateRequestHandlerApi",
 +      "useParams": "update_json_docs"
 +    },
      "/config": {
+       "useParams":"_CONFIG",
        "class": "solr.SolrConfigHandler"
      },
      "/schema": {


[08/50] [abbrv] lucene-solr:apiv2: Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/lucene-solr

Posted by sa...@apache.org.
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/lucene-solr


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c8906b2f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c8906b2f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c8906b2f

Branch: refs/heads/apiv2
Commit: c8906b2f2d1f523255477823a6c50983d94ae88a
Parents: e6ce903 1344d89
Author: Noble Paul <no...@apache.org>
Authored: Fri Oct 28 08:25:26 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Fri Oct 28 08:25:26 2016 +0530

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |   3 +
 .../lucene/analysis/custom/CustomAnalyzer.java  |   2 +-
 .../lucene/collation/CollationKeyAnalyzer.java  |   2 +-
 .../org/apache/lucene/analysis/Analyzer.java    |   9 +-
 .../apache/lucene/analysis/AnalyzerWrapper.java |  50 +++-
 .../analysis/DelegatingAnalyzerWrapper.java     |  14 +-
 .../analysis/TestDelegatingAnalyzerWrapper.java | 107 ++++++++
 .../lucene/analysis/MockBytesAnalyzer.java      |   2 +-
 solr/CHANGES.txt                                |   7 +
 solr/bin/solr                                   |  39 ++-
 solr/bin/solr.in.sh                             |   6 +
 .../apache/solr/analysis/TokenizerChain.java    |   2 +-
 .../component/MoreLikeThisComponent.java        |   2 +-
 .../src/java/org/apache/solr/util/SolrCLI.java  |   2 +-
 .../org/apache/solr/client/solrj/SolrQuery.java | 248 +++++++++++++++++++
 .../client/solrj/response/QueryResponse.java    |   8 +
 .../solr/common/params/MoreLikeThisParams.java  |   3 +
 .../solr/client/solrj/SolrExampleTests.java     |  21 +-
 .../apache/solr/client/solrj/SolrQueryTest.java |  25 ++
 19 files changed, 521 insertions(+), 31 deletions(-)
----------------------------------------------------------------------



[35/50] [abbrv] lucene-solr:apiv2: Add some debugging to HdfsRecoveryZkTest

Posted by sa...@apache.org.
Add some debugging to HdfsRecoveryZkTest


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/b6ff3fda
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/b6ff3fda
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/b6ff3fda

Branch: refs/heads/apiv2
Commit: b6ff3fdace1ca333529d1a4f00cb49c8211bcba9
Parents: 359f981
Author: Alan Woodward <ro...@apache.org>
Authored: Tue Nov 1 16:10:51 2016 +0000
Committer: Alan Woodward <ro...@apache.org>
Committed: Tue Nov 1 16:10:58 2016 +0000

----------------------------------------------------------------------
 solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java | 3 ++-
 .../src/test/org/apache/solr/cloud/hdfs/HdfsRecoveryZkTest.java   | 2 ++
 2 files changed, 4 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b6ff3fda/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java b/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java
index e725127..90f6856 100644
--- a/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java
+++ b/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java
@@ -128,6 +128,7 @@ public class HdfsTransactionLog extends TransactionLog {
       success = true;
 
       assert ObjectReleaseTracker.track(this);
+      log.debug("Opening new tlog {}", this);
       
     } catch (IOException e) {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
@@ -334,7 +335,7 @@ public class HdfsTransactionLog extends TransactionLog {
   public void close() {
     try {
       if (debug) {
-        log.debug("Closing tlog" + this);
+        log.debug("Closing tlog {}", this);
       }
 
       doCloseOutput();
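
The second hunk above swaps string concatenation for SLF4J's parameterized
form. A minimal sketch of the difference, using an illustrative class name
that is not part of the commit:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class TlogLoggingSketch {
      private static final Logger log = LoggerFactory.getLogger(TlogLoggingSketch.class);

      void close(Object tlog) {
        // Concatenation builds the message string even when DEBUG is off,
        // and the original also lacked a space after "tlog":
        //   log.debug("Closing tlog" + tlog);
        // The {} placeholder defers formatting until the level check passes:
        log.debug("Closing tlog {}", tlog);
      }
    }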

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b6ff3fda/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsRecoveryZkTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsRecoveryZkTest.java b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsRecoveryZkTest.java
index 2dfc32b..53823ef 100644
--- a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsRecoveryZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsRecoveryZkTest.java
@@ -22,6 +22,7 @@ import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.cloud.RecoveryZkTest;
 import org.apache.solr.common.cloud.ZkConfigManager;
 import org.apache.solr.util.BadHdfsThreadsFilter;
+import org.apache.solr.util.LogLevel;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 
@@ -30,6 +31,7 @@ import org.junit.BeforeClass;
 @ThreadLeakFilters(defaultFilters = true, filters = {
     BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
 })
+@LogLevel("org.apache.solr.update.HdfsTransactionLog=DEBUG")
 public class HdfsRecoveryZkTest extends RecoveryZkTest {
 
   private static MiniDFSCluster dfsCluster;
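
The @LogLevel annotation applied above comes from Solr's test framework
(org.apache.solr.util.LogLevel) and raises the named logger to the requested
level while the annotated test runs. A minimal sketch with a hypothetical
test class name:

    import org.apache.solr.SolrTestCaseJ4;
    import org.apache.solr.util.LogLevel;

    // Hypothetical test class: HdfsTransactionLog logs at DEBUG only while
    // this class's tests run, so the new "Opening new tlog" message shows up.
    @LogLevel("org.apache.solr.update.HdfsTransactionLog=DEBUG")
    public class MyTlogDebugTest extends SolrTestCaseJ4 {
    }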


[47/50] [abbrv] lucene-solr:apiv2: SOLR-8542: Add maven config and improve IntelliJ config.

Posted by sa...@apache.org.
SOLR-8542: Add maven config and improve IntelliJ config.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/9eb806a2
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/9eb806a2
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/9eb806a2

Branch: refs/heads/apiv2
Commit: 9eb806a23339a4c6ade88ac86da889b8b889a936
Parents: d0e32f3
Author: Steve Rowe <sa...@apache.org>
Authored: Wed Nov 2 09:57:35 2016 -0400
Committer: Steve Rowe <sa...@apache.org>
Committed: Wed Nov 2 09:57:54 2016 -0400

----------------------------------------------------------------------
 dev-tools/idea/.idea/modules.xml                |  4 +-
 dev-tools/idea/.idea/workspace.xml              | 72 ++++++++++--------
 dev-tools/idea/solr/contrib/ltr/ltr.iml         |  6 +-
 .../maven/solr/contrib/ltr/pom.xml.template     | 80 ++++++++++++++++++++
 dev-tools/maven/solr/contrib/pom.xml.template   |  1 +
 5 files changed, 127 insertions(+), 36 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9eb806a2/dev-tools/idea/.idea/modules.xml
----------------------------------------------------------------------
diff --git a/dev-tools/idea/.idea/modules.xml b/dev-tools/idea/.idea/modules.xml
index 5d2d106..d6b44f4 100644
--- a/dev-tools/idea/.idea/modules.xml
+++ b/dev-tools/idea/.idea/modules.xml
@@ -49,18 +49,18 @@
       <module group="Solr" filepath="$PROJECT_DIR$/solr/test-framework/solr-test-framework.iml" />
 
       <module group="Solr/Contrib" filepath="$PROJECT_DIR$/solr/contrib/analysis-extras/analysis-extras.iml" />
+      <module group="Solr/Contrib" filepath="$PROJECT_DIR$/solr/contrib/analytics/analytics.iml" />
       <module group="Solr/Contrib" filepath="$PROJECT_DIR$/solr/contrib/clustering/clustering.iml" />
       <module group="Solr/Contrib" filepath="$PROJECT_DIR$/solr/contrib/dataimporthandler-extras/dataimporthandler-extras.iml" />
       <module group="Solr/Contrib" filepath="$PROJECT_DIR$/solr/contrib/dataimporthandler/dataimporthandler.iml" />
       <module group="Solr/Contrib" filepath="$PROJECT_DIR$/solr/contrib/extraction/extraction.iml" />
       <module group="Solr/Contrib" filepath="$PROJECT_DIR$/solr/contrib/langid/langid.iml" />
+      <module group="Solr/Contrib" filepath="$PROJECT_DIR$/solr/contrib/ltr/ltr.iml" />
       <module group="Solr/Contrib" filepath="$PROJECT_DIR$/solr/contrib/map-reduce/map-reduce.iml" />
       <module group="Solr/Contrib" filepath="$PROJECT_DIR$/solr/contrib/morphlines-cell/morphlines-cell.iml" />
       <module group="Solr/Contrib" filepath="$PROJECT_DIR$/solr/contrib/morphlines-core/morphlines-core.iml" />
       <module group="Solr/Contrib" filepath="$PROJECT_DIR$/solr/contrib/uima/uima.iml" />
       <module group="Solr/Contrib" filepath="$PROJECT_DIR$/solr/contrib/velocity/velocity.iml" />
-      <module group="Solr/Contrib" filepath="$PROJECT_DIR$/solr/contrib/analytics/analytics.iml" />
-      <module group="Solr/Contrib" filepath="$PROJECT_DIR$/solr/contrib/ltr/ltr.iml" />
     </modules>
   </component>
 </project>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9eb806a2/dev-tools/idea/.idea/workspace.xml
----------------------------------------------------------------------
diff --git a/dev-tools/idea/.idea/workspace.xml b/dev-tools/idea/.idea/workspace.xml
index 7750e90..65be571 100644
--- a/dev-tools/idea/.idea/workspace.xml
+++ b/dev-tools/idea/.idea/workspace.xml
@@ -228,6 +228,14 @@
       <option name="TEST_SEARCH_SCOPE"><value defaultName="singleModule" /></option>
       <patterns><pattern testClass=".*\.Test[^.]*|.*\.[^.]*Test" /></patterns>
     </configuration>
+    <configuration default="false" name="solrcloud" type="Application" factoryName="Application" singleton="true">
+      <option name="MAIN_CLASS_NAME" value="org.eclipse.jetty.start.Main" />
+      <option name="VM_PARAMETERS" value="-DzkRun -Dhost=127.0.0.1 -Duser.timezone=UTC -Djetty.home=$PROJECT_DIR$/solr/server -Dsolr.solr.home=$PROJECT_DIR$/solr/server/solr -Dsolr.install.dir=$PROJECT_DIR$/solr -Dsolr.log=$PROJECT_DIR$/solr/server/logs/solr.log" />
+      <option name="PROGRAM_PARAMETERS" value="--module=http" />
+      <option name="WORKING_DIRECTORY" value="file://$PROJECT_DIR$/solr/server" />
+      <option name="PARENT_ENVS" value="true" />
+      <module name="server" />
+    </configuration>
     <configuration default="false" name="Solr core" type="JUnit" factoryName="JUnit">
       <module name="solr-core-tests" />
       <option name="TEST_OBJECT" value="pattern" />
@@ -252,6 +260,14 @@
       <option name="TEST_SEARCH_SCOPE"><value defaultName="singleModule" /></option>
       <patterns><pattern testClass=".*\.Test[^.]*|.*\.[^.]*Test" /></patterns>
     </configuration>
+    <configuration default="false" name="Solr analytics contrib" type="JUnit" factoryName="JUnit">
+      <module name="analytics" />
+      <option name="TEST_OBJECT" value="pattern" />
+      <option name="WORKING_DIRECTORY" value="file://$PROJECT_DIR$/idea-build/solr/contrib/solr-analytics" />
+      <option name="VM_PARAMETERS" value="-ea" />
+      <option name="TEST_SEARCH_SCOPE"><value defaultName="singleModule" /></option>
+      <patterns><pattern testClass=".*\.Test[^.]*|.*\.[^.]*Test" /></patterns>
+    </configuration>
     <configuration default="false" name="Solr clustering contrib" type="JUnit" factoryName="JUnit">
       <module name="clustering" />
       <option name="TEST_OBJECT" value="pattern" />
@@ -292,6 +308,14 @@
       <option name="TEST_SEARCH_SCOPE"><value defaultName="singleModule" /></option>
       <patterns><pattern testClass=".*\.Test[^.]*|.*\.[^.]*Test" /></patterns>
     </configuration>
+    <configuration default="false" name="Solr ltr contrib" type="JUnit" factoryName="JUnit">
+      <module name="ltr" />
+      <option name="TEST_OBJECT" value="pattern" />
+      <option name="WORKING_DIRECTORY" value="file://$PROJECT_DIR$/idea-build/solr/contrib/ltr" />
+      <option name="VM_PARAMETERS" value="-ea" />
+      <option name="TEST_SEARCH_SCOPE"><value defaultName="singleModule" /></option>
+      <patterns><pattern testClass=".*\.Test[^.]*|.*\.[^.]*Test" /></patterns>
+    </configuration>
     <configuration default="false" name="Solr map-reduce contrib" type="JUnit" factoryName="JUnit">
       <module name="map-reduce" />
       <option name="TEST_OBJECT" value="pattern" />
@@ -332,24 +356,8 @@
       <option name="TEST_SEARCH_SCOPE"><value defaultName="singleModule" /></option>
       <patterns><pattern testClass=".*\.Test[^.]*|.*\.[^.]*Test" /></patterns>
     </configuration>
-    <configuration default="false" name="Solr analytics contrib" type="JUnit" factoryName="JUnit">
-      <module name="analytics" />
-      <option name="TEST_OBJECT" value="pattern" />
-      <option name="WORKING_DIRECTORY" value="file://$PROJECT_DIR$/idea-build/solr/contrib/solr-analytics" />
-      <option name="VM_PARAMETERS" value="-ea" />
-      <option name="TEST_SEARCH_SCOPE"><value defaultName="singleModule" /></option>
-      <patterns><pattern testClass=".*\.Test[^.]*|.*\.[^.]*Test" /></patterns>
-    </configuration>
-    <configuration default="false" name="solrcloud" type="Application" factoryName="Application" singleton="true">
-      <option name="MAIN_CLASS_NAME" value="org.eclipse.jetty.start.Main" />
-      <option name="VM_PARAMETERS" value="-DzkRun -Dhost=127.0.0.1 -Duser.timezone=UTC -Djetty.home=$PROJECT_DIR$/solr/server -Dsolr.solr.home=$PROJECT_DIR$/solr/server/solr -Dsolr.install.dir=$PROJECT_DIR$/solr -Dsolr.log=$PROJECT_DIR$/solr/server/logs/solr.log" />
-      <option name="PROGRAM_PARAMETERS" value="--module=http" />
-      <option name="WORKING_DIRECTORY" value="file://$PROJECT_DIR$/solr/server" />
-      <option name="PARENT_ENVS" value="true" />
-      <module name="server" />
-    </configuration>
 
-    <list size="42">
+    <list size="44">
       <item index="0" class="java.lang.String" itemvalue="JUnit.Lucene core" />
       <item index="1" class="java.lang.String" itemvalue="JUnit.Module analyzers-common" />
       <item index="2" class="java.lang.String" itemvalue="JUnit.Module analyzers-icu" />
@@ -378,20 +386,22 @@
       <item index="25" class="java.lang.String" itemvalue="JUnit.Module spatial-extras" />
       <item index="26" class="java.lang.String" itemvalue="JUnit.Module spatial3d" />
       <item index="27" class="java.lang.String" itemvalue="JUnit.Module suggest" />
-      <item index="28" class="java.lang.String" itemvalue="JUnit.Solr core" />
-      <item index="29" class="java.lang.String" itemvalue="JUnit.Solr analysis-extras contrib" />
-      <item index="30" class="java.lang.String" itemvalue="JUnit.Solr clustering contrib" />
-      <item index="31" class="java.lang.String" itemvalue="JUnit.Solr dataimporthandler contrib" />
-      <item index="32" class="java.lang.String" itemvalue="JUnit.Solr dataimporthandler-extras contrib" />
-      <item index="33" class="java.lang.String" itemvalue="JUnit.Solr extraction contrib" />
-      <item index="34" class="java.lang.String" itemvalue="JUnit.Solr map-reduce contrib" />
-      <item index="35" class="java.lang.String" itemvalue="JUnit.Solr morphlines-cell contrib" />
-      <item index="36" class="java.lang.String" itemvalue="JUnit.Solr morphlines-core contrib" />
-      <item index="37" class="java.lang.String" itemvalue="JUnit.Solr langid contrib" />
-      <item index="38" class="java.lang.String" itemvalue="JUnit.Solr uima contrib" />
-      <item index="39" class="java.lang.String" itemvalue="JUnit.Solr velocity contrib" />
-      <item index="40" class="java.lang.String" itemvalue="JUnit.Solrj" />
-      <item index="41" class="java.lang.String" itemvalue="Application.solrcloud" />
+      <item index="28" class="java.lang.String" itemvalue="Application.solrcloud" />
+      <item index="29" class="java.lang.String" itemvalue="JUnit.Solr core" />
+      <item index="30" class="java.lang.String" itemvalue="JUnit.Solrj" />
+      <item index="31" class="java.lang.String" itemvalue="JUnit.Solr analysis-extras contrib" />
+      <item index="32" class="java.lang.String" itemvalue="JUnit.Solr analytics contrib" />
+      <item index="33" class="java.lang.String" itemvalue="JUnit.Solr clustering contrib" />
+      <item index="34" class="java.lang.String" itemvalue="JUnit.Solr dataimporthandler contrib" />
+      <item index="35" class="java.lang.String" itemvalue="JUnit.Solr dataimporthandler-extras contrib" />
+      <item index="36" class="java.lang.String" itemvalue="JUnit.Solr extraction contrib" />
+      <item index="37" class="java.lang.String" itemvalue="JUnit.Solr map-reduce contrib" />
+      <item index="38" class="java.lang.String" itemvalue="JUnit.Solr morphlines-cell contrib" />
+      <item index="39" class="java.lang.String" itemvalue="JUnit.Solr morphlines-core contrib" />
+      <item index="40" class="java.lang.String" itemvalue="JUnit.Solr langid contrib" />
+      <item index="41" class="java.lang.String" itemvalue="JUnit.Solr ltr contrib" />
+      <item index="42" class="java.lang.String" itemvalue="JUnit.Solr uima contrib" />
+      <item index="43" class="java.lang.String" itemvalue="JUnit.Solr velocity contrib" />
     </list>
   </component>
 </project>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9eb806a2/dev-tools/idea/solr/contrib/ltr/ltr.iml
----------------------------------------------------------------------
diff --git a/dev-tools/idea/solr/contrib/ltr/ltr.iml b/dev-tools/idea/solr/contrib/ltr/ltr.iml
index efc505d..37369e6 100644
--- a/dev-tools/idea/solr/contrib/ltr/ltr.iml
+++ b/dev-tools/idea/solr/contrib/ltr/ltr.iml
@@ -15,14 +15,14 @@
     <orderEntry type="library" scope="TEST" name="JUnit" level="project" />
     <orderEntry type="library" name="Solr core library" level="project" />
     <orderEntry type="library" name="Solrj library" level="project" />
-    <orderEntry type="module-library">
+    <orderEntry type="module-library" scope="TEST">
       <library>
         <CLASSES>
-          <root url="file://$MODULE_DIR$/lib" />
+          <root url="file://$MODULE_DIR$/test-lib" />
         </CLASSES>
         <JAVADOC />
         <SOURCES />
-        <jarDirectory url="file://$MODULE_DIR$/lib" recursive="false" />
+        <jarDirectory url="file://$MODULE_DIR$/test-lib" recursive="false" />
       </library>
     </orderEntry>
     <orderEntry type="library" scope="TEST" name="Solr example library" level="project" />

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9eb806a2/dev-tools/maven/solr/contrib/ltr/pom.xml.template
----------------------------------------------------------------------
diff --git a/dev-tools/maven/solr/contrib/ltr/pom.xml.template b/dev-tools/maven/solr/contrib/ltr/pom.xml.template
new file mode 100644
index 0000000..67d74e7
--- /dev/null
+++ b/dev-tools/maven/solr/contrib/ltr/pom.xml.template
@@ -0,0 +1,80 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+  -->
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.solr</groupId>
+    <artifactId>solr-parent</artifactId>
+    <version>@version@</version>
+    <relativePath>../../pom.xml</relativePath>
+  </parent>
+  <groupId>org.apache.solr</groupId>
+  <artifactId>solr-ltr</artifactId>
+  <packaging>jar</packaging>
+  <name>Apache Solr Learning to Rank Package</name>
+  <description>
+    Apache Solr Learning to Rank Package
+  </description>
+  <properties>
+    <module-directory>solr/contrib/ltr</module-directory>
+    <relative-top-level>../../../..</relative-top-level>
+    <module-path>${relative-top-level}/${module-directory}</module-path>
+  </properties>
+  <scm>
+    <connection>scm:git:${vc-anonymous-base-url}</connection>
+    <developerConnection>scm:git:${vc-dev-base-url}</developerConnection>
+    <url>${vc-browse-base-url};f=${module-directory}</url>
+  </scm>
+  <dependencies>
+    <dependency>
+      <!-- lucene-test-framework dependency must be declared before lucene-core -->
+      <!-- This dependency cannot be put into solr-parent, because local        -->
+      <!-- dependencies are always ordered before inherited dependencies.       -->
+      <groupId>org.apache.lucene</groupId>
+      <artifactId>lucene-test-framework</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.solr</groupId>
+      <artifactId>solr-test-framework</artifactId>
+      <scope>test</scope>
+    </dependency>
+    @solr-ltr.internal.dependencies@
+    @solr-ltr.external.dependencies@
+    @solr-ltr.internal.test.dependencies@
+    @solr-ltr.external.test.dependencies@
+  </dependencies>
+  <build>
+    <sourceDirectory>${module-path}/src/java</sourceDirectory>
+    <testSourceDirectory>${module-path}/src/test</testSourceDirectory>
+    <testResources>
+      <testResource>
+        <directory>${module-path}/src/test-files</directory>
+      </testResource>
+      <testResource>
+        <directory>${top-level}/dev-tools/maven/solr</directory>
+        <includes>
+          <include>maven.testlogging.properties</include>
+        </includes>
+      </testResource>
+    </testResources>
+  </build>
+</project>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9eb806a2/dev-tools/maven/solr/contrib/pom.xml.template
----------------------------------------------------------------------
diff --git a/dev-tools/maven/solr/contrib/pom.xml.template b/dev-tools/maven/solr/contrib/pom.xml.template
index 9b020108..6ca72b2 100644
--- a/dev-tools/maven/solr/contrib/pom.xml.template
+++ b/dev-tools/maven/solr/contrib/pom.xml.template
@@ -38,6 +38,7 @@
     <module>dataimporthandler-extras</module>
     <module>extraction</module>
     <module>langid</module>
+    <module>ltr</module>
     <module>morphlines-cell</module>
     <module>morphlines-core</module>
     <module>map-reduce</module>


[15/50] [abbrv] lucene-solr:apiv2: SOLR-9132: Don't require indexInfo from corestatus over reloads

Posted by sa...@apache.org.
SOLR-9132: Don't require indexInfo from corestatus over reloads


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/3b49705c
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/3b49705c
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/3b49705c

Branch: refs/heads/apiv2
Commit: 3b49705c43178fcd75dc85e56bcd2820cb35e166
Parents: cff2774
Author: Alan Woodward <ro...@apache.org>
Authored: Fri Oct 28 15:17:53 2016 +0100
Committer: Alan Woodward <ro...@apache.org>
Committed: Fri Oct 28 15:17:53 2016 +0100

----------------------------------------------------------------------
 .../apache/solr/cloud/CollectionsAPIDistributedZkTest.java    | 7 ++++---
 .../apache/solr/client/solrj/request/CoreAdminRequest.java    | 6 ++++++
 2 files changed, 10 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3b49705c/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java
index c1ad2bd..7586f2a 100644
--- a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java
@@ -49,6 +49,7 @@ import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.client.solrj.request.CoreAdminRequest;
 import org.apache.solr.client.solrj.request.CoreAdminRequest.Create;
+import org.apache.solr.client.solrj.request.CoreStatus;
 import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.client.solrj.request.UpdateRequest;
 import org.apache.solr.client.solrj.response.CollectionAdminResponse;
@@ -590,11 +591,11 @@ public class CollectionsAPIDistributedZkTest extends SolrCloudTestCase {
       for (Slice shard : collectionState) {
         for (Replica replica : shard) {
           ZkCoreNodeProps coreProps = new ZkCoreNodeProps(replica);
-          CoreAdminResponse mcr;
+          CoreStatus coreStatus;
           try (HttpSolrClient server = getHttpSolrClient(coreProps.getBaseUrl())) {
-            mcr = CoreAdminRequest.getStatus(coreProps.getCoreName(), server);
+            coreStatus = CoreAdminRequest.getCoreStatus(coreProps.getCoreName(), false, server);
           }
-          long before = mcr.getStartTime(coreProps.getCoreName()).getTime();
+          long before = coreStatus.getCoreStartTime().getTime();
           urlToTime.put(coreProps.getCoreUrl(), before);
         }
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3b49705c/solr/solrj/src/java/org/apache/solr/client/solrj/request/CoreAdminRequest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CoreAdminRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CoreAdminRequest.java
index 011844c..002bbc3 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CoreAdminRequest.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CoreAdminRequest.java
@@ -620,8 +620,14 @@ public class CoreAdminRequest extends SolrRequest<CoreAdminResponse> {
   }
 
   public static CoreStatus getCoreStatus(String coreName, SolrClient client) throws SolrServerException, IOException {
+    return getCoreStatus(coreName, true, client);
+  }
+
+  public static CoreStatus getCoreStatus(String coreName, boolean getIndexInfo, SolrClient client)
+      throws SolrServerException, IOException {
     CoreAdminRequest req = new CoreAdminRequest();
     req.setAction(CoreAdminAction.STATUS);
+    req.setIndexInfoNeeded(getIndexInfo);
     return new CoreStatus(req.process(client).getCoreStatus(coreName));
   }
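
The new overload makes it possible to ask for core status without index
info, which is what lets the test above survive a concurrent core reload.
A usage sketch against this API; the base URL and core name are
placeholders, not values from the commit:

    import org.apache.solr.client.solrj.impl.HttpSolrClient;
    import org.apache.solr.client.solrj.request.CoreAdminRequest;
    import org.apache.solr.client.solrj.request.CoreStatus;

    public class CoreStatusSketch {
      public static void main(String[] args) throws Exception {
        try (HttpSolrClient client =
                 new HttpSolrClient.Builder("http://localhost:8983/solr").build()) {
          // false => STATUS is answered without gathering index info, so the
          // request succeeds even while "mycore" is being reloaded.
          CoreStatus status = CoreAdminRequest.getCoreStatus("mycore", false, client);
          System.out.println("core start time: " + status.getCoreStartTime());
        }
      }
    }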
 


[29/50] [abbrv] lucene-solr:apiv2: Minor typo fix in solr.cmd help

Posted by sa...@apache.org.
Minor typo fix in solr.cmd help


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/417e29a6
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/417e29a6
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/417e29a6

Branch: refs/heads/apiv2
Commit: 417e29a6fdd37e3bf37b9eed99c7ea246cd0f45a
Parents: c444658
Author: Jan Høydahl <ja...@apache.org>
Authored: Mon Oct 31 15:11:59 2016 +0100
Committer: Jan Høydahl <ja...@apache.org>
Committed: Mon Oct 31 15:11:59 2016 +0100

----------------------------------------------------------------------
 solr/bin/solr.cmd | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/417e29a6/solr/bin/solr.cmd
----------------------------------------------------------------------
diff --git a/solr/bin/solr.cmd b/solr/bin/solr.cmd
index 9860d77..a7ad956 100644
--- a/solr/bin/solr.cmd
+++ b/solr/bin/solr.cmd
@@ -208,7 +208,7 @@ goto done
 @echo.
 @echo   -p port       Specify the port to start the Solr HTTP listener on; default is 8983
 @echo.
-@echo   -d dir        Specify the Solr server directory; defaults to example
+@echo   -d dir        Specify the Solr server directory; defaults to server
 @echo.
 @echo   -z zkHost     Zookeeper connection string; only used when running in SolrCloud mode using -c
 @echo                   To launch an embedded Zookeeper instance, don't pass this parameter.


[49/50] [abbrv] lucene-solr:apiv2: merge trunk

Posted by sa...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27baf3fb/solr/core/src/test/org/apache/solr/cloud/rule/RulesTest.java
----------------------------------------------------------------------
diff --cc solr/core/src/test/org/apache/solr/cloud/rule/RulesTest.java
index 5c06d45,6955418..13649e1
--- a/solr/core/src/test/org/apache/solr/cloud/rule/RulesTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/rule/RulesTest.java
@@@ -20,21 -20,17 +20,19 @@@ import java.lang.invoke.MethodHandles
  import java.nio.file.Paths;
  import java.util.List;
  import java.util.Map;
- import java.util.regex.Matcher;
- import java.util.regex.Pattern;
  
+ import org.apache.lucene.util.LuceneTestCase;
 +import org.apache.solr.client.solrj.SolrClient;
+ import org.apache.solr.client.solrj.embedded.JettySolrRunner;
  import org.apache.solr.client.solrj.impl.HttpSolrClient;
  import org.apache.solr.client.solrj.request.CollectionAdminRequest;
  import org.apache.solr.client.solrj.request.GenericSolrRequest;
- import org.apache.solr.client.solrj.response.CollectionAdminResponse;
 +import org.apache.solr.client.solrj.response.SimpleSolrResponse;
- import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
+ import org.apache.solr.cloud.SolrCloudTestCase;
  import org.apache.solr.common.cloud.DocCollection;
- import org.apache.solr.common.cloud.ImplicitDocRouter;
- import org.apache.solr.common.cloud.ZkStateReader;
  import org.apache.solr.common.params.ModifiableSolrParams;
+ import org.junit.After;
+ import org.junit.BeforeClass;
  import org.junit.Test;
  import org.junit.rules.ExpectedException;
  import org.slf4j.Logger;
@@@ -206,23 -161,6 +164,21 @@@ public class RulesTest extends SolrClou
  
    }
  
 +  @Test
 +  public void testInvokeApi() throws Exception {
-     String baseUrl = getBaseUrl((HttpSolrClient) clients.get(0));
-     try (SolrClient client = createNewSolrClient("", baseUrl)) {
++    JettySolrRunner jetty = cluster.getRandomJetty(random());
++    try (SolrClient client = getHttpSolrClient(jetty.getBaseUrl().toString())) {
 +      GenericSolrRequest req =  new GenericSolrRequest(GET, "/v2/node/invoke", new ModifiableSolrParams()
 +          .add("class", ImplicitSnitch.class.getName())
 +          .add("cores", "1")
 +          .add("freedisk", "1")
 +      );
 +      SimpleSolrResponse rsp = req.process(client);
 +      assertNotNull(((Map) rsp.getResponse().get(ImplicitSnitch.class.getName())).get("cores"));
 +      assertNotNull(((Map) rsp.getResponse().get(ImplicitSnitch.class.getName())).get("freedisk"));
 +    }
- 
- 
 +  }
 +
  
    @Test
    public void testModifyColl() throws Exception {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27baf3fb/solr/core/src/test/org/apache/solr/core/SolrCoreTest.java
----------------------------------------------------------------------
diff --cc solr/core/src/test/org/apache/solr/core/SolrCoreTest.java
index 6b18bed,2799078..39b9340
--- a/solr/core/src/test/org/apache/solr/core/SolrCoreTest.java
+++ b/solr/core/src/test/org/apache/solr/core/SolrCoreTest.java
@@@ -109,7 -109,9 +109,10 @@@ public class SolrCoreTest extends SolrT
        ++ihCount; assertEquals(pathToClassMap.get("/update/csv"), "solr.UpdateRequestHandler");
        ++ihCount; assertEquals(pathToClassMap.get("/update/json"), "solr.UpdateRequestHandler");
        ++ihCount; assertEquals(pathToClassMap.get("/update/json/docs"), "solr.UpdateRequestHandler");
+       ++ihCount; assertEquals(pathToClassMap.get("/analysis/document"), "solr.DocumentAnalysisRequestHandler");
+       ++ihCount; assertEquals(pathToClassMap.get("/analysis/field"), "solr.FieldAnalysisRequestHandler");
+       ++ihCount; assertEquals(pathToClassMap.get("/debug/dump"), "solr.DumpRequestHandler");
 +      ++ihCount; assertEquals(pathToClassMap.get("update"), "solr.UpdateRequestHandlerApi");
      }
      assertEquals("wrong number of implicit handlers", ihCount, implicitHandlers.size());
    }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27baf3fb/solr/core/src/test/org/apache/solr/handler/admin/TestCoreAdminApis.java
----------------------------------------------------------------------
diff --cc solr/core/src/test/org/apache/solr/handler/admin/TestCoreAdminApis.java
index 3d627aa,0000000..55d4eac
mode 100644,000000..100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/TestCoreAdminApis.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/TestCoreAdminApis.java
@@@ -1,115 -1,0 +1,115 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one or more
 + * contributor license agreements.  See the NOTICE file distributed with
 + * this work for additional information regarding copyright ownership.
 + * The ASF licenses this file to You under the Apache License, Version 2.0
 + * (the "License"); you may not use this file except in compliance with
 + * the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +
 +package org.apache.solr.handler.admin;
 +
 +import java.nio.file.Path;
 +import java.nio.file.Paths;
 +import java.util.Collections;
 +import java.util.HashMap;
 +import java.util.Map;
 +import java.util.Properties;
 +
 +import org.apache.solr.SolrTestCaseJ4;
 +import org.apache.solr.client.solrj.SolrRequest;
 +import org.apache.solr.core.CoreContainer;
 +import org.apache.solr.api.Api;
 +import org.apache.solr.api.ApiBag;
 +import org.easymock.EasyMock;
 +
 +import static org.apache.solr.common.util.Utils.fromJSONString;
 +import static org.easymock.EasyMock.anyBoolean;
 +import static org.easymock.EasyMock.anyObject;
 +import static org.easymock.EasyMock.getCurrentArguments;
 +
 +public class TestCoreAdminApis extends SolrTestCaseJ4 {
 +
 +  public void testCalls() throws Exception {
 +    Map<String, Object[]> calls = new HashMap<>();
 +    CoreContainer mockCC = getCoreContainerMock(calls, new HashMap<>());
 +
 +    CoreAdminHandler  coreAdminHandler = new CoreAdminHandler(mockCC);
 +    ApiBag apiBag = new ApiBag();
 +    for (Api api : coreAdminHandler.getApis()) {
 +      apiBag.register(api, Collections.EMPTY_MAP);
 +    }
 +    TestCollectionAPIs.makeCall(apiBag, "/cores", SolrRequest.METHOD.POST,
 +        "{create:{name: hello, instanceDir : someDir, schema: 'schema.xml'}}", mockCC);
 +    Object[] params = calls.get("create");
 +    assertEquals("hello" ,params[0]);
 +    assertEquals(fromJSONString("{schema : schema.xml}") ,params[2]);
 +
 +    TestCollectionAPIs.makeCall(apiBag, "/cores/core1", SolrRequest.METHOD.POST,
 +        "{swap:{with: core2}}", mockCC);
 +    params = calls.get("swap");
 +    assertEquals("core1" ,params[0]);
 +    assertEquals("core2" ,params[1]);
 +
 +    TestCollectionAPIs.makeCall(apiBag, "/cores/core1", SolrRequest.METHOD.POST,
 +        "{rename:{to: core2}}", mockCC);
 +    params = calls.get("swap");
 +    assertEquals("core1" ,params[0]);
 +    assertEquals("core2" ,params[1]);
 +
 +    TestCollectionAPIs.makeCall(apiBag, "/cores/core1", SolrRequest.METHOD.POST,
 +        "{unload:{deleteIndex : true}}", mockCC);
 +    params = calls.get("unload");
 +    assertEquals("core1" ,params[0]);
 +    assertEquals(Boolean.TRUE ,params[1]);
 +  }
 +
 +  public static CoreContainer getCoreContainerMock(final Map<String, Object[]> in,Map<String,Object> out ) {
 +    CoreContainer mockCC = EasyMock.createMock(CoreContainer.class);
 +    EasyMock.reset(mockCC);
-     mockCC.create(anyObject(String.class), anyObject(Path.class) , anyObject(Map.class));
++    mockCC.create(anyObject(String.class), anyObject(Path.class) , anyObject(Map.class), false);
 +    EasyMock.expectLastCall().andAnswer(() -> {
 +      in.put("create", getCurrentArguments());
 +      return null;
 +    }).anyTimes();
 +    mockCC.swap(anyObject(String.class), anyObject(String.class));
 +    EasyMock.expectLastCall().andAnswer(() -> {
 +      in.put("swap", getCurrentArguments());
 +      return null;
 +    }).anyTimes();
 +
 +    mockCC.rename(anyObject(String.class), anyObject(String.class));
 +    EasyMock.expectLastCall().andAnswer(() -> {
 +      in.put("rename", getCurrentArguments());
 +      return null;
 +    }).anyTimes();
 +
 +    mockCC.unload(anyObject(String.class), anyBoolean(),
 +        anyBoolean(), anyBoolean());
 +    EasyMock.expectLastCall().andAnswer(() -> {
 +      in.put("unload", getCurrentArguments());
 +      return null;
 +    }).anyTimes();
 +
 +    mockCC.getCoreRootDirectory();
 +    EasyMock.expectLastCall().andAnswer(() -> Paths.get("coreroot")).anyTimes();
 +    mockCC.getContainerProperties();
 +    EasyMock.expectLastCall().andAnswer(() -> new Properties()).anyTimes();
 +
 +    mockCC.getRequestHandlers();
 +    EasyMock.expectLastCall().andAnswer(() -> out.get("getRequestHandlers")).anyTimes();
 +
 +    EasyMock.replay(mockCC);
 +    return mockCC;
 +  }
 +
 +
 +}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27baf3fb/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27baf3fb/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
----------------------------------------------------------------------
diff --cc solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
index 405bac7,137fcdd..397f4e8
--- a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
@@@ -80,14 -85,7 +85,11 @@@ public class BasicAuthIntegrationTest e
  
      String authcPrefix = "/admin/authentication";
      String authzPrefix = "/admin/authorization";
 +    if(random().nextBoolean()){
 +      authcPrefix = "/v2/cluster/security/authentication";
 +      authzPrefix = "/v2/cluster/security/authorization";
 +    }
  
-     String old = cloudSolrClient.getDefaultCollection();
-     cloudSolrClient.setDefaultCollection(null);
- 
      NamedList<Object> rsp;
      HttpClient cl = null;
      try {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27baf3fb/solr/server/solr/configsets/sample_techproducts_configs/conf/solrconfig.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27baf3fb/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27baf3fb/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27baf3fb/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/SolrExampleJettyTest.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/27baf3fb/solr/test-framework/src/java/org/apache/solr/util/RestTestHarness.java
----------------------------------------------------------------------


[09/50] [abbrv] lucene-solr:apiv2: SOLR-9132: Cut over some collections API and recovery tests

Posted by sa...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f56d111a/solr/core/src/test/org/apache/solr/cloud/CustomCollectionTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/CustomCollectionTest.java b/solr/core/src/test/org/apache/solr/cloud/CustomCollectionTest.java
index 477641d..63a3272 100644
--- a/solr/core/src/test/org/apache/solr/cloud/CustomCollectionTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/CustomCollectionTest.java
@@ -16,41 +16,19 @@
  */
 package org.apache.solr.cloud;
 
-import org.apache.lucene.util.LuceneTestCase.Slow;
+import java.util.Map;
+
 import org.apache.lucene.util.TestUtil;
-import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
-import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrQuery;
-import org.apache.solr.client.solrj.SolrRequest;
-import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
-import org.apache.solr.client.solrj.impl.HttpSolrClient;
-import org.apache.solr.client.solrj.request.QueryRequest;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.client.solrj.request.UpdateRequest;
-import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.cloud.DocCollection;
-import org.apache.solr.common.cloud.ImplicitDocRouter;
 import org.apache.solr.common.cloud.Replica;
-import org.apache.solr.common.cloud.ZkStateReader;
-import org.apache.solr.common.params.CollectionParams.CollectionAction;
-import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.common.util.Utils;
+import org.junit.Before;
+import org.junit.BeforeClass;
 import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
 
-import static org.apache.solr.cloud.OverseerCollectionMessageHandler.NUM_SLICES;
-import static org.apache.solr.cloud.OverseerCollectionMessageHandler.SHARDS_PROP;
 import static org.apache.solr.common.cloud.DocCollection.DOC_ROUTER;
 import static org.apache.solr.common.cloud.ZkStateReader.MAX_SHARDS_PER_NODE;
 import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR;
@@ -59,371 +37,162 @@ import static org.apache.solr.common.params.ShardParams._ROUTE_;
 /**
  * Tests the Custom Sharding API.
  */
-@Slow
-@SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776")
-public class CustomCollectionTest extends AbstractFullDistribZkTestBase {
+public class CustomCollectionTest extends SolrCloudTestCase {
 
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final int NODE_COUNT = 4;
 
-  protected String getSolrXml() {
-    return "solr.xml";
+  @BeforeClass
+  public static void setupCluster() throws Exception {
+    configureCluster(NODE_COUNT)
+        .addConfig("conf", configset("cloud-dynamic"))
+        .configure();
   }
 
-
-  public CustomCollectionTest() {
-    sliceCount = 2;
-  }
-
-  @Override
-  protected void setDistributedParams(ModifiableSolrParams params) {
-
-    if (r.nextBoolean()) {
-      // don't set shards, let that be figured out from the cloud state
-    } else {
-      // use shard ids rather than physical locations
-      StringBuilder sb = new StringBuilder();
-      for (int i = 0; i < getShardCount(); i++) {
-        if (i > 0)
-          sb.append(',');
-        sb.append("shard" + (i + 3));
-      }
-      params.set("shards", sb.toString());
-    }
+  @Before
+  public void ensureClusterEmpty() throws Exception {
+    cluster.deleteAllCollections();
   }
 
   @Test
-  @ShardsFixed(num = 4)
-  public void test() throws Exception {
-    testCustomCollectionsAPI();
-    testRouteFieldForHashRouter();
-    testCreateShardRepFactor();
-  }
-
+  public void testCustomCollectionsAPI() throws Exception {
 
-  private void testCustomCollectionsAPI() throws Exception {
-    String COLL_PREFIX = "implicitcoll";
+    final String collection = "implicitcoll";
+    int replicationFactor = TestUtil.nextInt(random(), 0, 3) + 2;
+    int numShards = 3;
+    int maxShardsPerNode = (((numShards + 1) * replicationFactor) / NODE_COUNT) + 1;
 
-    // TODO: fragile - because we dont pass collection.confName, it will only
-    // find a default if a conf set with a name matching the collection name is found, or
-    // if there is only one conf set. That and the fact that other tests run first in this
-    // env make this pretty fragile
+    CollectionAdminRequest.createCollectionWithImplicitRouter(collection, "conf", "a,b,c", replicationFactor)
+        .setMaxShardsPerNode(maxShardsPerNode)
+        .process(cluster.getSolrClient());
 
-    // create new collections rapid fire
-    Map<String,List<Integer>> collectionInfos = new HashMap<>();
-    int replicationFactor = TestUtil.nextInt(random(), 0, 3) + 2;
+    DocCollection coll = getCollectionState(collection);
+    assertEquals("implicit", ((Map) coll.get(DOC_ROUTER)).get("name"));
+    assertNotNull(coll.getStr(REPLICATION_FACTOR));
+    assertNotNull(coll.getStr(MAX_SHARDS_PER_NODE));
+    assertNull("A shard of a Collection configured with implicit router must have null range",
+        coll.getSlice("a").getRange());
 
-    int cnt = random().nextInt(6) + 1;
-
-    for (int i = 0; i < cnt; i++) {
-      int numShards = 3;
-      int maxShardsPerNode = ((((numShards+1) * replicationFactor) / getCommonCloudSolrClient()
-          .getZkStateReader().getClusterState().getLiveNodes().size())) + 1;
-
-
-      CloudSolrClient client = null;
-      try {
-        if (i == 0) {
-          // Test if we can create a collection through CloudSolrServer where
-          // you havnt set default-collection
-          // This is nice because you want to be able to create you first
-          // collection using CloudSolrServer, and in such case there is
-          // nothing reasonable to set as default-collection
-          client = createCloudClient(null);
-        } else if (i == 1) {
-          // Test if we can create a collection through CloudSolrServer where
-          // you have set default-collection to a non-existing collection
-          // This is nice because you want to be able to create you first
-          // collection using CloudSolrServer, and in such case there is
-          // nothing reasonable to set as default-collection, but you might want
-          // to use the same CloudSolrServer throughout the entire
-          // lifetime of your client-application, so it is nice to be able to
-          // set a default-collection on this CloudSolrServer once and for all
-          // and use this CloudSolrServer to create the collection
-          client = createCloudClient(COLL_PREFIX + i);
-        }
-
-        Map<String, Object> props = Utils.makeMap(
-            "router.name", ImplicitDocRouter.NAME,
-            REPLICATION_FACTOR, replicationFactor,
-            MAX_SHARDS_PER_NODE, maxShardsPerNode,
-            SHARDS_PROP, "a,b,c");
-
-        createCollection(collectionInfos, COLL_PREFIX + i,props,client);
-      } finally {
-        if (client != null) client.close();
+    new UpdateRequest()
+        .add("id", "6")
+        .add("id", "7")
+        .add("id", "8")
+        .withRoute("a")
+        .commit(cluster.getSolrClient(), collection);
+
+    assertEquals(3, cluster.getSolrClient().query(collection, new SolrQuery("*:*")).getResults().getNumFound());
+    assertEquals(0, cluster.getSolrClient().query(collection, new SolrQuery("*:*").setParam(_ROUTE_, "b")).getResults().getNumFound());
+    assertEquals(3, cluster.getSolrClient().query(collection, new SolrQuery("*:*").setParam(_ROUTE_, "a")).getResults().getNumFound());
+
+    cluster.getSolrClient().deleteByQuery(collection, "*:*");
+    cluster.getSolrClient().commit(collection, true, true);
+    assertEquals(0, cluster.getSolrClient().query(collection, new SolrQuery("*:*")).getResults().getNumFound());
+
+    new UpdateRequest()
+        .add("id", "9")
+        .add("id", "10")
+        .add("id", "11")
+        .withRoute("c")
+        .commit(cluster.getSolrClient(), collection);
+
+    assertEquals(3, cluster.getSolrClient().query(collection, new SolrQuery("*:*")).getResults().getNumFound());
+    assertEquals(0, cluster.getSolrClient().query(collection, new SolrQuery("*:*").setParam(_ROUTE_, "a")).getResults().getNumFound());
+    assertEquals(3, cluster.getSolrClient().query(collection, new SolrQuery("*:*").setParam(_ROUTE_, "c")).getResults().getNumFound());
+
+    //Testing CREATESHARD
+    CollectionAdminRequest.createShard(collection, "x")
+        .process(cluster.getSolrClient());
+    waitForState("Expected shard 'x' to be active", collection, (n, c) -> {
+      if (c.getSlice("x") == null)
+        return false;
+      for (Replica r : c.getSlice("x")) {
+        if (r.getState() != Replica.State.ACTIVE)
+          return false;
       }
-    }
+      return true;
+    });
 
-    Set<Entry<String,List<Integer>>> collectionInfosEntrySet = collectionInfos.entrySet();
-    for (Entry<String,List<Integer>> entry : collectionInfosEntrySet) {
-      String collection = entry.getKey();
-      List<Integer> list = entry.getValue();
-      checkForCollection(collection, list, null);
+    new UpdateRequest()
+        .add("id", "66", _ROUTE_, "x")
+        .commit(cluster.getSolrClient(), collection);
+    // TODO - the local state is cached and causes the request to fail with 'unknown shard'
+    // assertEquals(1, cluster.getSolrClient().query(collection, new SolrQuery("*:*").setParam(_ROUTE_, "x")).getResults().getNumFound());
 
-      String url = getUrlFromZk(getCommonCloudSolrClient().getZkStateReader().getClusterState(), collection);
+  }
 
-      try (HttpSolrClient collectionClient = getHttpSolrClient(url)) {
-        // poll for a second - it can take a moment before we are ready to serve
-        waitForNon403or404or503(collectionClient);
-      }
-    }
-    ZkStateReader zkStateReader = getCommonCloudSolrClient().getZkStateReader();
-    for (int j = 0; j < cnt; j++) {
-      waitForRecoveriesToFinish(COLL_PREFIX + j, zkStateReader, false);
-    }
+  @Test
+  public void testRouteFieldForImplicitRouter() throws Exception {
 
-    ClusterState clusterState = zkStateReader.getClusterState();
+    int numShards = 4;
+    int replicationFactor = TestUtil.nextInt(random(), 0, 3) + 2;
+    int maxShardsPerNode = ((numShards * replicationFactor) / NODE_COUNT) + 1;
+    String shard_fld = "shard_s";
 
-    DocCollection coll = clusterState.getCollection(COLL_PREFIX + 0);
-    assertEquals("implicit", ((Map)coll.get(DOC_ROUTER)).get("name") );
-    assertNotNull(coll.getStr(REPLICATION_FACTOR));
-    assertNotNull(coll.getStr(MAX_SHARDS_PER_NODE));
-    assertNull("A shard of a Collection configured with implicit router must have null range",
-        coll.getSlice("a").getRange());
+    final String collection = "withShardField";
 
-    List<String> collectionNameList = new ArrayList<>();
-    collectionNameList.addAll(collectionInfos.keySet());
-    log.info("Collections created : "+collectionNameList );
+    CollectionAdminRequest.createCollectionWithImplicitRouter(collection, "conf", "a,b,c,d", replicationFactor)
+        .setMaxShardsPerNode(maxShardsPerNode)
+        .setRouterField(shard_fld)
+        .process(cluster.getSolrClient());
 
-    String collectionName = collectionNameList.get(random().nextInt(collectionNameList.size()));
+    new UpdateRequest()
+        .add("id", "6", shard_fld, "a")
+        .add("id", "7", shard_fld, "a")
+        .add("id", "8", shard_fld, "b")
+        .commit(cluster.getSolrClient(), collection);
 
-    String url = getUrlFromZk(getCommonCloudSolrClient().getZkStateReader().getClusterState(), collectionName);
+    assertEquals(3, cluster.getSolrClient().query(collection, new SolrQuery("*:*")).getResults().getNumFound());
+    assertEquals(1, cluster.getSolrClient().query(collection, new SolrQuery("*:*").setParam(_ROUTE_, "b")).getResults().getNumFound());
+    assertEquals(2, cluster.getSolrClient().query(collection, new SolrQuery("*:*").setParam(_ROUTE_, "a")).getResults().getNumFound());
 
-    String shard_fld = "shard_s";
-    try (HttpSolrClient collectionClient = getHttpSolrClient(url)) {
-
-      // lets try and use the solrj client to index a couple documents
-  
-      collectionClient.add(getDoc(id, 6, i1, -600, tlong, 600, t1,
-          "humpty dumpy sat on a wall", _ROUTE_,"a"));
-  
-      collectionClient.add(getDoc(id, 7, i1, -600, tlong, 600, t1,
-          "humpty dumpy3 sat on a walls", _ROUTE_,"a"));
-  
-      collectionClient.add(getDoc(id, 8, i1, -600, tlong, 600, t1,
-          "humpty dumpy2 sat on a walled", _ROUTE_,"a"));
-  
-      collectionClient.commit();
-  
-      assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
-      assertEquals(0, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"b")).getResults().getNumFound());
-      assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"a")).getResults().getNumFound());
-  
-      collectionClient.deleteByQuery("*:*");
-      collectionClient.commit(true,true);
-      assertEquals(0, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
-  
-      UpdateRequest up = new UpdateRequest();
-      up.setParam(_ROUTE_, "c");
-      up.setParam("commit","true");
-  
-      up.add(getDoc(id, 9, i1, -600, tlong, 600, t1,
-          "humpty dumpy sat on a wall"));
-      up.add(getDoc(id, 10, i1, -600, tlong, 600, t1,
-          "humpty dumpy3 sat on a walls"));
-      up.add(getDoc(id, 11, i1, -600, tlong, 600, t1,
-          "humpty dumpy2 sat on a walled"));
-  
-      collectionClient.request(up);
-  
-      assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
-      assertEquals(0, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"a")).getResults().getNumFound());
-      assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"c")).getResults().getNumFound());
-  
-      //Testing CREATESHARD
-      ModifiableSolrParams params = new ModifiableSolrParams();
-      params.set("action", CollectionAction.CREATESHARD.toString());
-      params.set("collection", collectionName);
-      params.set("shard", "x");
-      SolrRequest request = new QueryRequest(params);
-      request.setPath("/admin/collections");
-      try (SolrClient server = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)))) {
-        server.request(request);
-      }
-      waitForCollection(zkStateReader,collectionName,4);
-      //wait for all the replicas to become active
-      int attempts = 0;
-      while(true){
-        if(attempts>30 ) fail("Not enough active replicas in the shard 'x'");
-        attempts++;
-        int activeReplicaCount = 0;
-        for (Replica x : zkStateReader.getClusterState().getCollection(collectionName).getSlice("x").getReplicas()) {
-          if (x.getState() == Replica.State.ACTIVE) {
-            activeReplicaCount++;
-          }
-        }
-        Thread.sleep(500);
-        if(activeReplicaCount >= replicationFactor) break;
-      }
-      log.info(zkStateReader.getClusterState().toString());
-  
-      collectionClient.add(getDoc(id, 66, i1, -600, tlong, 600, t1,
-          "humpty dumpy sat on a wall", _ROUTE_,"x"));
-      collectionClient.commit();
-      assertEquals(1, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"x")).getResults().getNumFound());
-  
-  
-      int numShards = 4;
-      replicationFactor = TestUtil.nextInt(random(), 0, 3) + 2;
-      int maxShardsPerNode = (((numShards * replicationFactor) / getCommonCloudSolrClient()
-          .getZkStateReader().getClusterState().getLiveNodes().size())) + 1;
-
-      try (CloudSolrClient client = createCloudClient(null)) {
-        Map<String, Object> props = Utils.makeMap(
-            "router.name", ImplicitDocRouter.NAME,
-            REPLICATION_FACTOR, replicationFactor,
-            MAX_SHARDS_PER_NODE, maxShardsPerNode,
-            SHARDS_PROP, "a,b,c,d",
-            "router.field", shard_fld);
-  
-        collectionName = COLL_PREFIX + "withShardField";
-        createCollection(collectionInfos, collectionName,props,client);
-      }
-  
-      List<Integer> list = collectionInfos.get(collectionName);
-      checkForCollection(collectionName, list, null);
-  
-  
-      url = getUrlFromZk(getCommonCloudSolrClient().getZkStateReader().getClusterState(), collectionName);
-    }
-
-    try (HttpSolrClient collectionClient = getHttpSolrClient(url)) {
-         // poll for a second - it can take a moment before we are ready to serve
-      waitForNon403or404or503(collectionClient);
-    }
-
-    try (HttpSolrClient collectionClient = getHttpSolrClient(url)) {
-      // lets try and use the solrj client to index a couple documents
-  
-      collectionClient.add(getDoc(id, 6, i1, -600, tlong, 600, t1,
-          "humpty dumpy sat on a wall", shard_fld,"a"));
-  
-      collectionClient.add(getDoc(id, 7, i1, -600, tlong, 600, t1,
-          "humpty dumpy3 sat on a walls", shard_fld,"a"));
-  
-      collectionClient.add(getDoc(id, 8, i1, -600, tlong, 600, t1,
-          "humpty dumpy2 sat on a walled", shard_fld,"a"));
-  
-      collectionClient.commit();
-  
-      assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
-      assertEquals(0, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"b")).getResults().getNumFound());
-      //TODO debug the following case
-      assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_, "a")).getResults().getNumFound());
-    }
   }
 
-  private void testRouteFieldForHashRouter()throws Exception{
+  @Test
+  public void testRouteFieldForHashRouter()throws Exception{
     String collectionName = "routeFieldColl";
     int numShards = 4;
     int replicationFactor = 2;
-    int maxShardsPerNode = (((numShards * replicationFactor) / getCommonCloudSolrClient()
-        .getZkStateReader().getClusterState().getLiveNodes().size())) + 1;
-
-    HashMap<String, List<Integer>> collectionInfos = new HashMap<>();
+    int maxShardsPerNode = ((numShards * replicationFactor) / NODE_COUNT) + 1;
     String shard_fld = "shard_s";
-    try (CloudSolrClient client = createCloudClient(null)) {
-      Map<String, Object> props = Utils.makeMap(
-          REPLICATION_FACTOR, replicationFactor,
-          MAX_SHARDS_PER_NODE, maxShardsPerNode,
-          NUM_SLICES, numShards,
-          "router.field", shard_fld);
-
-      createCollection(collectionInfos, collectionName,props,client);
-    }
-
-    List<Integer> list = collectionInfos.get(collectionName);
-    checkForCollection(collectionName, list, null);
-
-
-    String url = getUrlFromZk(getCommonCloudSolrClient().getZkStateReader().getClusterState(), collectionName);
-
-    try (HttpSolrClient collectionClient = getHttpSolrClient(url)) {
-      // poll for a second - it can take a moment before we are ready to serve
-      waitForNon403or404or503(collectionClient);
-    }
-
-
-    try (HttpSolrClient collectionClient = getHttpSolrClient(url)) {
-      // lets try and use the solrj client to index a couple documents
-  
-      collectionClient.add(getDoc(id, 6, i1, -600, tlong, 600, t1,
-          "humpty dumpy sat on a wall", shard_fld,"a"));
-  
-      collectionClient.add(getDoc(id, 7, i1, -600, tlong, 600, t1,
-          "humpty dumpy3 sat on a walls", shard_fld,"a"));
-  
-      collectionClient.add(getDoc(id, 8, i1, -600, tlong, 600, t1,
-          "humpty dumpy2 sat on a walled", shard_fld,"a"));
-  
-      collectionClient.commit();
-  
-      assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
-      //TODO debug the following case
-      assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_, "a")).getResults().getNumFound());
-  
-      collectionClient.deleteByQuery("*:*");
-      collectionClient.commit();
-  
-      collectionClient.add (getDoc( id,100,shard_fld, "b!doc1"));
-      collectionClient.commit();
-      assertEquals(1, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_, "b!")).getResults().getNumFound());
-    }
-  }
 
-  private void testCreateShardRepFactor() throws Exception  {
-    String collectionName = "testCreateShardRepFactor";
-    HashMap<String, List<Integer>> collectionInfos = new HashMap<>();
-    try (CloudSolrClient client = createCloudClient(null)) {
-      Map<String, Object> props = Utils.makeMap(
-          REPLICATION_FACTOR, 1,
-          MAX_SHARDS_PER_NODE, 5,
-          NUM_SLICES, 2,
-          "shards", "a,b",
-          "router.name", "implicit");
-
-      createCollection(collectionInfos, collectionName, props, client);
-    }
-    ZkStateReader zkStateReader = getCommonCloudSolrClient().getZkStateReader();
-    waitForRecoveriesToFinish(collectionName, zkStateReader, false);
-
-    ModifiableSolrParams params = new ModifiableSolrParams();
-    params.set("action", CollectionAction.CREATESHARD.toString());
-    params.set("collection", collectionName);
-    params.set("shard", "x");
-    SolrRequest request = new QueryRequest(params);
-    request.setPath("/admin/collections");
-
-    try (SolrClient server = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)))) {
-      server.request(request);
-    }
-
-    waitForRecoveriesToFinish(collectionName, zkStateReader, false);
-
-    int replicaCount = 0;
-    int attempts = 0;
-    while (true) {
-      if (attempts > 30) fail("Not enough active replicas in the shard 'x'");
-      attempts++;
-      replicaCount = zkStateReader.getClusterState().getSlice(collectionName, "x").getReplicas().size();
-      if (replicaCount >= 1) break;
-      Thread.sleep(500);
-    }
-
-    assertEquals("CREATESHARD API created more than replicationFactor number of replicas", 1, replicaCount);
-  }
+    CollectionAdminRequest.createCollection(collectionName, "conf", numShards, replicationFactor)
+        .setMaxShardsPerNode(maxShardsPerNode)
+        .setRouterField(shard_fld)
+        .process(cluster.getSolrClient());
+
+    new UpdateRequest()
+        .add("id", "6", shard_fld, "a")
+        .add("id", "7", shard_fld, "a")
+        .add("id", "8", shard_fld, "b")
+        .commit(cluster.getSolrClient(), collectionName);
+
+    assertEquals(3, cluster.getSolrClient().query(collectionName, new SolrQuery("*:*")).getResults().getNumFound());
+    assertEquals(2, cluster.getSolrClient().query(collectionName, new SolrQuery("*:*").setParam(_ROUTE_, "a")).getResults().getNumFound());
+    assertEquals(1, cluster.getSolrClient().query(collectionName, new SolrQuery("*:*").setParam(_ROUTE_, "b")).getResults().getNumFound());
+    assertEquals(0, cluster.getSolrClient().query(collectionName, new SolrQuery("*:*").setParam(_ROUTE_, "c")).getResults().getNumFound());
+
 
+    cluster.getSolrClient().deleteByQuery(collectionName, "*:*");
+    cluster.getSolrClient().commit(collectionName);
 
-  @Override
-  protected QueryResponse queryServer(ModifiableSolrParams params) throws SolrServerException, IOException {
+    cluster.getSolrClient().add(collectionName, new SolrInputDocument("id", "100", shard_fld, "c!doc1"));
+    cluster.getSolrClient().commit(collectionName);
+    assertEquals(1, cluster.getSolrClient().query(collectionName, new SolrQuery("*:*").setParam(_ROUTE_, "c!")).getResults().getNumFound());
 
-    if (r.nextBoolean())
-      return super.queryServer(params);
+  }
+
+  @Test
+  public void testCreateShardRepFactor() throws Exception  {
+    final String collectionName = "testCreateShardRepFactor";
+    CollectionAdminRequest.createCollectionWithImplicitRouter(collectionName, "conf", "a,b", 1)
+        .process(cluster.getSolrClient());
 
-    if (r.nextBoolean())
-      params.set("collection",DEFAULT_COLLECTION);
+    CollectionAdminRequest.createShard(collectionName, "x")
+        .process(cluster.getSolrClient());
+
+    waitForState("Not enough active replicas in shard 'x'", collectionName, (n, c) -> {
+      return c.getSlice("x").getReplicas().size() == 1;
+    });
 
-    QueryResponse rsp = getCommonCloudSolrClient().query(params);
-    return rsp;
   }
+
 }
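
A minimal SolrJ sketch of the routing pattern the rewritten test exercises; the collection name is illustrative, and the collection is assumed to have been created with router.field=shard_s as above:

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.CloudSolrClient;
    import org.apache.solr.common.params.ShardParams;

    // Count only the documents on the shard that owns route key "a".
    static long countForRoute(CloudSolrClient client) throws Exception {
      return client.query("routeFieldColl",
          new SolrQuery("*:*").setParam(ShardParams._ROUTE_, "a"))
          .getResults().getNumFound();
    }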

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f56d111a/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java b/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java
index 78f82ed..8e3f63d 100644
--- a/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java
@@ -51,6 +51,7 @@ public class MigrateRouteKeyTest extends SolrCloudTestCase {
 
     if (usually()) {
       CollectionAdminRequest.setClusterProperty("legacyCloud", "false").process(cluster.getSolrClient());
+      log.info("Using legacyCloud=false for cluster");
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f56d111a/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java b/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java
index a0cb4dc..0aecdf9 100644
--- a/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java
@@ -16,46 +16,59 @@
  */
 package org.apache.solr.cloud;
 
-import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
 
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.client.solrj.SolrQuery;
-import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
-import org.apache.solr.common.SolrInputDocument;
-import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.request.UpdateRequest;
+import org.apache.solr.common.cloud.DocCollection;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
+import org.junit.After;
+import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @Slow
-public class RecoveryZkTest extends AbstractFullDistribZkTestBase {
+public class RecoveryZkTest extends SolrCloudTestCase {
 
-  //private static final String DISTRIB_UPDATE_CHAIN = "distrib-update-chain";
-  private StoppableIndexingThread indexThread;
-  private StoppableIndexingThread indexThread2;
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
-  public RecoveryZkTest() {
-    super();
-    sliceCount = 1;
-    fixShardCount(2);
-    schemaString = "schema15.xml";      // we need a string id
-  }
-  
-  public static String[] fieldNames = new String[]{"f_i", "f_f", "f_d", "f_l", "f_dt"};
-  public static RandVal[] randVals = new RandVal[]{rint, rfloat, rdouble, rlong, rdate};
-  
-  protected String[] getFieldNames() {
-    return fieldNames;
+  @BeforeClass
+  public static void setupCluster() throws Exception {
+    configureCluster(2)
+        .addConfig("conf", configset("cloud-minimal"))
+        .configure();
   }
 
-  protected RandVal[] getRandValues() {
-    return randVals;
+  private StoppableIndexingThread indexThread;
+  private StoppableIndexingThread indexThread2;
+
+  @After
+  public void stopThreads() throws InterruptedException {
+    indexThread.safeStop();
+    indexThread2.safeStop();
+    indexThread.join();
+    indexThread2.join();
   }
 
   @Test
   public void test() throws Exception {
-    handle.clear();
-    handle.put("timestamp", SKIPVAL);
-    
+
+    final String collection = "recoverytest";
+
+    CollectionAdminRequest.createCollection(collection, "conf", 1, 2)
+        .setMaxShardsPerNode(1)
+        .process(cluster.getSolrClient());
+    waitForState("Expected a collection with one shard and two replicas", collection, clusterShape(1, 2));
+    cluster.getSolrClient().setDefaultCollection(collection);
+
     // start a couple indexing threads
     
     int[] maxDocList = new int[] {300, 700, 1200, 1350, 3000};
@@ -67,12 +80,12 @@ public class RecoveryZkTest extends AbstractFullDistribZkTestBase {
     } else {
      maxDoc = maxDocNightlyList[random().nextInt(maxDocNightlyList.length)];
     }
+    log.info("Indexing {} documents", maxDoc);
     
-    indexThread = new StoppableIndexingThread(controlClient, cloudClient, "1", true, maxDoc, 1, true);
+    indexThread = new StoppableIndexingThread(null, cluster.getSolrClient(), "1", true, maxDoc, 1, true);
     indexThread.start();
     
-    indexThread2 = new StoppableIndexingThread(controlClient, cloudClient, "2", true, maxDoc, 1, true);
-
+    indexThread2 = new StoppableIndexingThread(null, cluster.getSolrClient(), "2", true, maxDoc, 1, true);
     indexThread2.start();
 
     // give some time to index...
@@ -80,88 +93,57 @@ public class RecoveryZkTest extends AbstractFullDistribZkTestBase {
     Thread.sleep(waitTimes[random().nextInt(waitTimes.length - 1)]);
      
     // bring shard replica down
-    JettySolrRunner replica = chaosMonkey.stopShard("shard1", 1).jetty;
+    DocCollection state = getCollectionState(collection);
+    Replica leader = state.getLeader("shard1");
+    Replica replica = getRandomReplica(state.getSlice("shard1"), (r) -> leader != r);
 
+    JettySolrRunner jetty = cluster.getReplicaJetty(replica);
+    jetty.stop();
     
     // wait a moment - let's allow some docs to be indexed so replication time is non-zero
     Thread.sleep(waitTimes[random().nextInt(waitTimes.length - 1)]);
     
     // bring shard replica up
-    replica.start();
+    jetty.start();
     
     // make sure replication can start
     Thread.sleep(3000);
-    ZkStateReader zkStateReader = cloudClient.getZkStateReader();
-    
+
     // stop indexing threads
     indexThread.safeStop();
     indexThread2.safeStop();
     
     indexThread.join();
     indexThread2.join();
-    
-    Thread.sleep(1000);
-  
-    waitForThingsToLevelOut(120);
-    
-    Thread.sleep(2000);
-    
-    waitForThingsToLevelOut(30);
-    
-    Thread.sleep(5000);
-    
-    waitForRecoveriesToFinish(DEFAULT_COLLECTION, zkStateReader, false, true);
+
+    new UpdateRequest()
+        .commit(cluster.getSolrClient(), collection);
+
+    cluster.getSolrClient().waitForState(collection, 120, TimeUnit.SECONDS, clusterShape(1, 2));
 
     // test that leader and replica have same doc count
-    
-    String fail = checkShardConsistency("shard1", false, false);
-    if (fail != null) {
-      fail(fail);
-    }
-    
-    SolrQuery query = new SolrQuery("*:*");
-    query.setParam("distrib", "false");
-    long client1Docs = shardToJetty.get("shard1").get(0).client.solrClient.query(query).getResults().getNumFound();
-    long client2Docs = shardToJetty.get("shard1").get(1).client.solrClient.query(query).getResults().getNumFound();
-    
-    assertTrue(client1Docs > 0);
-    assertEquals(client1Docs, client2Docs);
- 
-    // won't always pass yet...
-    //query("q", "*:*", "sort", "id desc");
-  }
-  
-  @Override
-  protected void indexDoc(SolrInputDocument doc) throws IOException,
-      SolrServerException {
-    controlClient.add(doc);
-    
-    // UpdateRequest ureq = new UpdateRequest();
-    // ureq.add(doc);
-    // ureq.setParam("update.chain", DISTRIB_UPDATE_CHAIN);
-    // ureq.process(cloudClient);
-    cloudClient.add(doc);
-  }
+    state = getCollectionState(collection);
+    assertShardConsistency(state.getSlice("shard1"), true);
 
-  
-  @Override
-  public void distribTearDown() throws Exception {
-    // make sure threads have been stopped...
-    indexThread.safeStop();
-    indexThread2.safeStop();
-    
-    indexThread.join();
-    indexThread2.join();
-    
-    super.distribTearDown();
   }
-  
-  // skip the randoms - they can deadlock...
-  @Override
-  protected void indexr(Object... fields) throws Exception {
-    SolrInputDocument doc = new SolrInputDocument();
-    addFields(doc, fields);
-    addFields(doc, "rnd_b", true);
-    indexDoc(doc);
+
+  private void assertShardConsistency(Slice shard, boolean expectDocs) throws Exception {
+    List<Replica> replicas = shard.getReplicas(r -> r.getState() == Replica.State.ACTIVE);
+    long[] numCounts = new long[replicas.size()];
+    int i = 0;
+    for (Replica replica : replicas) {
+      try (HttpSolrClient client = new HttpSolrClient.Builder(replica.getCoreUrl())
+          .withHttpClient(cluster.getSolrClient().getHttpClient()).build()) {
+        numCounts[i] = client.query(new SolrQuery("*:*").add("distrib", "false")).getResults().getNumFound();
+        i++;
+      }
+    }
+    for (int j = 1; j < replicas.size(); j++) {
+      if (numCounts[j] != numCounts[j - 1])
+        fail("Mismatch in counts between replicas");  // nocommit improve this!
+      if (numCounts[j] == 0 && expectDocs)
+        fail("Expected docs on shard " + shard.getName() + " but found none");
+    }
   }
+
 }
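
For reference, a sketch of the non-distributed per-core query that the new assertShardConsistency check relies on; the core URL is assumed to come from Replica.getCoreUrl():

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;

    // Count documents on a single replica, bypassing distributed search.
    static long coreDocCount(String coreUrl) throws Exception {
      try (HttpSolrClient client = new HttpSolrClient.Builder(coreUrl).build()) {
        return client.query(new SolrQuery("*:*").add("distrib", "false"))
            .getResults().getNumFound();
      }
    }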

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f56d111a/solr/core/src/test/org/apache/solr/cloud/ShardSplitTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/ShardSplitTest.java b/solr/core/src/test/org/apache/solr/cloud/ShardSplitTest.java
index 7388476..72f0694 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ShardSplitTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ShardSplitTest.java
@@ -93,7 +93,8 @@ public class ShardSplitTest extends BasicDistributedZkTest {
 
     if (usually()) {
       log.info("Using legacyCloud=false for cluster");
-      CollectionsAPIDistributedZkTest.setClusterProp(cloudClient, "legacyCloud", "false");
+      CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, "false")
+          .process(cloudClient);
     }
     incompleteOrOverlappingCustomRangeTest();
     splitByUniqueKeyTest();
@@ -516,7 +517,8 @@ public class ShardSplitTest extends BasicDistributedZkTest {
 
     if (usually()) {
       log.info("Using legacyCloud=false for cluster");
-      CollectionsAPIDistributedZkTest.setClusterProp(cloudClient, "legacyCloud", "false");
+      CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, "false")
+          .process(cloudClient);
     }
 
     log.info("Starting testSplitShardWithRule");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f56d111a/solr/core/src/test/org/apache/solr/cloud/TestClusterProperties.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestClusterProperties.java b/solr/core/src/test/org/apache/solr/cloud/TestClusterProperties.java
new file mode 100644
index 0000000..c5575af
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/cloud/TestClusterProperties.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud;
+
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.common.cloud.ClusterProperties;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestClusterProperties extends SolrCloudTestCase {
+
+  @BeforeClass
+  public static void setupCluster() throws Exception {
+    configureCluster(1).configure();
+  }
+
+  @Test
+  public void testClusterProperties() throws Exception {
+    ClusterProperties props = new ClusterProperties(zkClient());
+    assertEquals("false", props.getClusterProperty(ZkStateReader.LEGACY_CLOUD, "false"));
+
+    CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, "true").process(cluster.getSolrClient());
+    assertEquals("true", props.getClusterProperty(ZkStateReader.LEGACY_CLOUD, "false"));
+
+    CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, "false").process(cluster.getSolrClient());
+    assertEquals("false", props.getClusterProperty(ZkStateReader.LEGACY_CLOUD, "true"));
+  }
+
+}
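
A hedged sketch of reading the same property outside a test; the ZooKeeper address and timeout are illustrative:

    import org.apache.solr.common.cloud.ClusterProperties;
    import org.apache.solr.common.cloud.SolrZkClient;
    import org.apache.solr.common.cloud.ZkStateReader;

    // Cluster properties are stored in /clusterprops.json in ZooKeeper.
    static String readLegacyCloud(String zkHost) throws Exception {
      try (SolrZkClient zk = new SolrZkClient(zkHost, 30000)) {
        return new ClusterProperties(zk)
            .getClusterProperty(ZkStateReader.LEGACY_CLOUD, "false");
      }
    }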

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f56d111a/solr/core/src/test/org/apache/solr/cloud/TestDeleteCollectionOnDownNodes.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestDeleteCollectionOnDownNodes.java b/solr/core/src/test/org/apache/solr/cloud/TestDeleteCollectionOnDownNodes.java
new file mode 100644
index 0000000..33820b3
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/cloud/TestDeleteCollectionOnDownNodes.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud;
+
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.common.cloud.Slice;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestDeleteCollectionOnDownNodes extends SolrCloudTestCase {
+
+  @BeforeClass
+  public static void setupCluster() throws Exception {
+    configureCluster(4)
+        .addConfig("conf", configset("cloud-minimal"))
+        .addConfig("conf2", configset("cloud-minimal"))
+        .configure();
+  }
+
+  @Test
+  public void deleteCollectionWithDownNodes() throws Exception {
+
+    CollectionAdminRequest.createCollection("halfdeletedcollection2", "conf", 4, 2)
+        .setMaxShardsPerNode(3)
+        .process(cluster.getSolrClient());
+
+    // stop a couple nodes
+    cluster.stopJettySolrRunner(cluster.getRandomJetty(random()));
+    cluster.stopJettySolrRunner(cluster.getRandomJetty(random()));
+
+    // wait for leaders to settle out
+    waitForState("Timed out waiting for leader elections", "halfdeletedcollection2", (n, c) -> {
+      for (Slice slice : c) {
+        if (slice.getLeader() == null)
+          return false;
+        if (slice.getLeader().isActive(n) == false)
+          return false;
+      }
+      return true;
+    });
+
+    // delete the collection
+    CollectionAdminRequest.deleteCollection("halfdeletedcollection2").process(cluster.getSolrClient());
+    waitForState("Timed out waiting for collection to be deleted", "halfdeletedcollection2", (n, c) -> c == null);
+
+    assertFalse("Still found collection that should be gone",
+        cluster.getSolrClient().getZkStateReader().getClusterState().hasCollection("halfdeletedcollection2"));
+
+  }
+}
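
waitForState accepts arbitrary predicates, as the leader-election check above shows; for the common case the built-in clusterShape helper is enough. A short sketch:

    // Wait until the collection reports 4 shards, each with 2 active replicas.
    waitForState("Timed out waiting for cluster shape", "halfdeletedcollection2",
        clusterShape(4, 2));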

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f56d111a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsCollectionsAPIDistributedZkTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsCollectionsAPIDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsCollectionsAPIDistributedZkTest.java
index 3b02477..55fb6cd 100644
--- a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsCollectionsAPIDistributedZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsCollectionsAPIDistributedZkTest.java
@@ -16,48 +16,41 @@
  */
 package org.apache.solr.cloud.hdfs;
 
-import java.io.IOException;
-
+import com.carrotsearch.randomizedtesting.annotations.Nightly;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.cloud.CollectionsAPIDistributedZkTest;
-import org.apache.solr.update.HdfsUpdateLog;
+import org.apache.solr.common.cloud.ZkConfigManager;
 import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 
-import com.carrotsearch.randomizedtesting.annotations.Nightly;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
-
 @Slow
 @Nightly
 @ThreadLeakFilters(defaultFilters = true, filters = {
     BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
 })
 public class HdfsCollectionsAPIDistributedZkTest extends CollectionsAPIDistributedZkTest {
+
   private static MiniDFSCluster dfsCluster;
-  private static long initialFailLogsCount;
-  
+
   @BeforeClass
   public static void setupClass() throws Exception {
     dfsCluster = HdfsTestUtil.setupClass(createTempDir().toFile().getAbsolutePath());
-    System.setProperty("solr.hdfs.blockcache.enabled", "false");
-    initialFailLogsCount = HdfsUpdateLog.INIT_FAILED_LOGS_COUNT.get();
+    System.setProperty("solr.hdfs.blockcache.blocksperbank", "2048");
+
+    ZkConfigManager configManager = new ZkConfigManager(zkClient());
+    configManager.uploadConfigDir(configset("cloud-hdfs"), "conf");
+
+    System.setProperty("solr.hdfs.home", HdfsTestUtil.getDataDir(dfsCluster, "data"));
   }
-  
+
   @AfterClass
   public static void teardownClass() throws Exception {
-    // there should be no new fails from this test
-    assertEquals(0, HdfsUpdateLog.INIT_FAILED_LOGS_COUNT.get() - initialFailLogsCount);
+    cluster.shutdown(); // need to close before the MiniDFSCluster
     HdfsTestUtil.teardownClass(dfsCluster);
-    System.clearProperty("solr.hdfs.blockcache.enabled");
     dfsCluster = null;
   }
 
-  
-  @Override
-  protected String getDataDir(String dataDir) throws IOException {
-    return HdfsTestUtil.getDataDir(dfsCluster, dataDir);
-  }
-
 }
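
The configset upload above works from a static @BeforeClass method because SolrCloudTestCase.zkClient() is made static later in this commit. A hedged sketch of the same pattern for any SolrCloudTestCase subclass (config directory and name are illustrative):

    @BeforeClass
    public static void uploadConf() throws Exception {
      // Push a configset straight into ZooKeeper before creating collections.
      new ZkConfigManager(zkClient()).uploadConfigDir(configset("cloud-minimal"), "conf2");
    }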

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f56d111a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsRecoveryZkTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsRecoveryZkTest.java b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsRecoveryZkTest.java
index a8e6fb0..2dfc32b 100644
--- a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsRecoveryZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsRecoveryZkTest.java
@@ -16,42 +16,40 @@
  */
 package org.apache.solr.cloud.hdfs;
 
-import java.io.IOException;
-
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.cloud.RecoveryZkTest;
+import org.apache.solr.common.cloud.ZkConfigManager;
 import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 
-import com.carrotsearch.randomizedtesting.annotations.Nightly;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
-
 @Slow
-@Nightly
+//@Nightly
 @ThreadLeakFilters(defaultFilters = true, filters = {
     BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
 })
 public class HdfsRecoveryZkTest extends RecoveryZkTest {
+
   private static MiniDFSCluster dfsCluster;
   
   @BeforeClass
   public static void setupClass() throws Exception {
     dfsCluster = HdfsTestUtil.setupClass(createTempDir().toFile().getAbsolutePath());
     System.setProperty("solr.hdfs.blockcache.blocksperbank", "2048");
+
+    ZkConfigManager configManager = new ZkConfigManager(zkClient());
+    configManager.uploadConfigDir(configset("cloud-hdfs"), "conf");
+
+    System.setProperty("solr.hdfs.home", HdfsTestUtil.getDataDir(dfsCluster, "data"));
   }
   
   @AfterClass
   public static void teardownClass() throws Exception {
+    cluster.shutdown(); // need to close before the MiniDFSCluster
     HdfsTestUtil.teardownClass(dfsCluster);
     dfsCluster = null;
   }
 
-  
-  @Override
-  protected String getDataDir(String dataDir) throws IOException {
-    return HdfsTestUtil.getDataDir(dfsCluster, dataDir);
-  }
-
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f56d111a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
index 72406ef..92ea99b 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
@@ -191,6 +191,10 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
     @Deprecated
     public abstract AsyncCollectionSpecificAdminRequest setCollectionName(String collection);
 
+    public String getCollectionName() {
+      return collection;
+    }
+
     @Override
     public SolrParams getParams() {
       ModifiableSolrParams params = new ModifiableSolrParams(super.getParams());
@@ -1601,6 +1605,13 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
       return this;
     }
 
+    public AddReplica withProperty(String key, String value) {
+      if (this.properties == null)
+        this.properties = new Properties();
+      this.properties.setProperty(key, value);
+      return this;
+    }
+
     public String getNode() {
       return node;
     }
@@ -2178,8 +2189,9 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
   /**
    * Returns a SolrRequest to get a list of collections in the cluster
    */
-  public static List listCollections() {
-    return new List();
+  public static java.util.List<String> listCollections(SolrClient client) throws IOException, SolrServerException {
+    CollectionAdminResponse resp = new List().process(client);
+    return (java.util.List<String>) resp.getResponse().get("collections");
   }
 
   // LIST request
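
With this change the LIST action unwraps its own response; a minimal usage sketch (the client variable is illustrative):

    import java.util.List;
    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.request.CollectionAdminRequest;

    static void printCollections(SolrClient client) throws Exception {
      // Returns the collection names directly instead of a raw response object.
      List<String> names = CollectionAdminRequest.listCollections(client);
      names.forEach(System.out::println);
    }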

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f56d111a/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java
index aec6e22..e7ca0fa 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java
@@ -218,6 +218,13 @@ public class UpdateRequest extends AbstractUpdateRequest {
     return this;
   }
 
+  public UpdateRequest withRoute(String route) {
+    if (params == null)
+      params = new ModifiableSolrParams();
+    params.set(ROUTE, route);
+    return this;
+  }
+
   public UpdateResponse commit(SolrClient client, String collection) throws IOException, SolrServerException {
     if (params == null)
       params = new ModifiableSolrParams();
@@ -524,4 +531,5 @@ public class UpdateRequest extends AbstractUpdateRequest {
   public void lastDocInBatch() {
     isLastDocInBatch = true;
   }
+
 }
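
A hedged usage sketch of the new withRoute helper; the client, collection, and field names are illustrative:

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.request.UpdateRequest;

    // Pin a whole batch of updates to the shard owning route key "a".
    static void indexRouted(SolrClient client) throws Exception {
      new UpdateRequest()
          .withRoute("a")
          .add("id", "6", "shard_s", "a")
          .commit(client, "routeFieldColl");
    }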

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f56d111a/solr/solrj/src/java/org/apache/solr/common/cloud/Slice.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/Slice.java b/solr/solrj/src/java/org/apache/solr/common/cloud/Slice.java
index e4be009..bd3bafd 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/Slice.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/Slice.java
@@ -21,8 +21,11 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
+import java.util.List;
 import java.util.Locale;
 import java.util.Map;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
 
 import org.noggit.JSONUtil;
 import org.noggit.JSONWriter;
@@ -219,6 +222,13 @@ public class Slice extends ZkNodeProps implements Iterable<Replica> {
   }
 
   /**
+   * Gets all replicas that match a predicate
+   */
+  public List<Replica> getReplicas(Predicate<Replica> pred) {
+    return replicas.values().stream().filter(pred).collect(Collectors.toList());
+  }
+
+  /**
    * Get the map of coreNodeName to replicas for this slice.
    */
   public Map<String, Replica> getReplicasMap() {
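
A short sketch of the new predicate overload; the slice variable and the List/Replica imports from the hunk above are assumed:

    // All active replicas of a shard, selected via the new overload.
    List<Replica> active = slice.getReplicas(r -> r.getState() == Replica.State.ACTIVE);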

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f56d111a/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java b/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
index 3c5aa16..2c1ae3b 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
@@ -88,7 +88,7 @@ public class MiniSolrCloudCluster {
       "  \n" +
       "</solr>\n";
 
-  private final ZkTestServer zkServer;
+  private ZkTestServer zkServer; // non-final due to injectChaos()
   private final boolean externalZkServer;
   private final List<JettySolrRunner> jettys = new CopyOnWriteArrayList<>();
   private final Path baseDir;
@@ -328,6 +328,10 @@ public class MiniSolrCloudCluster {
         .build());
   }
 
+  public JettySolrRunner getJettySolrRunner(int index) {
+    return jettys.get(index);
+  }
+
   /**
    * Start a new Solr instance on a particular servlet context
    *
@@ -440,6 +444,10 @@ public class MiniSolrCloudCluster {
   public CloudSolrClient getSolrClient() {
     return solrClient;
   }
+
+  public SolrZkClient getZkClient() {
+    return solrClient.getZkStateReader().getZkClient();
+  }
   
   protected CloudSolrClient buildSolrClient() {
     return new Builder()
@@ -497,4 +505,29 @@ public class MiniSolrCloudCluster {
       log.info("Expired zookeeper session {} from node {}", sessionId, jetty.getBaseUrl());
     }
   }
+
+  public void injectChaos(Random random) throws Exception {
+
+    // sometimes we restart one of the jetty nodes
+    if (random.nextBoolean()) {
+      JettySolrRunner jetty = jettys.get(random.nextInt(jettys.size()));
+      ChaosMonkey.stop(jetty);
+      log.info("============ Restarting jetty");
+      ChaosMonkey.start(jetty);
+    }
+
+    // sometimes we restart zookeeper
+    if (random.nextBoolean()) {
+      zkServer.shutdown();
+      log.info("============ Restarting zookeeper");
+      zkServer = new ZkTestServer(zkServer.getZkDir(), zkServer.getPort());
+      zkServer.run();
+    }
+
+    // sometimes we cause a connection loss - sometimes it will hit the overseer
+    if (random.nextBoolean()) {
+      JettySolrRunner jetty = jettys.get(random.nextInt(jettys.size()));
+      ChaosMonkey.causeConnectionLoss(jetty);
+    }
+  }
 }
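
A hedged sketch of how a test might drive the new injectChaos hook; indexSomeDocs() is a hypothetical helper standing in for real test traffic:

    // Alternate indexing with random disruption: a jetty restart, a
    // ZooKeeper restart, or an injected connection loss per round.
    for (int round = 0; round < 10; round++) {
      indexSomeDocs();                // hypothetical indexing helper
      cluster.injectChaos(random());
    }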

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f56d111a/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java b/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java
index b64b1ce..77a527e 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java
@@ -44,6 +44,7 @@ import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.SolrZkClient;
+import org.apache.solr.common.cloud.ZkStateReader;
 import org.junit.AfterClass;
 import org.junit.Before;
 
@@ -174,7 +175,10 @@ public class SolrCloudTestCase extends SolrTestCaseJ4 {
   /** The cluster */
   protected static MiniSolrCloudCluster cluster;
 
-  protected SolrZkClient zkClient() {
+  protected static SolrZkClient zkClient() {
+    ZkStateReader reader = cluster.getSolrClient().getZkStateReader();
+    if (reader == null)
+      cluster.getSolrClient().connect();
     return cluster.getSolrClient().getZkStateReader().getZkClient();
   }
 


[14/50] [abbrv] lucene-solr:apiv2: SOLR-9132: Fix test bug

Posted by sa...@apache.org.
SOLR-9132: Fix test bug


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/cff2774a
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/cff2774a
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/cff2774a

Branch: refs/heads/apiv2
Commit: cff2774a3749378a040ce417f00560b95c93e10f
Parents: b6e0ab0
Author: Alan Woodward <ro...@apache.org>
Authored: Fri Oct 28 14:29:57 2016 +0100
Committer: Alan Woodward <ro...@apache.org>
Committed: Fri Oct 28 14:29:57 2016 +0100

----------------------------------------------------------------------
 .../org/apache/solr/cloud/TestDeleteCollectionOnDownNodes.java     | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/cff2774a/solr/core/src/test/org/apache/solr/cloud/TestDeleteCollectionOnDownNodes.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestDeleteCollectionOnDownNodes.java b/solr/core/src/test/org/apache/solr/cloud/TestDeleteCollectionOnDownNodes.java
index 33820b3..cb33e26 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestDeleteCollectionOnDownNodes.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestDeleteCollectionOnDownNodes.java
@@ -35,7 +35,7 @@ public class TestDeleteCollectionOnDownNodes extends SolrCloudTestCase {
   @Test
   public void deleteCollectionWithDownNodes() throws Exception {
 
-    CollectionAdminRequest.createCollection("halfdeletedcollection2", "conf", 4, 2)
+    CollectionAdminRequest.createCollection("halfdeletedcollection2", "conf", 4, 3)
         .setMaxShardsPerNode(3)
         .process(cluster.getSolrClient());
 


[20/50] [abbrv] lucene-solr:apiv2: SOLR-9701: NPE in export handler when fl parameter is omitted.

Posted by sa...@apache.org.
SOLR-9701: NPE in export handler when fl parameter is omitted.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/42eab703
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/42eab703
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/42eab703

Branch: refs/heads/apiv2
Commit: 42eab7035ed0d5ebc7ba87f8c08a7677b87b7bef
Parents: 0f8802b
Author: Erick Erickson <er...@apache.org>
Authored: Sat Oct 29 19:47:21 2016 -0700
Committer: Erick Erickson <er...@apache.org>
Committed: Sat Oct 29 19:47:21 2016 -0700

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  5 ++++
 .../solr/response/SortingResponseWriter.java    | 27 ++++++++++----------
 .../response/TestSortingResponseWriter.java     | 16 +++++++++++-
 3 files changed, 33 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/42eab703/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 3b3fba7..6c3ffcc 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -91,6 +91,11 @@ Optimizations
 * SOLR-9704: Facet Module / JSON Facet API: Optimize blockChildren facets that have
   filters specified by using those filters as acceptDocs. (yonik)
 
+Bug Fixes
+----------------------
+* SOLR-9701: NPE in export handler when "fl" parameter is omitted.
+  (Erick Erickson)
+
 Other Changes
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/42eab703/solr/core/src/java/org/apache/solr/response/SortingResponseWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/response/SortingResponseWriter.java b/solr/core/src/java/org/apache/solr/response/SortingResponseWriter.java
index d99d3dc..56c4f27 100644
--- a/solr/core/src/java/org/apache/solr/response/SortingResponseWriter.java
+++ b/solr/core/src/java/org/apache/solr/response/SortingResponseWriter.java
@@ -85,21 +85,23 @@ public class SortingResponseWriter implements QueryResponseWriter {
 
     SolrRequestInfo info = SolrRequestInfo.getRequestInfo();
     SortSpec sortSpec = info.getResponseBuilder().getSortSpec();
-    Exception exception = null;
 
     if(sortSpec == null) {
-      exception = new IOException(new SyntaxError("No sort criteria was provided."));
+      writeException((new IOException(new SyntaxError("No sort criteria was provided."))), writer, true);
+      return;
     }
 
     SolrIndexSearcher searcher = req.getSearcher();
     Sort sort = searcher.weightSort(sortSpec.getSort());
 
     if(sort == null) {
-      exception = new IOException(new SyntaxError("No sort criteria was provided."));
+      writeException((new IOException(new SyntaxError("No sort criteria was provided."))), writer, true);
+      return;
     }
 
     if(sort != null && sort.needsScores()) {
-      exception = new IOException(new SyntaxError("Scoring is not currently supported with xsort."));
+      writeException((new IOException(new SyntaxError("Scoring is not currently supported with xsort."))), writer, true);
+      return;
     }
 
     // There is a bailout in SolrIndexSearcher.getDocListNC when there are _no_ docs in the index at all.
@@ -117,7 +119,8 @@ public class SortingResponseWriter implements QueryResponseWriter {
       totalHits = ((Integer)req.getContext().get("totalHits")).intValue();
       sets = (FixedBitSet[]) req.getContext().get("export");
       if (sets == null) {
-        exception = new IOException(new SyntaxError("xport RankQuery is required for xsort: rq={!xport}"));
+        writeException((new IOException(new SyntaxError("xport RankQuery is required for xsort: rq={!xport}"))), writer, true);
+        return;
       }
     }
     SolrParams params = req.getParams();
@@ -126,7 +129,8 @@ public class SortingResponseWriter implements QueryResponseWriter {
     String[] fields = null;
 
     if(fl == null) {
-      exception = new IOException(new SyntaxError("export field list (fl) must be specified."));
+      writeException((new IOException(new SyntaxError("export field list (fl) must be specified."))), writer, true);
+      return;
     } else  {
       fields = fl.split(",");
 
@@ -135,8 +139,8 @@ public class SortingResponseWriter implements QueryResponseWriter {
         fields[i] = fields[i].trim();
 
         if(fields[i].equals("score")) {
-          exception =  new IOException(new SyntaxError("Scoring is not currently supported with xsort."));
-          break;
+          writeException((new IOException(new SyntaxError("Scoring is not currently supported with xsort."))), writer, true);
+          return;
         }
       }
     }
@@ -146,12 +150,7 @@ public class SortingResponseWriter implements QueryResponseWriter {
     try {
       fieldWriters = getFieldWriters(fields, req.getSearcher());
     } catch (Exception e) {
-      exception = e;
-    }
-
-
-    if(exception != null) {
-      writeException(exception, writer, true);
+      writeException(e, writer, true);
       return;
     }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/42eab703/solr/core/src/test/org/apache/solr/response/TestSortingResponseWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/response/TestSortingResponseWriter.java b/solr/core/src/test/org/apache/solr/response/TestSortingResponseWriter.java
index 2691d98d..4b18133 100644
--- a/solr/core/src/test/org/apache/solr/response/TestSortingResponseWriter.java
+++ b/solr/core/src/test/org/apache/solr/response/TestSortingResponseWriter.java
@@ -170,7 +170,21 @@ public class TestSortingResponseWriter extends SolrTestCaseJ4 {
 
     s =  h.query(req("q", "id:8", "qt", "/export", "fl", "stringdv", "sort", "intdv asc"));
     assertEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":1, \"docs\":[{\"stringdv\":\"chello \\\"world\\\"\"}]}}");
+  }
 
-
+  @Test
+  public void testExportRequiredParams() throws Exception {
+
+    //Test whether missing required parameters returns expected errors.
+
+    //String s =  h.query(req("q", "id:1", "qt", "/export", "fl", "floatdv,intdv,stringdv,longdv,doubledv", "sort", "intdv asc"));
+    String s;
+    s = h.query(req("qt", "/export"));
+    assertTrue("Should have had a sort error", s.contains("No sort criteria"));
+    s = h.query(req("sort", "intdv asc", "qt", "/export"));
+    assertTrue("Should have had fl error", s.contains("export field list (fl) must be specified"));
+    s = h.query(req("sort", "intdv asc", "qt", "/export", "fl", "stringdv"));
+    // Interestingly, you don't even need to specify a "q" parameter.
+    
   }
 }


[42/50] [abbrv] lucene-solr:apiv2: SOLR-8542: Adds Solr Learning to Rank (LTR) plugin for reranking results with machine learning models. (Michael Nilsson, Diego Ceccarelli, Joshua Pantony, Jon Dorando, Naveen Santhapuri, Alessandro Benedetti, David Groh

Posted by sa...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/example/solrconfig.xml
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/example/solrconfig.xml b/solr/contrib/ltr/example/solrconfig.xml
new file mode 100644
index 0000000..18d6cb8
--- /dev/null
+++ b/solr/contrib/ltr/example/solrconfig.xml
@@ -0,0 +1,1722 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!--
+     For more details about configurations options that may appear in
+     this file, see http://wiki.apache.org/solr/SolrConfigXml.
+-->
+<config>
+  <!-- In all configuration below, a prefix of "solr." for class names
+       is an alias that causes solr to search appropriate packages,
+       including org.apache.solr.(search|update|request|core|analysis)
+
+       You may also specify a fully qualified Java classname if you
+       have your own custom plugins.
+    -->
+
+  <!-- Controls what version of Lucene various components of Solr
+       adhere to.  Generally, you want to use the latest version to
+       get all bug fixes and improvements. It is highly recommended
+       that you fully re-index after changing this setting as it can
+       affect both how text is indexed and queried.
+  -->
+  <luceneMatchVersion>6.0.0</luceneMatchVersion>
+
+  <!-- <lib/> directives can be used to instruct Solr to load any Jars
+       identified and use them to resolve any "plugins" specified in
+       your solrconfig.xml or schema.xml (ie: Analyzers, Request
+       Handlers, etc...).
+
+       All directories and paths are resolved relative to the
+       instanceDir.
+
+       Please note that <lib/> directives are processed in the order
+       that they appear in your solrconfig.xml file, and are "stacked"
+       on top of each other when building a ClassLoader - so if you have
+       plugin jars with dependencies on other jars, the "lower level"
+       dependency jars should be loaded first.
+
+       If a "./lib" directory exists in your instanceDir, all files
+       found in it are included as if you had used the following
+       syntax...
+   -->
+              <lib dir="./lib" />
+
+
+  <!-- A 'dir' option by itself adds any files found in the directory
+       to the classpath, this is useful for including all jars in a
+       directory.
+
+       When a 'regex' is specified in addition to a 'dir', only the
+       files in that directory which completely match the regex
+       (anchored on both ends) will be included.
+
+       If a 'dir' option (with or without a regex) is used and nothing
+       is found that matches, a warning will be logged.
+
+       The examples below can be used to load some solr-contribs along
+       with their external dependencies.
+    -->
+  <lib dir="${solr.install.dir:../../../..}/contrib/extraction/lib" regex=".*\.jar" />
+  <lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-cell-\d.*\.jar" />
+
+  <lib dir="${solr.install.dir:../../../..}/contrib/clustering/lib/" regex=".*\.jar" />
+  <lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-clustering-\d.*\.jar" />
+
+  <lib dir="${solr.install.dir:../../../..}/contrib/langid/lib/" regex=".*\.jar" />
+  <lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-langid-\d.*\.jar" />
+
+  <lib dir="${solr.install.dir:../../../..}/contrib/velocity/lib" regex=".*\.jar" />
+  <lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-velocity-\d.*\.jar" />
+
+  <!-- an exact 'path' can be used instead of a 'dir' to specify a
+       specific jar file.  This will cause a serious error to be logged
+       if it can't be loaded.
+    -->
+  <!--
+     <lib path="../a-jar-that-does-not-exist.jar" />
+  -->
+
+  <!-- Data Directory
+
+       Used to specify an alternate directory to hold all index data
+       other than the default ./data under the Solr home.  If
+       replication is in use, this should match the replication
+       configuration.
+    -->
+  <dataDir>${solr.data.dir:}</dataDir>
+
+
+  <!-- The DirectoryFactory to use for indexes.
+
+       solr.StandardDirectoryFactory is filesystem
+       based and tries to pick the best implementation for the current
+       JVM and platform.  solr.NRTCachingDirectoryFactory, the default,
+       wraps solr.StandardDirectoryFactory and caches small files in memory
+       for better NRT performance.
+
+       One can force a particular implementation via solr.MMapDirectoryFactory,
+       solr.NIOFSDirectoryFactory, or solr.SimpleFSDirectoryFactory.
+
+       solr.RAMDirectoryFactory is memory based, not
+       persistent, and doesn't work with replication.
+    -->
+  <directoryFactory name="DirectoryFactory"
+                    class="${solr.directoryFactory:solr.NRTCachingDirectoryFactory}"/>
+
+  <!-- The CodecFactory for defining the format of the inverted index.
+       The default implementation is SchemaCodecFactory, which is the official Lucene
+       index format, but hooks into the schema to provide per-field customization of
+       the postings lists and per-document values in the fieldType element
+       (postingsFormat/docValuesFormat). Note that most of the alternative implementations
+       are experimental, so if you choose to customize the index format, it's a good
+       idea to convert back to the official format e.g. via IndexWriter.addIndexes(IndexReader)
+       before upgrading to a newer version to avoid unnecessary reindexing.
+       A "compressionMode" string element can be added to <codecFactory> to choose
+       between the existing compression modes in the default codec: "BEST_SPEED" (default)
+       or "BEST_COMPRESSION".
+  -->
+  <codecFactory class="solr.SchemaCodecFactory"/>
+
+  <!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+       Index Config - These settings control low-level behavior of indexing
+       Most example settings here show the default value, but are commented
+       out, to more easily see where customizations have been made.
+
+       Note: This replaces <indexDefaults> and <mainIndex> from older versions
+       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
+  <indexConfig>
+    <!-- maxFieldLength was removed in 4.0. To get similar behavior, include a
+         LimitTokenCountFilterFactory in your fieldType definition. E.g.
+     <filter class="solr.LimitTokenCountFilterFactory" maxTokenCount="10000"/>
+    -->
+    <!-- Maximum time to wait for a write lock (ms) for an IndexWriter. Default: 1000 -->
+    <!-- <writeLockTimeout>1000</writeLockTimeout>  -->
+
+    <!-- Expert: Enabling the compound file format will use fewer files for the index,
+         using fewer file descriptors at the expense of decreased performance.
+         Default in Lucene is "true". Default in Solr is "false" (since 3.6) -->
+    <!-- <useCompoundFile>false</useCompoundFile> -->
+
+    <!-- ramBufferSizeMB sets the amount of RAM that may be used by Lucene
+         indexing for buffering added documents and deletions before they are
+         flushed to the Directory.
+         maxBufferedDocs sets a limit on the number of documents buffered
+         before flushing.
+         If both ramBufferSizeMB and maxBufferedDocs are set, then
+         Lucene will flush based on whichever limit is hit first.
+         The default is 100 MB.  -->
+    <!-- <ramBufferSizeMB>100</ramBufferSizeMB> -->
+    <!-- <maxBufferedDocs>1000</maxBufferedDocs> -->
+
+    <!-- Expert: Merge Policy
+      -->
+    <!--
+        <mergePolicyFactory class="...">
+          ...
+        </mergePolicyFactory>
+      -->
+
+    <!-- Expert: Merge Scheduler
+         The Merge Scheduler in Lucene controls how merges are
+         performed.  The ConcurrentMergeScheduler (Lucene 2.3 default)
+         can perform merges in the background using separate threads.
+         The SerialMergeScheduler (Lucene 2.2 default) does not.
+     -->
+    <!--
+       <mergeScheduler class="org.apache.lucene.index.ConcurrentMergeScheduler"/>
+       -->
+
+    <!-- LockFactory
+
+         This option specifies which Lucene LockFactory implementation
+         to use.
+
+         single = SingleInstanceLockFactory - suggested for a
+                  read-only index or when there is no possibility of
+                  another process trying to modify the index.
+         native = NativeFSLockFactory - uses OS native file locking.
+                  Do not use when multiple solr webapps in the same
+                  JVM are attempting to share a single index.
+         simple = SimpleFSLockFactory  - uses a plain file for locking
+
+         Defaults: 'native' is default for Solr3.6 and later, otherwise
+                   'simple' is the default
+
+         More details on the nuances of each LockFactory...
+         http://wiki.apache.org/lucene-java/AvailableLockFactories
+    -->
+    <lockType>${solr.lock.type:native}</lockType>
+
+    <!-- Commit Deletion Policy
+         Custom deletion policies can be specified here. The class must
+         implement org.apache.lucene.index.IndexDeletionPolicy.
+
+         The default Solr IndexDeletionPolicy implementation supports
+         deleting index commit points on number of commits, age of
+         commit point and optimized status.
+
+         The latest commit point should always be preserved regardless
+         of the criteria.
+    -->
+    <!--
+    <deletionPolicy class="solr.SolrDeletionPolicy">
+    -->
+      <!-- The number of commit points to be kept -->
+      <!-- <str name="maxCommitsToKeep">1</str> -->
+      <!-- The number of optimized commit points to be kept -->
+      <!-- <str name="maxOptimizedCommitsToKeep">0</str> -->
+      <!--
+          Delete all commit points once they have reached the given age.
+          Supports DateMathParser syntax e.g.
+        -->
+      <!--
+         <str name="maxCommitAge">30MINUTES</str>
+         <str name="maxCommitAge">1DAY</str>
+      -->
+    <!--
+    </deletionPolicy>
+    -->
+
+    <!-- Lucene Infostream
+
+         To aid in advanced debugging, Lucene provides an "InfoStream"
+         of detailed information when indexing.
+
+         Setting the value to true will instruct the underlying Lucene
+         IndexWriter to write its info stream to solr's log. By default,
+         this is enabled here, and controlled through log4j.properties.
+      -->
+     <infoStream>true</infoStream>
+  </indexConfig>
+
+
+  <!-- JMX
+
+       This example enables JMX if and only if an existing MBeanServer
+       is found, use this if you want to configure JMX through JVM
+       parameters. Remove this to disable exposing Solr configuration
+       and statistics to JMX.
+
+       For more details see http://wiki.apache.org/solr/SolrJmx
+    -->
+  <jmx />
+  <!-- If you want to connect to a particular server, specify the
+       agentId
+    -->
+  <!-- <jmx agentId="myAgent" /> -->
+  <!-- If you want to start a new MBeanServer, specify the serviceUrl -->
+  <!-- <jmx serviceUrl="service:jmx:rmi:///jndi/rmi://localhost:9999/solr"/>
+    -->
+
+  <!-- The default high-performance update handler -->
+  <updateHandler class="solr.DirectUpdateHandler2">
+
+    <!-- Enables a transaction log, used for real-time get, durability,
+         and SolrCloud replica recovery.  The log can grow as big as
+         uncommitted changes to the index, so use of a hard autoCommit
+         is recommended (see below).
+         "dir" - the target directory for transaction logs, defaults to the
+                solr data directory.
+         "numVersionBuckets" - sets the number of buckets used to keep
+                track of max version values when checking for re-ordered
+                updates; increase this value to reduce the cost of
+                synchronizing access to version buckets during high-volume
+                indexing, this requires 8 bytes (long) * numVersionBuckets
+                of heap space per Solr core.
+    -->
+    <updateLog>
+      <str name="dir">${solr.ulog.dir:}</str>
+      <int name="numVersionBuckets">${solr.ulog.numVersionBuckets:65536}</int>
+    </updateLog>
+
+    <!-- AutoCommit
+
+         Perform a hard commit automatically under certain conditions.
+         Instead of enabling autoCommit, consider using "commitWithin"
+         when adding documents.
+
+         http://wiki.apache.org/solr/UpdateXmlMessages
+
+         maxDocs - Maximum number of documents to add since the last
+                   commit before automatically triggering a new commit.
+
+         maxTime - Maximum amount of time in ms that is allowed to pass
+                   since a document was added before automatically
+                   triggering a new commit.
+         openSearcher - if false, the commit causes recent index changes
+           to be flushed to stable storage, but does not cause a new
+           searcher to be opened to make those changes visible.
+
+         If the updateLog is enabled, then it's highly recommended to
+         have some sort of hard autoCommit to limit the log size.
+      -->
+     <autoCommit>
+       <maxTime>${solr.autoCommit.maxTime:15000}</maxTime>
+       <openSearcher>false</openSearcher>
+     </autoCommit>
+
+    <!-- softAutoCommit is like autoCommit except it causes a
+         'soft' commit which only ensures that changes are visible
+         but does not ensure that data is synced to disk.  This is
+         faster and more near-realtime friendly than a hard commit.
+      -->
+
+     <autoSoftCommit>
+       <maxTime>${solr.autoSoftCommit.maxTime:-1}</maxTime>
+     </autoSoftCommit>
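For the "commitWithin" alternative recommended above, a minimal sketch using the
Python 3 standard library (the localhost URL and "techproducts" collection are
assumptions, not part of this config):

    import json
    import urllib.request

    # Add a document and ask Solr to commit it within 5 seconds,
    # instead of relying on autoCommit alone.
    url = "http://localhost:8983/solr/techproducts/update?commitWithin=5000"
    doc = [{"id": "commit-within-demo", "name": "example"}]
    req = urllib.request.Request(url, data=json.dumps(doc).encode("utf-8"),
                                 headers={"Content-Type": "application/json"})
    print(urllib.request.urlopen(req).status)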
+
+    <!-- Update Related Event Listeners
+
+         Various IndexWriter related events can trigger Listeners to
+         take actions.
+
+         postCommit - fired after every commit or optimize command
+         postOptimize - fired after every optimize command
+      -->
+    <!-- The RunExecutableListener executes an external command from a
+         hook such as postCommit or postOptimize.
+
+         exe - the name of the executable to run
+         dir - dir to use as the current working directory. (default=".")
+         wait - the calling thread waits until the executable returns.
+                (default="true")
+         args - the arguments to pass to the program.  (default is none)
+         env - environment variables to set.  (default is none)
+      -->
+    <!-- This example shows how RunExecutableListener could be used
+         with the script based replication...
+         http://wiki.apache.org/solr/CollectionDistribution
+      -->
+    <!--
+       <listener event="postCommit" class="solr.RunExecutableListener">
+         <str name="exe">solr/bin/snapshooter</str>
+         <str name="dir">.</str>
+         <bool name="wait">true</bool>
+         <arr name="args"> <str>arg1</str> <str>arg2</str> </arr>
+         <arr name="env"> <str>MYVAR=val1</str> </arr>
+       </listener>
+      -->
+
+  </updateHandler>
+
+  <!-- IndexReaderFactory
+
+       Use the following format to specify a custom IndexReaderFactory,
+       which allows for alternate IndexReader implementations.
+
+       ** Experimental Feature **
+
+       Please note - Using a custom IndexReaderFactory may prevent
+       certain other features from working. The API to
+       IndexReaderFactory may change without warning or may even be
+       removed from future releases if the problems cannot be
+       resolved.
+
+
+       ** Features that may not work with custom IndexReaderFactory **
+
+       The ReplicationHandler assumes a disk-resident index. Using a
+       custom IndexReader implementation may cause incompatibility
+       with ReplicationHandler and may cause replication to not work
+       correctly. See SOLR-1366 for details.
+
+    -->
+  <!--
+  <indexReaderFactory name="IndexReaderFactory" class="package.class">
+    <str name="someArg">Some Value</str>
+  </indexReaderFactory >
+  -->
+
+  <!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+       Query section - these settings control query time things like caches
+       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
+  <query>
+    <!-- Max Boolean Clauses
+
+         Maximum number of clauses in each BooleanQuery; an exception
+         is thrown if this limit is exceeded.
+
+         ** WARNING **
+
+         This option actually modifies a global Lucene property that
+         will affect all SolrCores.  If multiple solrconfig.xml files
+         disagree on this property, the value at any given moment will
+         be based on the last SolrCore to be initialized.
+
+      -->
+    <maxBooleanClauses>1024</maxBooleanClauses>
+
+
+    <!-- Slow Query Threshold (in millis)
+
+         At high request rates, logging all requests can become a bottleneck
+         and therefore INFO logging is often turned off. However, it is still
+         useful to be able to set a latency threshold above which a request
+         is considered "slow" and log that request at WARN level so we can
+         easily identify slow queries.
+    -->
+    <slowQueryThresholdMillis>-1</slowQueryThresholdMillis>
+
+
+    <!-- Solr Internal Query Caches
+
+         There are two implementations of cache available for Solr,
+         LRUCache, based on a synchronized LinkedHashMap, and
+         FastLRUCache, based on a ConcurrentHashMap.
+
+         FastLRUCache has faster gets and slower puts in single
+         threaded operation and thus is generally faster than LRUCache
+         when the hit ratio of the cache is high (> 75%), and may be
+         faster under other scenarios on multi-cpu systems.
+    -->
+
+    <!-- Filter Cache
+
+         Cache used by SolrIndexSearcher for filters (DocSets),
+         unordered sets of *all* documents that match a query.  When a
+         new searcher is opened, its caches may be prepopulated or
+         "autowarmed" using data from caches in the old searcher.
+         autowarmCount is the number of items to prepopulate.  For
+         LRUCache, the autowarmed items will be the most recently
+         accessed items.
+
+         Parameters:
+           class - the SolrCache implementation to use
+               (LRUCache or FastLRUCache)
+           size - the maximum number of entries in the cache
+           initialSize - the initial capacity (number of entries) of
+               the cache.  (see java.util.HashMap)
+           autowarmCount - the number of entries to prepopulate from
+               an old cache.
+      -->
+    <filterCache class="solr.FastLRUCache"
+                 size="512"
+                 initialSize="512"
+                 autowarmCount="0"/>
+
+    <!-- Query Result Cache
+
+        Caches results of searches - ordered lists of document ids
+        (DocList) based on a query, a sort, and the range of documents requested.
+        Additional parameter supported by LRUCache:
+           maxRamMB - the maximum amount of RAM (in MB) that this cache is allowed
+                      to occupy
+     -->
+    <queryResultCache class="solr.LRUCache"
+                     size="512"
+                     initialSize="512"
+                     autowarmCount="0"/>
+
+    <!-- Document Cache
+
+         Caches Lucene Document objects (the stored fields for each
+         document).  Since Lucene internal document ids are transient,
+         this cache will not be autowarmed.
+      -->
+    <documentCache class="solr.LRUCache"
+                   size="512"
+                   initialSize="512"
+                   autowarmCount="0"/>
+
+    <!-- custom cache currently used by block join -->
+    <cache name="perSegFilter"
+      class="solr.search.LRUCache"
+      size="10"
+      initialSize="0"
+      autowarmCount="10"
+      regenerator="solr.NoOpRegenerator" />
+
+    <!-- Field Value Cache
+
+         Cache used to hold field values that are quickly accessible
+         by document id.  The fieldValueCache is created by default
+         even if not configured here.
+      -->
+    <!--
+       <fieldValueCache class="solr.FastLRUCache"
+                        size="512"
+                        autowarmCount="128"
+                        showItems="32" />
+      -->
+
+    <!-- Custom Cache
+
+         Example of a generic cache.  These caches may be accessed by
+         name through SolrIndexSearcher.getCache(), cacheLookup(), and
+         cacheInsert().  The purpose is to enable easy caching of
+         user/application level data.  The regenerator argument should
+         be specified as an implementation of solr.CacheRegenerator
+         if autowarming is desired.
+      -->
+      <!-- Cache for storing and fetching feature vectors -->
+    <cache name="QUERY_DOC_FV"
+      class="solr.search.LRUCache"
+      size="4096"
+      initialSize="2048"
+      autowarmCount="4096"
+      regenerator="solr.search.NoOpRegenerator" />
+    <!--
+       <cache name="myUserCache"
+              class="solr.LRUCache"
+              size="4096"
+              initialSize="1024"
+              autowarmCount="1024"
+              regenerator="com.mycompany.MyRegenerator"
+              />
+      -->
+
+
+    <!-- Lazy Field Loading
+
+         If true, stored fields that are not requested will be loaded
+         lazily.  This can result in a significant speed improvement
+         if the usual case is to not load all stored fields,
+         especially if the skipped fields are large compressed text
+         fields.
+    -->
+    <enableLazyFieldLoading>true</enableLazyFieldLoading>
+
+   <!-- Use Filter For Sorted Query
+
+        A possible optimization that attempts to use a filter to
+        satisfy a search.  If the requested sort does not include
+        score, then the filterCache will be checked for a filter
+        matching the query. If found, the filter will be used as the
+        source of document ids, and then the sort will be applied to
+        that.
+
+        For most situations, this will not be useful unless you
+        frequently get the same search repeatedly with different sort
+        options, and none of them ever use "score"
+     -->
+   <!--
+      <useFilterForSortedQuery>true</useFilterForSortedQuery>
+     -->
+
+   <!-- Result Window Size
+
+        An optimization for use with the queryResultCache.  When a search
+        is requested, a superset of the requested number of document ids
+        are collected.  For example, if a search for a particular query
+        requests matching documents 10 through 19, and queryResultWindowSize is 50,
+        then documents 0 through 49 will be collected and cached.  Any further
+        requests in that range can be satisfied via the cache.
+     -->
+   <queryResultWindowSize>20</queryResultWindowSize>
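To make the windowing concrete, a small sketch of the rounding described above
(the round-up-to-a-window-multiple rule is an assumption based on the comment,
not taken from Solr's source):

    # Superset of document ids collected for a paged request.
    def cached_superset_end(start, rows, window_size):
        end = start + rows
        return ((end + window_size - 1) // window_size) * window_size

    print(cached_superset_end(10, 10, 50))  # 50: documents 0 through 49
    print(cached_superset_end(95, 10, 50))  # 150: documents 0 through 149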
+
+   <!-- Maximum number of documents to cache for any entry in the
+        queryResultCache.
+     -->
+   <queryResultMaxDocsCached>200</queryResultMaxDocsCached>
+
+   <!-- Query Related Event Listeners
+
+        Various IndexSearcher related events can trigger Listeners to
+        take actions.
+
+        newSearcher - fired whenever a new searcher is being prepared
+        and there is a current searcher handling requests (aka
+        registered).  It can be used to prime certain caches to
+        prevent long request times for certain requests.
+
+        firstSearcher - fired whenever a new searcher is being
+        prepared but there is no current registered searcher to handle
+        requests or to gain autowarming data from.
+
+
+     -->
+    <!-- QuerySenderListener takes an array of NamedList and executes a
+         local query request for each NamedList in sequence.
+      -->
+    <listener event="newSearcher" class="solr.QuerySenderListener">
+      <arr name="queries">
+        <!--
+           <lst><str name="q">solr</str><str name="sort">price asc</str></lst>
+           <lst><str name="q">rocks</str><str name="sort">weight asc</str></lst>
+          -->
+      </arr>
+    </listener>
+    <listener event="firstSearcher" class="solr.QuerySenderListener">
+      <arr name="queries">
+        <lst>
+          <str name="q">static firstSearcher warming in solrconfig.xml</str>
+        </lst>
+      </arr>
+    </listener>
+
+    <!-- Use Cold Searcher
+
+         If a search request comes in and there is no current
+         registered searcher, then immediately register the still
+         warming searcher and use it.  If "false" then all requests
+         will block until the first searcher is done warming.
+      -->
+    <useColdSearcher>false</useColdSearcher>
+
+    <!-- Max Warming Searchers
+
+         Maximum number of searchers that may be warming in the
+         background concurrently.  An error is returned if this limit
+         is exceeded.
+
+         Recommended values are 1-2 for read-only slaves, and higher for
+         masters without cache warming.
+      -->
+    <maxWarmingSearchers>2</maxWarmingSearchers>
+
+  </query>
+
+
+  <!-- Request Dispatcher
+
+       This section contains instructions for how the SolrDispatchFilter
+       should behave when processing requests for this SolrCore.
+
+       handleSelect is a legacy option that affects the behavior of requests
+       such as /select?qt=XXX
+
+       handleSelect="true" will cause the SolrDispatchFilter to process
+       the request and dispatch the query to a handler specified by the
+       "qt" param, assuming "/select" isn't already registered.
+
+       handleSelect="false" will cause the SolrDispatchFilter to
+       ignore "/select" requests, resulting in a 404 unless a handler
+       is explicitly registered with the name "/select"
+
+       handleSelect="true" is not recommended for new users, but is the default
+       for backwards compatibility
+    -->
+  <requestDispatcher handleSelect="false" >
+    <!-- Request Parsing
+
+         These settings indicate how Solr Requests may be parsed, and
+         what restrictions may be placed on the ContentStreams from
+         those requests
+
+         enableRemoteStreaming - enables use of the stream.file
+         and stream.url parameters for specifying remote streams.
+
+         multipartUploadLimitInKB - specifies the max size (in KiB) of
+         Multipart File Uploads that Solr will allow in a Request.
+
+         formdataUploadLimitInKB - specifies the max size (in KiB) of
+         form data (application/x-www-form-urlencoded) sent via
+         POST. You can use POST to pass request parameters not
+         fitting into the URL.
+
+         addHttpRequestToContext - if set to true, it will instruct
+         the requestParsers to include the original HttpServletRequest
+         object in the context map of the SolrQueryRequest under the
+         key "httpRequest". It will not be used by any of the existing
+         Solr components, but may be useful when developing custom
+         plugins.
+
+         *** WARNING ***
+         The settings below authorize Solr to fetch remote files. You
+         should make sure your system has authentication in place
+         before using enableRemoteStreaming="true".
+
+      -->
+    <requestParsers enableRemoteStreaming="true"
+                    multipartUploadLimitInKB="2048000"
+                    formdataUploadLimitInKB="2048"
+                    addHttpRequestToContext="false"/>
+
+    <!-- HTTP Caching
+
+         Set HTTP caching related parameters (for proxy caches and clients).
+
+         The options below instruct Solr not to output any HTTP Caching
+         related headers
+      -->
+    <httpCaching never304="true" />
+    <!-- If you include a <cacheControl> directive, it will be used to
+         generate a Cache-Control header (as well as an Expires header
+         if the value contains "max-age=")
+
+         By default, no Cache-Control header is generated.
+
+         You can use the <cacheControl> option even if you have set
+         never304="true"
+      -->
+    <!--
+       <httpCaching never304="true" >
+         <cacheControl>max-age=30, public</cacheControl>
+       </httpCaching>
+      -->
+    <!-- To enable Solr to respond with automatically generated HTTP
+         Caching headers, and to response to Cache Validation requests
+         correctly, set the value of never304="false"
+
+         This will cause Solr to generate Last-Modified and ETag
+         headers based on the properties of the Index.
+
+         The following options can also be specified to affect the
+         values of these headers...
+
+         lastModFrom - the default value is "openTime" which means the
+         Last-Modified value (and validation against If-Modified-Since
+         requests) will all be relative to when the current Searcher
+         was opened.  You can change it to lastModFrom="dirLastMod" if
+         you want the value to exactly correspond to when the physical
+         index was last modified.
+
+         etagSeed="..." is an option you can change to force the ETag
+         header (and validation against If-None-Match requests) to be
+         different even if the index has not changed (ie: when making
+         significant changes to your config file)
+
+         (lastModifiedFrom and etagSeed are both ignored if you use
+         the never304="true" option)
+      -->
+    <!--
+       <httpCaching lastModifiedFrom="openTime"
+                    etagSeed="Solr">
+         <cacheControl>max-age=30, public</cacheControl>
+       </httpCaching>
+      -->
+  </requestDispatcher>
+
+  <!-- Request Handlers
+
+       http://wiki.apache.org/solr/SolrRequestHandler
+
+       Incoming queries will be dispatched to a specific handler by name
+       based on the path specified in the request.
+
+       Legacy behavior: If the request path uses "/select" but no Request
+       Handler has that name, and if handleSelect="true" has been specified in
+       the requestDispatcher, then the Request Handler is dispatched based on
+       the qt parameter.  Handlers without a leading '/' are accessed this way
+       like so: http://host/app/[core/]select?qt=name  If no qt is
+       given, then the requestHandler that declares default="true" will be
+       used, or failing that, the one named "standard".
+
+       If a Request Handler is declared with startup="lazy", then it will
+       not be initialized until the first request that uses it.
+
+    -->
+  <!-- SearchHandler
+
+       http://wiki.apache.org/solr/SearchHandler
+
+       For processing Search Queries, the primary Request Handler
+       provided with Solr is "SearchHandler".  It delegates to a
+       sequence of SearchComponents (see below) and supports
+       distributed queries across multiple shards.
+    -->
+  <requestHandler name="/select" class="solr.SearchHandler">
+    <!-- default values for query parameters can be specified, these
+         will be overridden by parameters in the request
+      -->
+     <lst name="defaults">
+       <str name="echoParams">explicit</str>
+       <int name="rows">10</int>
+       <!-- Controls the distribution of a query to shards other than itself.
+            Consider making 'preferLocalShards' true when:
+              1) maxShardsPerNode > 1
+              2) Number of shards > 1
+              3) CloudSolrClient or LbHttpSolrServer is used by clients.
+            Without this option, every core broadcasts the distributed query to
+            a replica of each shard where the replicas are chosen randomly.
+            This option directs the cores to prefer cores hosted locally, thus
+            preventing network delays between machines.
+            This behavior also keeps a bad/slow machine from slowing down all
+            the good machines (when those good machines would otherwise be
+            querying the bad one).
+
+            Set this option to false for clients connecting through HttpSolrServer.
+       -->
+       <bool name="preferLocalShards">false</bool>
+     </lst>
+    <!-- In addition to defaults, "appends" params can be specified
+         to identify values which should be appended to the list of
+         multi-val params from the query (or the existing "defaults").
+      -->
+    <!-- In this example, the param "fq=instock:true" would be appended to
+         any query time fq params the user may specify, as a mechanism for
+         partitioning the index, independent of any user selected filtering
+         that may also be desired (perhaps as a result of faceted searching).
+
+         NOTE: there is *absolutely* nothing a client can do to prevent these
+         "appends" values from being used, so don't use this mechanism
+         unless you are sure you always want it.
+      -->
+    <!--
+       <lst name="appends">
+         <str name="fq">inStock:true</str>
+       </lst>
+      -->
+    <!-- "invariants" are a way of letting the Solr maintainer lock down
+         the options available to Solr clients.  Any params values
+         specified here are used regardless of what values may be specified
+         in either the query, the "defaults", or the "appends" params.
+
+         In this example, the facet.field and facet.query params would
+         be fixed, limiting the facets clients can use.  Faceting is
+         not turned on by default - but if the client does specify
+         facet=true in the request, these are the only facets they
+         will be able to see counts for, regardless of what other
+         facet.field or facet.query params they may specify.
+
+         NOTE: there is *absolutely* nothing a client can do to prevent these
+         "invariants" values from being used, so don't use this mechanism
+         unless you are sure you always want it.
+      -->
+    <!--
+       <lst name="invariants">
+         <str name="facet.field">cat</str>
+         <str name="facet.field">manu_exact</str>
+         <str name="facet.query">price:[* TO 500]</str>
+         <str name="facet.query">price:[500 TO *]</str>
+       </lst>
+      -->
+    <!-- If the default list of SearchComponents is not desired, that
+         list can either be overridden completely, or components can be
+         prepended or appended to the default list.  (see below)
+      -->
+    <!--
+       <arr name="components">
+         <str>nameOfCustomComponent1</str>
+         <str>nameOfCustomComponent2</str>
+       </arr>
+      -->
+    </requestHandler>
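A quick sketch of how the "defaults" above interact with request-time
parameters: anything sent on the request overrides a default such as rows=10
(the localhost URL and collection name are assumptions):

    import json
    import urllib.parse
    import urllib.request

    # rows=3 on the request wins over the handler's configured rows=10.
    params = urllib.parse.urlencode({"q": "*:*", "rows": 3, "wt": "json"})
    url = "http://localhost:8983/solr/techproducts/select?" + params
    body = json.loads(urllib.request.urlopen(url).read())
    print(len(body["response"]["docs"]))  # at most 3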
+
+ <!-- Query parser used to rerank top docs with a provided model -->
+  <queryParser name="ltr" class="org.apache.solr.search.LTRQParserPlugin" >
+    <int name="threadModule.totalPoolThreads">10</int> <!-- Maximum threads to use for all queries -->
+    <int name="threadModule.numThreadsPerRequest">10</int> <!-- Maximum threads to use for a single query-->
+  </queryParser>
+
+  <!--  Transformer that will encode the document features in the response. For each document the
+  transformer will add the features as an extra field in the response. The name of the field will be
+  the name of the transformer enclosed in brackets (in this case, [features]). To get the feature
+  vector, you must request the field (e.g., fl="*,[features]").  -->
+  <transformer name="features" class="org.apache.solr.response.transform.LTRFeatureLoggerTransformerFactory" />
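A sketch of fetching the logged feature vector through this transformer (it
assumes the feature store from techproducts-features.json has been uploaded and
that Solr runs on localhost; the efi.user_query value is illustrative):

    import json
    import urllib.parse
    import urllib.request

    # Request the [features] pseudo-field; efi.* feeds ${user_query}
    # in the SolrFeature defined in techproducts-features.json.
    params = urllib.parse.urlencode({
        "q": "ipod",
        "fl": "id,score,[features efi.user_query=ipod]",
        "wt": "json",
    })
    url = "http://localhost:8983/solr/techproducts/query?" + params
    docs = json.loads(urllib.request.urlopen(url).read())["response"]["docs"]
    print(docs[0].get("[features]"))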
+
+
+  <!-- A request handler that returns indented JSON by default -->
+  <requestHandler name="/query" class="solr.SearchHandler">
+     <lst name="defaults">
+       <str name="echoParams">explicit</str>
+       <str name="wt">json</str>
+       <str name="indent">true</str>
+       <str name="df">text</str>
+     </lst>
+ </requestHandler>
+
+
+  <!-- A Robust Example
+
+       This example SearchHandler declaration shows off usage of the
+       SearchHandler with many defaults declared
+
+       Note that multiple instances of the same Request Handler
+       (SearchHandler) can be registered multiple times with different
+       names (and different init parameters)
+    -->
+  <requestHandler name="/browse" class="solr.SearchHandler">
+     <lst name="defaults">
+       <str name="echoParams">explicit</str>
+
+       <!-- VelocityResponseWriter settings -->
+       <str name="wt">velocity</str>
+       <str name="v.template">browse</str>
+       <str name="v.layout">layout</str>
+       <str name="title">Solritas</str>
+
+       <!-- Query settings -->
+       <str name="defType">edismax</str>
+       <str name="qf">
+          text^0.5 features^1.0 name^1.2 sku^1.5 id^10.0 manu^1.1 cat^1.4
+          title^10.0 description^5.0 keywords^5.0 author^2.0 resourcename^1.0
+       </str>
+       <str name="mm">100%</str>
+       <str name="q.alt">*:*</str>
+       <str name="rows">10</str>
+       <str name="fl">*,score</str>
+
+       <str name="mlt.qf">
+         text^0.5 features^1.0 name^1.2 sku^1.5 id^10.0 manu^1.1 cat^1.4
+         title^10.0 description^5.0 keywords^5.0 author^2.0 resourcename^1.0
+       </str>
+       <str name="mlt.fl">text,features,name,sku,id,manu,cat,title,description,keywords,author,resourcename</str>
+       <int name="mlt.count">3</int>
+
+       <!-- Faceting defaults -->
+       <str name="facet">on</str>
+       <str name="facet.missing">true</str>
+       <str name="facet.field">cat</str>
+       <str name="facet.field">manu_exact</str>
+       <str name="facet.field">content_type</str>
+       <str name="facet.field">author_s</str>
+       <str name="facet.query">ipod</str>
+       <str name="facet.query">GB</str>
+       <str name="facet.mincount">1</str>
+       <str name="facet.pivot">cat,inStock</str>
+       <str name="facet.range.other">after</str>
+       <str name="facet.range">price</str>
+       <int name="f.price.facet.range.start">0</int>
+       <int name="f.price.facet.range.end">600</int>
+       <int name="f.price.facet.range.gap">50</int>
+       <str name="facet.range">popularity</str>
+       <int name="f.popularity.facet.range.start">0</int>
+       <int name="f.popularity.facet.range.end">10</int>
+       <int name="f.popularity.facet.range.gap">3</int>
+       <str name="facet.range">manufacturedate_dt</str>
+       <str name="f.manufacturedate_dt.facet.range.start">NOW/YEAR-10YEARS</str>
+       <str name="f.manufacturedate_dt.facet.range.end">NOW</str>
+       <str name="f.manufacturedate_dt.facet.range.gap">+1YEAR</str>
+       <str name="f.manufacturedate_dt.facet.range.other">before</str>
+       <str name="f.manufacturedate_dt.facet.range.other">after</str>
+
+       <!-- Highlighting defaults -->
+       <str name="hl">on</str>
+       <str name="hl.fl">content features title name</str>
+       <str name="hl.preserveMulti">true</str>
+       <str name="hl.encoder">html</str>
+       <str name="hl.simple.pre">&lt;b&gt;</str>
+       <str name="hl.simple.post">&lt;/b&gt;</str>
+       <str name="f.title.hl.fragsize">0</str>
+       <str name="f.title.hl.alternateField">title</str>
+       <str name="f.name.hl.fragsize">0</str>
+       <str name="f.name.hl.alternateField">name</str>
+       <str name="f.content.hl.snippets">3</str>
+       <str name="f.content.hl.fragsize">200</str>
+       <str name="f.content.hl.alternateField">content</str>
+       <str name="f.content.hl.maxAlternateFieldLength">750</str>
+
+       <!-- Spell checking defaults -->
+       <str name="spellcheck">on</str>
+       <str name="spellcheck.extendedResults">false</str>
+       <str name="spellcheck.count">5</str>
+       <str name="spellcheck.alternativeTermCount">2</str>
+       <str name="spellcheck.maxResultsForSuggest">5</str>
+       <str name="spellcheck.collate">true</str>
+       <str name="spellcheck.collateExtendedResults">true</str>
+       <str name="spellcheck.maxCollationTries">5</str>
+       <str name="spellcheck.maxCollations">3</str>
+     </lst>
+
+     <!-- append spellchecking to our list of components -->
+     <arr name="last-components">
+       <str>spellcheck</str>
+     </arr>
+  </requestHandler>
+
+
+  <initParams path="/update/**,/query,/select,/tvrh,/elevate,/spell,/browse">
+    <lst name="defaults">
+      <str name="df">text</str>
+    </lst>
+  </initParams>
+
+  <initParams path="/update/json/docs">
+    <lst name="defaults">
+      <!--this ensures that the entire json doc will be stored verbatim into one field-->
+      <str name="srcField">_src_</str>
+      <!--This means the uniqueKeyField will be extracted from the fields and
+       all fields go into the 'df' field. In this config df is already configured to be 'text'
+        -->
+      <str name="mapUniqueKeyOnly">true</str>
+    </lst>
+
+  </initParams>
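A sketch of what these initParams mean for a client posting free-form JSON (URL
and collection assumed): the whole document is kept verbatim in _src_ and only
the unique key is mapped to a real field:

    import json
    import urllib.request

    # Arbitrary JSON goes in; only "id" is mapped, the rest lands in _src_/df.
    url = "http://localhost:8983/solr/techproducts/update/json/docs?commit=true"
    doc = {"id": "json-docs-demo", "nested": {"anything": ["goes", "here"]}}
    req = urllib.request.Request(url, data=json.dumps(doc).encode("utf-8"),
                                 headers={"Content-Type": "application/json"})
    print(urllib.request.urlopen(req).status)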
+
+  <!-- The following are implicitly added
+  <requestHandler name="/update/json" class="solr.UpdateRequestHandler">
+        <lst name="defaults">
+         <str name="stream.contentType">application/json</str>
+       </lst>
+  </requestHandler>
+  <requestHandler name="/update/csv" class="solr.UpdateRequestHandler">
+        <lst name="defaults">
+         <str name="stream.contentType">application/csv</str>
+       </lst>
+  </requestHandler>
+  -->
+
+  <!-- Solr Cell Update Request Handler
+
+       http://wiki.apache.org/solr/ExtractingRequestHandler
+
+    -->
+  <requestHandler name="/update/extract"
+                  startup="lazy"
+                  class="solr.extraction.ExtractingRequestHandler" >
+    <lst name="defaults">
+      <str name="lowernames">true</str>
+      <str name="uprefix">ignored_</str>
+
+      <!-- capture link hrefs but ignore div attributes -->
+      <str name="captureAttr">true</str>
+      <str name="fmap.a">links</str>
+      <str name="fmap.div">ignored_</str>
+    </lst>
+  </requestHandler>
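A sketch of exercising the mappings above: captureAttr=true plus fmap.a routes
<a href> values into the "links" field (URL, collection, and the document
content are illustrative):

    import urllib.parse
    import urllib.request

    # Post a tiny HTML document to Solr Cell with an explicit unique key.
    params = urllib.parse.urlencode({"literal.id": "extract-demo",
                                     "commit": "true"})
    url = "http://localhost:8983/solr/techproducts/update/extract?" + params
    html = b'<html><body><a href="http://example.com">example</a></body></html>'
    req = urllib.request.Request(url, data=html,
                                 headers={"Content-Type": "text/html"})
    print(urllib.request.urlopen(req).status)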
+
+
+  <!-- Field Analysis Request Handler
+
+       RequestHandler that provides much the same functionality as
+       analysis.jsp. Provides the ability to specify multiple field
+       types and field names in the same request and outputs
+       index-time and query-time analysis for each of them.
+
+       Request parameters are:
+       analysis.fieldname - field name whose analyzers are to be used
+
+       analysis.fieldtype - field type whose analyzers are to be used
+       analysis.fieldvalue - text for index-time analysis
+       q (or analysis.q) - text for query time analysis
+       analysis.showmatch (true|false) - When set to true and when
+           query analysis is performed, the produced tokens of the
+           field value analysis will be marked as "matched" for every
+           token that is produced by the query analysis
+   -->
+  <requestHandler name="/analysis/field"
+                  startup="lazy"
+                  class="solr.FieldAnalysisRequestHandler" />
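A sketch of calling this handler with the parameters listed above (URL and
collection assumed; text_general is the field type used elsewhere in this
config):

    import urllib.parse
    import urllib.request

    # Show the index-time analysis chain for a field type.
    params = urllib.parse.urlencode({
        "analysis.fieldtype": "text_general",
        "analysis.fieldvalue": "The Quick Brown Fox",
        "wt": "json",
    })
    url = "http://localhost:8983/solr/techproducts/analysis/field?" + params
    print(urllib.request.urlopen(url).read()[:200])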
+
+
+  <!-- Document Analysis Handler
+
+       http://wiki.apache.org/solr/AnalysisRequestHandler
+
+       An analysis handler that provides a breakdown of the analysis
+       process of provided documents. This handler expects a (single)
+       content stream with the following format:
+
+       <docs>
+         <doc>
+           <field name="id">1</field>
+           <field name="name">The Name</field>
+           <field name="text">The Text Value</field>
+         </doc>
+         <doc>...</doc>
+         <doc>...</doc>
+         ...
+       </docs>
+
+    Note: Each document must contain a field which serves as the
+    unique key. This key is used in the returned response to associate
+    an analysis breakdown to the analyzed document.
+
+    Like the FieldAnalysisRequestHandler, this handler also supports
+    query analysis by sending either an "analysis.query" or "q"
+    request parameter that holds the query text to be analyzed. It
+    also supports the "analysis.showmatch" parameter; when it is set to
+    true, all field tokens that match the query tokens will be marked
+    as a "match".
+  -->
+  <requestHandler name="/analysis/document"
+                  class="solr.DocumentAnalysisRequestHandler"
+                  startup="lazy" />
+
+  <!-- Echo the request contents back to the client -->
+  <requestHandler name="/debug/dump" class="solr.DumpRequestHandler" >
+    <lst name="defaults">
+     <str name="echoParams">explicit</str>
+     <str name="echoHandler">true</str>
+    </lst>
+  </requestHandler>
+
+  <!-- Search Components
+
+       Search components are registered to SolrCore and used by
+       instances of SearchHandler (which can access them by name)
+
+       By default, the following components are available:
+
+       <searchComponent name="query"     class="solr.QueryComponent" />
+       <searchComponent name="facet"     class="solr.FacetComponent" />
+       <searchComponent name="mlt"       class="solr.MoreLikeThisComponent" />
+       <searchComponent name="highlight" class="solr.HighlightComponent" />
+       <searchComponent name="stats"     class="solr.StatsComponent" />
+       <searchComponent name="debug"     class="solr.DebugComponent" />
+
+       Default configuration in a requestHandler would look like:
+
+       <arr name="components">
+         <str>query</str>
+         <str>facet</str>
+         <str>mlt</str>
+         <str>highlight</str>
+         <str>stats</str>
+         <str>debug</str>
+       </arr>
+
+       If you register a searchComponent to one of the standard names,
+       that will be used instead of the default.
+
+       To insert components before or after the 'standard' components, use:
+
+       <arr name="first-components">
+         <str>myFirstComponentName</str>
+       </arr>
+
+       <arr name="last-components">
+         <str>myLastComponentName</str>
+       </arr>
+
+       NOTE: The component registered with the name "debug" will
+       always be executed after the "last-components"
+
+     -->
+
+   <!-- Spell Check
+
+        The spell check component can return a list of alternative spelling
+        suggestions.
+
+        http://wiki.apache.org/solr/SpellCheckComponent
+     -->
+  <searchComponent name="spellcheck" class="solr.SpellCheckComponent">
+
+    <str name="queryAnalyzerFieldType">text_general</str>
+
+    <!-- Multiple "Spell Checkers" can be declared and used by this
+         component
+      -->
+
+    <!-- a spellchecker built from a field of the main index -->
+    <lst name="spellchecker">
+      <str name="name">default</str>
+      <str name="field">text</str>
+      <str name="classname">solr.DirectSolrSpellChecker</str>
+      <!-- the spellcheck distance measure used; the default is the internal Levenshtein -->
+      <str name="distanceMeasure">internal</str>
+      <!-- minimum accuracy needed to be considered a valid spellcheck suggestion -->
+      <float name="accuracy">0.5</float>
+      <!-- the maximum #edits we consider when enumerating terms: can be 1 or 2 -->
+      <int name="maxEdits">2</int>
+      <!-- the minimum shared prefix when enumerating terms -->
+      <int name="minPrefix">1</int>
+      <!-- maximum number of inspections per result. -->
+      <int name="maxInspections">5</int>
+      <!-- minimum length of a query term to be considered for correction -->
+      <int name="minQueryLength">4</int>
+      <!-- maximum threshold of documents in which a query term may appear for it to be considered for correction -->
+      <float name="maxQueryFrequency">0.01</float>
+      <!-- uncomment this to require suggestions to occur in 1% of the documents
+        <float name="thresholdTokenFrequency">.01</float>
+      -->
+    </lst>
+
+    <!-- a spellchecker that can break or combine words.  See "/spell" handler below for usage -->
+    <lst name="spellchecker">
+      <str name="name">wordbreak</str>
+      <str name="classname">solr.WordBreakSolrSpellChecker</str>
+      <str name="field">name</str>
+      <str name="combineWords">true</str>
+      <str name="breakWords">true</str>
+      <int name="maxChanges">10</int>
+    </lst>
+
+    <!-- a spellchecker that uses a different distance measure -->
+    <!--
+       <lst name="spellchecker">
+         <str name="name">jarowinkler</str>
+         <str name="field">spell</str>
+         <str name="classname">solr.DirectSolrSpellChecker</str>
+         <str name="distanceMeasure">
+           org.apache.lucene.search.spell.JaroWinklerDistance
+         </str>
+       </lst>
+     -->
+
+    <!-- a spellchecker that use an alternate comparator
+
+         comparatorClass can be one of:
+          1. score (default)
+          2. freq (Frequency first, then score)
+          3. A fully qualified class name
+      -->
+    <!--
+       <lst name="spellchecker">
+         <str name="name">freq</str>
+         <str name="field">lowerfilt</str>
+         <str name="classname">solr.DirectSolrSpellChecker</str>
+         <str name="comparatorClass">freq</str>
+      -->
+
+    <!-- A spellchecker that reads the list of words from a file -->
+    <!--
+       <lst name="spellchecker">
+         <str name="classname">solr.FileBasedSpellChecker</str>
+         <str name="name">file</str>
+         <str name="sourceLocation">spellings.txt</str>
+         <str name="characterEncoding">UTF-8</str>
+         <str name="spellcheckIndexDir">spellcheckerFile</str>
+       </lst>
+      -->
+  </searchComponent>
+
+  <!-- A request handler for demonstrating the spellcheck component.
+
+       NOTE: This is purely an example.  The whole purpose of the
+       SpellCheckComponent is to hook it into the request handler that
+       handles your normal user queries so that a separate request is
+       not needed to get suggestions.
+
+       IN OTHER WORDS, THERE IS A REALLY GOOD CHANCE THE SETUP BELOW IS
+       NOT WHAT YOU WANT FOR YOUR PRODUCTION SYSTEM!
+
+       See http://wiki.apache.org/solr/SpellCheckComponent for details
+       on the request parameters.
+    -->
+  <requestHandler name="/spell" class="solr.SearchHandler" startup="lazy">
+    <lst name="defaults">
+      <!-- Solr will use suggestions from both the 'default' spellchecker
+           and from the 'wordbreak' spellchecker and combine them.
+           collations (re-written queries) can include a combination of
+           corrections from both spellcheckers -->
+      <str name="spellcheck.dictionary">default</str>
+      <str name="spellcheck.dictionary">wordbreak</str>
+      <str name="spellcheck">on</str>
+      <str name="spellcheck.extendedResults">true</str>
+      <str name="spellcheck.count">10</str>
+      <str name="spellcheck.alternativeTermCount">5</str>
+      <str name="spellcheck.maxResultsForSuggest">5</str>
+      <str name="spellcheck.collate">true</str>
+      <str name="spellcheck.collateExtendedResults">true</str>
+      <str name="spellcheck.maxCollationTries">10</str>
+      <str name="spellcheck.maxCollations">5</str>
+    </lst>
+    <arr name="last-components">
+      <str>spellcheck</str>
+    </arr>
+  </requestHandler>
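A sketch of querying this handler; both dictionaries configured above
contribute suggestions and collations (the URL, collection, and misspelled
query are illustrative):

    import urllib.parse
    import urllib.request

    # Misspell a couple of terms and ask for suggestions/collations.
    params = urllib.parse.urlencode({"q": "delll ultrashar", "wt": "json"})
    url = "http://localhost:8983/solr/techproducts/spell?" + params
    print(urllib.request.urlopen(url).read()[:300])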
+
+  <!-- The SuggestComponent in Solr provides users with automatic suggestions for query terms.
+       You can use this to implement a powerful auto-suggest feature in your search application.
+       As with the rest of this solrconfig.xml file, the configuration of this component is purely
+       an example that applies specifically to this configset and example documents.
+
+       More information about this component and other configuration options are described in the
+       "Suggester" section of the reference guide available at
+       http://archive.apache.org/dist/lucene/solr/ref-guide
+    -->
+  <searchComponent name="suggest" class="solr.SuggestComponent">
+    <lst name="suggester">
+      <str name="name">mySuggester</str>
+      <str name="lookupImpl">FuzzyLookupFactory</str>
+      <str name="dictionaryImpl">DocumentDictionaryFactory</str>
+      <str name="field">cat</str>
+      <str name="weightField">price</str>
+      <str name="suggestAnalyzerFieldType">string</str>
+      <str name="buildOnStartup">false</str>
+    </lst>
+  </searchComponent>
+
+  <requestHandler name="/suggest" class="solr.SearchHandler"
+                  startup="lazy" >
+    <lst name="defaults">
+      <str name="suggest">true</str>
+      <str name="suggest.count">10</str>
+    </lst>
+    <arr name="components">
+      <str>suggest</str>
+    </arr>
+  </requestHandler>
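Since buildOnStartup is false above, the suggester index has to be built once
before querying; a sketch (URL and collection assumed):

    import urllib.parse
    import urllib.request

    base = "http://localhost:8983/solr/techproducts/suggest?"

    # One-time build of "mySuggester", then a prefix lookup.
    urllib.request.urlopen(base + urllib.parse.urlencode(
        {"suggest.dictionary": "mySuggester", "suggest.build": "true"}))
    params = urllib.parse.urlencode({"suggest.dictionary": "mySuggester",
                                     "suggest.q": "elec", "wt": "json"})
    print(urllib.request.urlopen(base + params).read()[:300])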
+
+
+  <!-- Term Vector Component
+
+       http://wiki.apache.org/solr/TermVectorComponent
+    -->
+  <searchComponent name="tvComponent" class="solr.TermVectorComponent"/>
+
+  <!-- A request handler for demonstrating the term vector component
+
+       This is purely an example.
+
+       In reality you will likely want to add the component to your
+       already specified request handlers.
+    -->
+  <requestHandler name="/tvrh" class="solr.SearchHandler" startup="lazy">
+    <lst name="defaults">
+      <bool name="tv">true</bool>
+    </lst>
+    <arr name="last-components">
+      <str>tvComponent</str>
+    </arr>
+  </requestHandler>
+
+  <!-- Clustering Component
+
+       You'll need to set the solr.clustering.enabled system property
+       when running solr to run with clustering enabled:
+       -Dsolr.clustering.enabled=true
+
+       https://cwiki.apache.org/confluence/display/solr/Result+Clustering
+    -->
+  <searchComponent name="clustering"
+                   enable="${solr.clustering.enabled:false}"
+                   class="solr.clustering.ClusteringComponent" >
+    <!--
+    Declaration of "engines" (clustering algorithms).
+
+    The open source algorithms from Carrot2.org project:
+      * org.carrot2.clustering.lingo.LingoClusteringAlgorithm
+      * org.carrot2.clustering.stc.STCClusteringAlgorithm
+      * org.carrot2.clustering.kmeans.BisectingKMeansClusteringAlgorithm
+    See http://project.carrot2.org/algorithms.html for more information.
+
+    Commercial algorithm Lingo3G (needs to be installed separately):
+      * com.carrotsearch.lingo3g.Lingo3GClusteringAlgorithm
+    -->
+
+    <lst name="engine">
+      <str name="name">lingo3g</str>
+      <bool name="optional">true</bool>
+      <str name="carrot.algorithm">com.carrotsearch.lingo3g.Lingo3GClusteringAlgorithm</str>
+      <str name="carrot.resourcesDir">clustering/carrot2</str>
+    </lst>
+
+    <lst name="engine">
+      <str name="name">lingo</str>
+      <str name="carrot.algorithm">org.carrot2.clustering.lingo.LingoClusteringAlgorithm</str>
+      <str name="carrot.resourcesDir">clustering/carrot2</str>
+    </lst>
+
+    <lst name="engine">
+      <str name="name">stc</str>
+      <str name="carrot.algorithm">org.carrot2.clustering.stc.STCClusteringAlgorithm</str>
+      <str name="carrot.resourcesDir">clustering/carrot2</str>
+    </lst>
+
+    <lst name="engine">
+      <str name="name">kmeans</str>
+      <str name="carrot.algorithm">org.carrot2.clustering.kmeans.BisectingKMeansClusteringAlgorithm</str>
+      <str name="carrot.resourcesDir">clustering/carrot2</str>
+    </lst>
+  </searchComponent>
+
+  <!-- A request handler for demonstrating the clustering component.
+       This is meant as an example.
+       In reality you will likely want to add the component to your
+       already specified request handlers.
+    -->
+  <requestHandler name="/clustering"
+                  startup="lazy"
+                  enable="${solr.clustering.enabled:false}"
+                  class="solr.SearchHandler">
+    <lst name="defaults">
+      <bool name="clustering">true</bool>
+      <bool name="clustering.results">true</bool>
+      <!-- Field name with the logical "title" of each document (optional) -->
+      <str name="carrot.title">name</str>
+      <!-- Field name with the logical "URL" of each document (optional) -->
+      <str name="carrot.url">id</str>
+      <!-- Field name with the logical "content" of each document (optional) -->
+      <str name="carrot.snippet">features</str>
+      <!-- Apply the highlighter to the title/content and use this for clustering. -->
+      <bool name="carrot.produceSummary">true</bool>
+      <!-- the maximum number of labels per cluster -->
+      <!--<int name="carrot.numDescriptions">5</int>-->
+      <!-- produce sub clusters -->
+      <bool name="carrot.outputSubClusters">false</bool>
+
+      <!-- Configure the remaining request handler parameters. -->
+      <str name="defType">edismax</str>
+      <str name="qf">
+        text^0.5 features^1.0 name^1.2 sku^1.5 id^10.0 manu^1.1 cat^1.4
+      </str>
+      <str name="q.alt">*:*</str>
+      <str name="rows">100</str>
+      <str name="fl">*,score</str>
+    </lst>
+    <arr name="last-components">
+      <str>clustering</str>
+    </arr>
+  </requestHandler>
+
+  <!-- Terms Component
+
+       http://wiki.apache.org/solr/TermsComponent
+
+       A component to return terms and document frequency of those
+       terms
+    -->
+  <searchComponent name="terms" class="solr.TermsComponent"/>
+
+  <!-- A request handler for demonstrating the terms component -->
+  <requestHandler name="/terms" class="solr.SearchHandler" startup="lazy">
+     <lst name="defaults">
+      <bool name="terms">true</bool>
+      <bool name="distrib">false</bool>
+    </lst>
+    <arr name="components">
+      <str>terms</str>
+    </arr>
+  </requestHandler>
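A sketch of asking this handler for the most frequent terms in a field (the
URL, collection, and field name are illustrative):

    import urllib.parse
    import urllib.request

    # List the five highest-frequency terms in the "name" field.
    params = urllib.parse.urlencode({"terms.fl": "name", "terms.limit": 5,
                                     "wt": "json"})
    url = "http://localhost:8983/solr/techproducts/terms?" + params
    print(urllib.request.urlopen(url).read())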
+
+
+  <!-- Query Elevation Component
+
+       http://wiki.apache.org/solr/QueryElevationComponent
+
+       a search component that enables you to configure the top
+       results for a given query regardless of the normal lucene
+       scoring.
+    -->
+  <searchComponent name="elevator" class="solr.QueryElevationComponent" >
+    <!-- pick a fieldType to analyze queries -->
+    <str name="queryFieldType">string</str>
+    <str name="config-file">elevate.xml</str>
+  </searchComponent>
+
+  <!-- A request handler for demonstrating the elevator component -->
+  <requestHandler name="/elevate" class="solr.SearchHandler" startup="lazy">
+    <lst name="defaults">
+      <str name="echoParams">explicit</str>
+    </lst>
+    <arr name="last-components">
+      <str>elevator</str>
+    </arr>
+  </requestHandler>
+
+  <!-- Highlighting Component
+
+       http://wiki.apache.org/solr/HighlightingParameters
+    -->
+  <searchComponent class="solr.HighlightComponent" name="highlight">
+    <highlighting>
+      <!-- Configure the standard fragmenter -->
+      <!-- This could most likely be commented out in the "default" case -->
+      <fragmenter name="gap"
+                  default="true"
+                  class="solr.highlight.GapFragmenter">
+        <lst name="defaults">
+          <int name="hl.fragsize">100</int>
+        </lst>
+      </fragmenter>
+
+      <!-- A regular-expression-based fragmenter
+           (for sentence extraction)
+        -->
+      <fragmenter name="regex"
+                  class="solr.highlight.RegexFragmenter">
+        <lst name="defaults">
+          <!-- slightly smaller fragsizes work better because of slop -->
+          <int name="hl.fragsize">70</int>
+          <!-- allow 50% slop on fragment sizes -->
+          <float name="hl.regex.slop">0.5</float>
+          <!-- a basic sentence pattern -->
+          <str name="hl.regex.pattern">[-\w ,/\n\&quot;&apos;]{20,200}</str>
+        </lst>
+      </fragmenter>
+
+      <!-- Configure the standard formatter -->
+      <formatter name="html"
+                 default="true"
+                 class="solr.highlight.HtmlFormatter">
+        <lst name="defaults">
+          <str name="hl.simple.pre"><![CDATA[<em>]]></str>
+          <str name="hl.simple.post"><![CDATA[</em>]]></str>
+        </lst>
+      </formatter>
+
+      <!-- Configure the standard encoder -->
+      <encoder name="html"
+               class="solr.highlight.HtmlEncoder" />
+
+      <!-- Configure the standard fragListBuilder -->
+      <fragListBuilder name="simple"
+                       class="solr.highlight.SimpleFragListBuilder"/>
+
+      <!-- Configure the single fragListBuilder -->
+      <fragListBuilder name="single"
+                       class="solr.highlight.SingleFragListBuilder"/>
+
+      <!-- Configure the weighted fragListBuilder -->
+      <fragListBuilder name="weighted"
+                       default="true"
+                       class="solr.highlight.WeightedFragListBuilder"/>
+
+      <!-- default tag FragmentsBuilder -->
+      <fragmentsBuilder name="default"
+                        default="true"
+                        class="solr.highlight.ScoreOrderFragmentsBuilder">
+        <!--
+        <lst name="defaults">
+          <str name="hl.multiValuedSeparatorChar">/</str>
+        </lst>
+        -->
+      </fragmentsBuilder>
+
+      <!-- multi-colored tag FragmentsBuilder -->
+      <fragmentsBuilder name="colored"
+                        class="solr.highlight.ScoreOrderFragmentsBuilder">
+        <lst name="defaults">
+          <str name="hl.tag.pre"><![CDATA[
+               <b style="background:yellow">,<b style="background:lawngreen">,
+               <b style="background:aquamarine">,<b style="background:magenta">,
+               <b style="background:palegreen">,<b style="background:coral">,
+               <b style="background:wheat">,<b style="background:khaki">,
+               <b style="background:lime">,<b style="background:deepskyblue">]]></str>
+          <str name="hl.tag.post"><![CDATA[</b>]]></str>
+        </lst>
+      </fragmentsBuilder>
+
+      <boundaryScanner name="default"
+                       default="true"
+                       class="solr.highlight.SimpleBoundaryScanner">
+        <lst name="defaults">
+          <str name="hl.bs.maxScan">10</str>
+          <str name="hl.bs.chars">.,!? &#9;&#10;&#13;</str>
+        </lst>
+      </boundaryScanner>
+
+      <boundaryScanner name="breakIterator"
+                       class="solr.highlight.BreakIteratorBoundaryScanner">
+        <lst name="defaults">
+          <!-- type should be one of CHARACTER, WORD (default), LINE or SENTENCE -->
+          <str name="hl.bs.type">WORD</str>
+          <!-- language and country are used when constructing the Locale object, -->
+          <!-- and the Locale object is used when getting an instance of BreakIterator -->
+          <str name="hl.bs.language">en</str>
+          <str name="hl.bs.country">US</str>
+        </lst>
+      </boundaryScanner>
+    </highlighting>
+  </searchComponent>
+
+  <!-- Update Processors
+
+       Chains of Update Processor Factories for dealing with Update
+       Requests can be declared, and then used by name in Update
+       Request Processors
+
+       http://wiki.apache.org/solr/UpdateRequestProcessor
+
+    -->
+  <!-- Deduplication
+
+       An example dedup update processor that creates the "id" field
+       on the fly based on the hash code of some other fields.  This
+       example has overwriteDupes set to false since we are using the
+       id field as the signatureField and Solr will maintain
+       uniqueness based on that anyway.
+
+    -->
+  <!--
+     <updateRequestProcessorChain name="dedupe">
+       <processor class="solr.processor.SignatureUpdateProcessorFactory">
+         <bool name="enabled">true</bool>
+         <str name="signatureField">id</str>
+         <bool name="overwriteDupes">false</bool>
+         <str name="fields">name,features,cat</str>
+         <str name="signatureClass">solr.processor.Lookup3Signature</str>
+       </processor>
+       <processor class="solr.LogUpdateProcessorFactory" />
+       <processor class="solr.RunUpdateProcessorFactory" />
+     </updateRequestProcessorChain>
+    -->
+
+  <!-- Language identification
+
+       This example update chain identifies the language of the incoming
+       documents using the langid contrib. The detected language is
+       written to field language_s. No field name mapping is done.
+       The fields used for detection are text, title, subject and description,
+       making this example suitable for detecting languages from full-text
+       rich documents injected via ExtractingRequestHandler.
+       See more about langId at http://wiki.apache.org/solr/LanguageDetection
+    -->
+    <!--
+     <updateRequestProcessorChain name="langid">
+       <processor class="org.apache.solr.update.processor.TikaLanguageIdentifierUpdateProcessorFactory">
+         <str name="langid.fl">text,title,subject,description</str>
+         <str name="langid.langField">language_s</str>
+         <str name="langid.fallback">en</str>
+       </processor>
+       <processor class="solr.LogUpdateProcessorFactory" />
+       <processor class="solr.RunUpdateProcessorFactory" />
+     </updateRequestProcessorChain>
+    -->
+
+  <!-- Script update processor
+
+    This example hooks in an update processor implemented using JavaScript.
+
+    See more about the script update processor at http://wiki.apache.org/solr/ScriptUpdateProcessor
+  -->
+  <!--
+    <updateRequestProcessorChain name="script">
+      <processor class="solr.StatelessScriptUpdateProcessorFactory">
+        <str name="script">update-script.js</str>
+        <lst name="params">
+          <str name="config_param">example config parameter</str>
+        </lst>
+      </processor>
+      <processor class="solr.RunUpdateProcessorFactory" />
+    </updateRequestProcessorChain>
+  -->
+
+  <!-- Response Writers
+
+       http://wiki.apache.org/solr/QueryResponseWriter
+
+       Request responses will be written using the writer specified by
+       the 'wt' request parameter matching the name of a registered
+       writer.
+
+       The "default" writer is the default and will be used if 'wt' is
+       not specified in the request.
+    -->
+  <!-- The following response writers are implicitly configured unless
+       overridden...
+    -->
+  <!--
+     <queryResponseWriter name="xml"
+                          default="true"
+                          class="solr.XMLResponseWriter" />
+     <queryResponseWriter name="json" class="solr.JSONResponseWriter"/>
+     <queryResponseWriter name="python" class="solr.PythonResponseWriter"/>
+     <queryResponseWriter name="ruby" class="solr.RubyResponseWriter"/>
+     <queryResponseWriter name="php" class="solr.PHPResponseWriter"/>
+     <queryResponseWriter name="phps" class="solr.PHPSerializedResponseWriter"/>
+     <queryResponseWriter name="csv" class="solr.CSVResponseWriter"/>
+     <queryResponseWriter name="schema.xml" class="solr.SchemaXmlResponseWriter"/>
+    -->
+
+  <queryResponseWriter name="json" class="solr.JSONResponseWriter">
+     <!-- For the purposes of the tutorial, JSON responses are written as
+      plain text so that they are easy to read in *any* browser.
+      If you expect a MIME type of "application/json" just remove this override.
+     -->
+    <str name="content-type">text/plain; charset=UTF-8</str>
+  </queryResponseWriter>
+
+  <!--
+     Custom response writers can be declared as needed...
+    -->
+    <queryResponseWriter name="velocity" class="solr.VelocityResponseWriter" startup="lazy">
+      <str name="template.base.dir">${velocity.template.base.dir:}</str>
+    </queryResponseWriter>
+
+
+  <!-- XSLT response writer transforms the XML output by any xslt file found
+       in Solr's conf/xslt directory.  Changes to xslt files are checked
+       every xsltCacheLifetimeSeconds seconds.
+    -->
+  <queryResponseWriter name="xslt" class="solr.XSLTResponseWriter">
+    <int name="xsltCacheLifetimeSeconds">5</int>
+  </queryResponseWriter>
+
+  <!-- Query Parsers
+
+       http://wiki.apache.org/solr/SolrQuerySyntax
+
+       Multiple QParserPlugins can be registered by name, and then
+       used in either the "defType" param for the QueryComponent (used
+       by SearchHandler) or in LocalParams
+    -->
+  <!-- example of registering a query parser -->
+  <!--
+     <queryParser name="myparser" class="com.mycompany.MyQParserPlugin"/>
+    -->
+
+  <!-- Function Parsers
+
+       http://wiki.apache.org/solr/FunctionQuery
+
+       Multiple ValueSourceParsers can be registered by name, and then
+       used as function names when using the "func" QParser.
+    -->
+  <!-- example of registering a custom function parser  -->
+  <!--
+     <valueSourceParser name="myfunc"
+                        class="com.mycompany.MyValueSourceParser" />
+    -->
+
+
+  <!-- Document Transformers
+       http://wiki.apache.org/solr/DocTransformers
+    -->
+  <!--
+     Could be something like:
+     <transformer name="db" class="com.mycompany.LoadFromDatabaseTransformer" >
+       <int name="connection">jdbc://....</int>
+     </transformer>
+
+     To add a constant value to all docs, use:
+     <transformer name="mytrans2" class="org.apache.solr.response.transform.ValueAugmenterFactory" >
+       <int name="value">5</int>
+     </transformer>
+
+     If you want the user to still be able to change it with _value:something_ use this:
+     <transformer name="mytrans3" class="org.apache.solr.response.transform.ValueAugmenterFactory" >
+       <double name="defaultValue">5</double>
+     </transformer>
+
+      If you are using the QueryElevationComponent, you may wish to mark documents that get boosted.  The
+      EditorialMarkerFactory will do exactly that:
+     <transformer name="qecBooster" class="org.apache.solr.response.transform.EditorialMarkerFactory" />
+    -->
+
+
+  <!-- Legacy config for the admin interface -->
+  <admin>
+    <defaultQuery>*:*</defaultQuery>
+  </admin>
+
+</config>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/example/techproducts-features.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/example/techproducts-features.json b/solr/contrib/ltr/example/techproducts-features.json
new file mode 100644
index 0000000..f358f8b
--- /dev/null
+++ b/solr/contrib/ltr/example/techproducts-features.json
@@ -0,0 +1,26 @@
+[
+{
+    "name":  "isInStock",
+    "class": "org.apache.solr.ltr.feature.FieldValueFeature",
+    "params": {
+        "field": "inStock"
+    }
+},
+{
+    "name":  "price",
+    "class": "org.apache.solr.ltr.feature.FieldValueFeature",
+    "params": {
+        "field": "price"
+    }
+},
+{
+    "name":"originalScore",
+    "class":"org.apache.solr.ltr.feature.OriginalScoreFeature",
+    "params":{}
+},
+{
+    "name" : "productNameMatchQuery",
+    "class" : "org.apache.solr.ltr.feature.SolrFeature",
+    "params" : { "q" : "{!field f=name}${user_query}" }
+}
+]
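
Note that the ${user_query} in the SolrFeature above is not resolved at config time; it is filled in per request from external feature information. A hedged sketch of pulling these feature values back with the [features] document transformer (parameter value illustrative):

    fl=id,score,[features efi.user_query=ipod]

The training script further down builds its feature-extraction queries around the same mechanism.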

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/example/techproducts-model.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/example/techproducts-model.json b/solr/contrib/ltr/example/techproducts-model.json
new file mode 100644
index 0000000..0efded7
--- /dev/null
+++ b/solr/contrib/ltr/example/techproducts-model.json
@@ -0,0 +1,18 @@
+{
+    "class":"org.apache.solr.ltr.model.LinearModel",
+    "name":"linear",
+    "features":[
+    {"name":"isInStock"},
+    {"name":"price"},
+    {"name":"originalScore"},
+    {"name":"productNameMatchQuery"}
+    ],
+    "params":{
+        "weights":{
+            "isInStock":15.0,
+            "price":1.0,
+            "originalScore":5.0,
+            "productNameMatchQuery":1.0
+        }
+    }
+}
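
For reference, LinearModel simply rescores each document with the weighted sum of its feature values:

    score = 15.0*isInStock + 1.0*price + 5.0*originalScore + 1.0*productNameMatchQuery

so, purely as an illustration, an in-stock document with price 100.0, original score 2.0 and a name-match score of 3.0 would rescore to 15.0 + 100.0 + 10.0 + 3.0 = 128.0.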

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/example/train_and_upload_demo_model.py
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/example/train_and_upload_demo_model.py b/solr/contrib/ltr/example/train_and_upload_demo_model.py
new file mode 100755
index 0000000..c3762de
--- /dev/null
+++ b/solr/contrib/ltr/example/train_and_upload_demo_model.py
@@ -0,0 +1,163 @@
+#!/usr/bin/env python
+
+import sys
+import json
+import httplib
+import urllib
+import libsvm_formatter
+
+from optparse import OptionParser
+
+solrQueryUrl = ""
+
+def generateQueries(config):
+        with open(config["userQueriesFile"]) as input:
+            solrQueryUrls = [] #A list of (solrQueryUrl, searchText, docId, score, source) tuples
+
+            for line in input:
+                line = line.strip();
+                searchText,docId,score,source = line.split("|");
+                solrQuery = generateHttpRequest(config,searchText,docId)
+                solrQueryUrls.append((solrQuery,searchText,docId,score,source))
+
+        return solrQueryUrls;
+
+def generateHttpRequest(config,searchText,docId):
+    global solrQueryUrl
+    if len(solrQueryUrl) < 1:
+        solrQueryUrl = "/solr/%(collection)s/%(requestHandler)s?%(otherParams)s&q=" % config
+        solrQueryUrl = solrQueryUrl.replace(" ","+")
+        solrQueryUrl += urllib.quote_plus("id:")
+
+
+    userQuery = urllib.quote_plus(searchText.strip().replace("'","\\'").replace("/","\\\\/"))
+    solrQuery = solrQueryUrl + '"' + urllib.quote_plus(docId) + '"' #+ solrQueryUrlEnd
+    solrQuery = solrQuery.replace("%24USERQUERY", userQuery).replace('$USERQUERY', urllib.quote_plus("\\'" + userQuery + "\\'"))
+
+    return solrQuery
+
+def generateTrainingData(solrQueries, config):
+    '''Given a list of solr queries, yields a tuple of query , docId , score , source , feature vector for each query.
+    Feature Vector is a list of strings of form "key:value"'''
+    conn = httplib.HTTPConnection(config["host"], config["port"])
+    headers = {"Connection":" keep-alive"}
+    msg = ""  # defined up front so the except block below can print it even if the first request fails
+
+    try:
+        for queryUrl,query,docId,score,source in solrQueries:
+            conn.request("GET", queryUrl, headers=headers)
+            r = conn.getresponse()
+            msg = r.read()
+            msgDict = json.loads(msg)
+            fv = ""
+            docs = msgDict['response']['docs']
+            if len(docs) > 0 and "[features]" in docs[0]:
+                if not msgDict['response']['docs'][0]["[features]"] == None:
+                    fv = msgDict['response']['docs'][0]["[features]"];
+                else:
+                    print "ERROR NULL FV FOR: " + docId;
+                    print msg
+                    continue;
+            else:
+                print "ERROR FOR: " + docId;
+                print msg
+                continue;
+
+            if r.status == httplib.OK:
+                #print "http connection was ok for: " + queryUrl
+                yield(query,docId,score,source,fv.split(";"));
+            else:
+                raise Exception("Status: {0} {1}\nResponse: {2}".format(r.status, r.reason, msg))
+    except Exception as e:
+        print msg
+        print e
+
+    conn.close()
+
+def setupSolr(config):
+    '''Sets up solr with the proper features for the test'''
+
+    conn = httplib.HTTPConnection(config["host"], config["port"])
+
+    baseUrl = "/solr/" + config["collection"]
+    featureUrl = baseUrl + "/schema/feature-store"
+
+    # CAUTION! This will delete all feature stores. This is just for demo purposes
+    conn.request("DELETE", featureUrl+"/*")
+    r = conn.getresponse()
+    msg = r.read()
+    if (r.status != httplib.OK and
+        r.status != httplib.CREATED and
+        r.status != httplib.ACCEPTED and
+        r.status != httplib.NOT_FOUND):
+        raise Exception("Status: {0} {1}\nResponse: {2}".format(r.status, r.reason, msg))
+
+
+    # Add features
+    headers = {'Content-type': 'application/json'}
+    featuresBody = open(config["featuresFile"])
+
+    conn.request("POST", featureUrl, featuresBody, headers)
+    r = conn.getresponse()
+    msg = r.read()
+    if (r.status != httplib.OK and
+        r.status != httplib.ACCEPTED):
+        print r.status
+        print ""
+        print r.reason;
+        raise Exception("Status: {0} {1}\nResponse: {2}".format(r.status, r.reason, msg))
+
+    conn.close()
+
+
+def main(argv=None):
+    if argv is None:
+        argv = sys.argv
+
+    parser = OptionParser(usage="usage: %prog [options] ", version="%prog 1.0")
+    parser.add_option('-c', '--config',
+                      dest='configFile',
+                      help='File of configuration for the test')
+    (options, args) = parser.parse_args()
+
+    if options.configFile == None:
+        parser.print_help()
+        return 1
+
+    with open(options.configFile) as configFile:
+        config = json.load(configFile)
+
+        print "Uploading feature space to Solr"
+        setupSolr(config)
+
+        print "Generating feature extraction Solr queries"
+        reRankQueries = generateQueries(config)
+
+        print "Extracting features"
+        fvGenerator = generateTrainingData(reRankQueries, config);
+        formatter = libsvm_formatter.LibSvmFormatter();
+        formatter.processQueryDocFeatureVector(fvGenerator,config["trainingFile"]);
+
+        print "Training ranksvm model"
+        libsvm_formatter.trainLibSvm(config["trainingLibraryLocation"],config["trainingFile"])
+
+        print "Converting ranksvm model to solr model"
+        formatter.convertLibSvmModelToLtrModel(config["trainingFile"] + ".model", config["solrModelFile"], config["solrModelName"])
+
+        print "Uploading model to solr"
+        uploadModel(config["collection"], config["host"], config["port"], config["solrModelFile"])
+
+def uploadModel(collection, host, port, modelFile):    
+    modelUrl = "/solr/" + collection + "/schema/model-store"
+    headers = {'Content-type': 'application/json'}
+    with open(modelFile) as modelBody:
+        conn = httplib.HTTPConnection(host, port)
+        conn.request("POST", modelUrl, modelBody, headers)
+        r = conn.getresponse()
+        msg = r.read()
+        if (r.status != httplib.OK and
+            r.status != httplib.CREATED and
+            r.status != httplib.ACCEPTED):
+                raise Exception("Status: {0} {1}\nResponse: {2}".format(r.status, r.reason, msg))
+
+if __name__ == '__main__':
+    sys.exit(main())
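
The script reads everything from the JSON file passed via --config. The keys below are the ones the script actually dereferences; all values are placeholders for illustration, not shipped defaults:

    {
        "host": "localhost",
        "port": 8983,
        "collection": "techproducts",
        "requestHandler": "query",
        "otherParams": "fl=id,score,[features efi.user_query=$USERQUERY]",
        "userQueriesFile": "user_queries.txt",
        "trainingFile": "ltr-training.txt",
        "trainingLibraryLocation": "lib/ranksvm/train",
        "solrModelFile": "techproducts-model.json",
        "solrModelName": "linear",
        "featuresFile": "techproducts-features.json"
    }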

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/example/user_queries.txt
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/example/user_queries.txt b/solr/contrib/ltr/example/user_queries.txt
new file mode 100644
index 0000000..a3a3455
--- /dev/null
+++ b/solr/contrib/ltr/example/user_queries.txt
@@ -0,0 +1,8 @@
+hard drive|SP2514N|0.6666666|CLICK_LOGS
+hard drive|6H500F0|0.330082034|CLICK_LOGS
+hard drive|F8V7067-APL-KIT|0.0|CLICK_LOGS
+hard drive|IW-02|0.0|CLICK_LOGS
+ipod|MA147LL/A|1.0|EXPLICIT
+ipod|F8V7067-APL-KIT|0.25|EXPLICIT
+ipod|IW-02|0.25|EXPLICIT
+ipod|6H500F0|0.0|EXPLICIT

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/ivy.xml
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/ivy.xml b/solr/contrib/ltr/ivy.xml
new file mode 100644
index 0000000..68e9797
--- /dev/null
+++ b/solr/contrib/ltr/ivy.xml
@@ -0,0 +1,32 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one
+   or more contributor license agreements.  See the NOTICE file
+   distributed with this work for additional information
+   regarding copyright ownership.  The ASF licenses this file
+   to you under the Apache License, Version 2.0 (the
+   "License"); you may not use this file except in compliance
+   with the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing,
+   software distributed under the License is distributed on an
+   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+   KIND, either express or implied.  See the License for the
+   specific language governing permissions and limitations
+   under the License.
+-->
+<ivy-module version="2.0">
+  <info organisation="org.apache.solr" module="ltr"/>
+    <configurations defaultconfmapping="compile->master;test->master">
+      <conf name="compile" transitive="false"/> <!-- keep unused 'compile' configuration to allow build to succeed -->
+      <conf name="test" transitive="false"/>
+    </configurations>
+
+   <dependencies>
+
+
+     <dependency org="org.slf4j" name="jcl-over-slf4j" rev="${/org.slf4j/jcl-over-slf4j}" conf="test"/>
+     <exclude org="*" ext="*" matcher="regexp" type="${ivy.exclude.types}"/>
+   </dependencies>
+</ivy-module>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/DocInfo.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/DocInfo.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/DocInfo.java
new file mode 100644
index 0000000..b3dfb9e
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/DocInfo.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr;
+
+import java.util.HashMap;
+
+public class DocInfo extends HashMap<String,Object> {
+
+  // Name of key used to store the original score of a doc
+  private static final String ORIGINAL_DOC_SCORE = "ORIGINAL_DOC_SCORE";
+
+  public DocInfo() {
+    super();
+  }
+
+  public void setOriginalDocScore(Float score) {
+    put(ORIGINAL_DOC_SCORE, score);
+  }
+
+  public Float getOriginalDocScore() {
+    return (Float)get(ORIGINAL_DOC_SCORE);
+  }
+
+  public boolean hasOriginalDocScore() {
+    return containsKey(ORIGINAL_DOC_SCORE);
+  }
+
+}
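
A minimal usage sketch (illustrative only; DocInfo is just a typed convenience over HashMap):

    DocInfo docInfo = new DocInfo();
    docInfo.setOriginalDocScore(1.5f);                 // stash the pre-rerank score
    if (docInfo.hasOriginalDocScore()) {
      float original = docInfo.getOriginalDocScore(); // read it back, e.g. to feed OriginalScoreFeature
    }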


[11/50] [abbrv] lucene-solr:apiv2: SOLR-9132: Cut over some collections API and recovery tests

Posted by sa...@apache.org.
SOLR-9132: Cut over some collections API and recovery tests


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/f56d111a
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/f56d111a
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/f56d111a

Branch: refs/heads/apiv2
Commit: f56d111adf46e127c62a3fd11fdae9b9725c1024
Parents: c8906b2
Author: Alan Woodward <ro...@apache.org>
Authored: Tue Sep 27 14:03:45 2016 +0100
Committer: Alan Woodward <ro...@apache.org>
Committed: Fri Oct 28 10:33:21 2016 +0100

----------------------------------------------------------------------
 .../client/solrj/embedded/JettySolrRunner.java  |   10 +
 .../configsets/cloud-dynamic/conf/schema.xml    |    2 +
 .../solr/configsets/cloud-hdfs/conf/schema.xml  |   28 +
 .../configsets/cloud-hdfs/conf/solrconfig.xml   |   50 +
 .../cloud-minimal-jmx/conf/schema.xml           |   28 +
 .../cloud-minimal-jmx/conf/solrconfig.xml       |   50 +
 .../cloud/CollectionTooManyReplicasTest.java    |  301 ++--
 .../cloud/CollectionsAPIDistributedZkTest.java  | 1296 +++++-------------
 .../solr/cloud/CreateCollectionCleanupTest.java |    5 +-
 .../apache/solr/cloud/CustomCollectionTest.java |  491 ++-----
 .../apache/solr/cloud/MigrateRouteKeyTest.java  |    1 +
 .../org/apache/solr/cloud/RecoveryZkTest.java   |  166 +--
 .../org/apache/solr/cloud/ShardSplitTest.java   |    6 +-
 .../solr/cloud/TestClusterProperties.java       |   45 +
 .../cloud/TestDeleteCollectionOnDownNodes.java  |   65 +
 .../HdfsCollectionsAPIDistributedZkTest.java    |   33 +-
 .../solr/cloud/hdfs/HdfsRecoveryZkTest.java     |   22 +-
 .../solrj/request/CollectionAdminRequest.java   |   16 +-
 .../client/solrj/request/UpdateRequest.java     |    8 +
 .../org/apache/solr/common/cloud/Slice.java     |   10 +
 .../apache/solr/cloud/MiniSolrCloudCluster.java |   35 +-
 .../apache/solr/cloud/SolrCloudTestCase.java    |    6 +-
 22 files changed, 1042 insertions(+), 1632 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f56d111a/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java b/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
index a0ca7eb..f4887e6 100644
--- a/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
+++ b/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
@@ -38,6 +38,8 @@ import java.util.Random;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.servlet.SolrDispatchFilter;
 import org.eclipse.jetty.server.Connector;
@@ -293,6 +295,10 @@ public class JettySolrRunner {
     return getSolrDispatchFilter().getCores();
   }
 
+  public String getNodeName() {
+    return getCoreContainer().getZkController().getNodeName();
+  }
+
   public boolean isRunning() {
     return server.isRunning();
   }
@@ -453,6 +459,10 @@ public class JettySolrRunner {
     }
   }
 
+  public SolrClient newClient() {
+    return new HttpSolrClient.Builder(getBaseUrl().toString()).build();
+  }
+
   public DebugFilter getDebugFilter() {
     return (DebugFilter)debugFilter.getFilter();
   }
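
A hedged sketch of the new helper in a test (collection name illustrative; assumes org.apache.solr.common.SolrInputDocument is also imported):

    try (SolrClient client = jetty.newClient()) {
      SolrInputDocument doc = new SolrInputDocument();
      doc.addField("id", "1");
      client.add("collection1", doc);
      client.commit("collection1");
    }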

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f56d111a/solr/core/src/test-files/solr/configsets/cloud-dynamic/conf/schema.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/configsets/cloud-dynamic/conf/schema.xml b/solr/core/src/test-files/solr/configsets/cloud-dynamic/conf/schema.xml
index 41822a3..af201c0 100644
--- a/solr/core/src/test-files/solr/configsets/cloud-dynamic/conf/schema.xml
+++ b/solr/core/src/test-files/solr/configsets/cloud-dynamic/conf/schema.xml
@@ -223,6 +223,8 @@
   <!-- points to the root document of a block of nested documents -->
   <field name="_root_" type="string" indexed="true" stored="true"/>
 
+  <field name="_route_" type="string" indexed="true" stored="true" multiValued="false"/>
+
   <field name="multi_int_with_docvals" type="tint" multiValued="true" docValues="true" indexed="false"/>
 
   <dynamicField name="*_coordinate" type="tdouble" indexed="true" stored="false"/>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f56d111a/solr/core/src/test-files/solr/configsets/cloud-hdfs/conf/schema.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/configsets/cloud-hdfs/conf/schema.xml b/solr/core/src/test-files/solr/configsets/cloud-hdfs/conf/schema.xml
new file mode 100644
index 0000000..aab5e81
--- /dev/null
+++ b/solr/core/src/test-files/solr/configsets/cloud-hdfs/conf/schema.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<schema name="minimal" version="1.1">
+  <fieldType name="string" class="solr.StrField"/>
+  <fieldType name="int" class="solr.TrieIntField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
+  <fieldType name="long" class="solr.TrieLongField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
+  <dynamicField name="*" type="string" indexed="true" stored="true"/>
+  <!-- for versioning -->
+  <field name="_version_" type="long" indexed="true" stored="true"/>
+  <field name="_root_" type="int" indexed="true" stored="true" multiValued="false" required="false"/>
+  <field name="id" type="string" indexed="true" stored="true"/>
+  <uniqueKey>id</uniqueKey>
+</schema>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f56d111a/solr/core/src/test-files/solr/configsets/cloud-hdfs/conf/solrconfig.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/configsets/cloud-hdfs/conf/solrconfig.xml b/solr/core/src/test-files/solr/configsets/cloud-hdfs/conf/solrconfig.xml
new file mode 100644
index 0000000..88290da
--- /dev/null
+++ b/solr/core/src/test-files/solr/configsets/cloud-hdfs/conf/solrconfig.xml
@@ -0,0 +1,50 @@
+<?xml version="1.0" ?>
+
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!-- Minimal solrconfig.xml with /select, /admin and /update only -->
+
+<config>
+
+  <directoryFactory name="DirectoryFactory"
+                    class="solr.HdfsDirectoryFactory"/>
+  <indexConfig>
+    <lockType>hdfs</lockType>
+  </indexConfig>
+
+  <schemaFactory class="ClassicIndexSchemaFactory"/>
+
+  <luceneMatchVersion>${tests.luceneMatchVersion:LATEST}</luceneMatchVersion>
+
+  <updateHandler class="solr.DirectUpdateHandler2">
+    <commitWithin>
+      <softCommit>${solr.commitwithin.softcommit:true}</softCommit>
+    </commitWithin>
+    <updateLog></updateLog>
+  </updateHandler>
+
+  <requestHandler name="/select" class="solr.SearchHandler">
+    <lst name="defaults">
+      <str name="echoParams">explicit</str>
+      <str name="indent">true</str>
+      <str name="df">text</str>
+    </lst>
+
+  </requestHandler>
+</config>
+
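
Note that HdfsDirectoryFactory still needs to be told where to write; in tests that is usually supplied from the outside, e.g. (illustrative value):

    -Dsolr.hdfs.home=hdfs://localhost:8020/solr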

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f56d111a/solr/core/src/test-files/solr/configsets/cloud-minimal-jmx/conf/schema.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/configsets/cloud-minimal-jmx/conf/schema.xml b/solr/core/src/test-files/solr/configsets/cloud-minimal-jmx/conf/schema.xml
new file mode 100644
index 0000000..aab5e81
--- /dev/null
+++ b/solr/core/src/test-files/solr/configsets/cloud-minimal-jmx/conf/schema.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<schema name="minimal" version="1.1">
+  <fieldType name="string" class="solr.StrField"/>
+  <fieldType name="int" class="solr.TrieIntField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
+  <fieldType name="long" class="solr.TrieLongField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
+  <dynamicField name="*" type="string" indexed="true" stored="true"/>
+  <!-- for versioning -->
+  <field name="_version_" type="long" indexed="true" stored="true"/>
+  <field name="_root_" type="int" indexed="true" stored="true" multiValued="false" required="false"/>
+  <field name="id" type="string" indexed="true" stored="true"/>
+  <uniqueKey>id</uniqueKey>
+</schema>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f56d111a/solr/core/src/test-files/solr/configsets/cloud-minimal-jmx/conf/solrconfig.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/configsets/cloud-minimal-jmx/conf/solrconfig.xml b/solr/core/src/test-files/solr/configsets/cloud-minimal-jmx/conf/solrconfig.xml
new file mode 100644
index 0000000..7f27c91
--- /dev/null
+++ b/solr/core/src/test-files/solr/configsets/cloud-minimal-jmx/conf/solrconfig.xml
@@ -0,0 +1,50 @@
+<?xml version="1.0" ?>
+
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!-- Minimal solrconfig.xml with JMX enabled -->
+
+<config>
+
+  <jmx/>
+
+  <dataDir>${solr.data.dir:}</dataDir>
+
+  <directoryFactory name="DirectoryFactory"
+                    class="${solr.directoryFactory:solr.NRTCachingDirectoryFactory}"/>
+  <schemaFactory class="ClassicIndexSchemaFactory"/>
+
+  <luceneMatchVersion>${tests.luceneMatchVersion:LATEST}</luceneMatchVersion>
+
+  <updateHandler class="solr.DirectUpdateHandler2">
+    <commitWithin>
+      <softCommit>${solr.commitwithin.softcommit:true}</softCommit>
+    </commitWithin>
+    <updateLog></updateLog>
+  </updateHandler>
+
+  <requestHandler name="/select" class="solr.SearchHandler">
+    <lst name="defaults">
+      <str name="echoParams">explicit</str>
+      <str name="indent">true</str>
+      <str name="df">text</str>
+    </lst>
+
+  </requestHandler>
+</config>
+
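
The only difference from the stock minimal configset is the <jmx/> element, which registers Solr's MBeans with the JVM's default platform MBean server so tests can inspect them.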

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f56d111a/solr/core/src/test/org/apache/solr/cloud/CollectionTooManyReplicasTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionTooManyReplicasTest.java b/solr/core/src/test/org/apache/solr/cloud/CollectionTooManyReplicasTest.java
index afc7c48..a1c2175 100644
--- a/solr/core/src/test/org/apache/solr/cloud/CollectionTooManyReplicasTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/CollectionTooManyReplicasTest.java
@@ -16,186 +16,153 @@
  */
 package org.apache.solr.cloud;
 
-import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
-import java.util.Properties;
+import java.util.stream.Collectors;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
-import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
-import org.apache.solr.client.solrj.response.CollectionAdminResponse;
+import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.Slice;
-import org.apache.solr.common.cloud.SolrZkClient;
-import org.apache.solr.common.cloud.ZkStateReader;
-import org.apache.solr.common.util.NamedList;
 import org.apache.zookeeper.KeeperException;
+import org.junit.Before;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 @Slow
-public class CollectionTooManyReplicasTest extends AbstractFullDistribZkTestBase {
+public class CollectionTooManyReplicasTest extends SolrCloudTestCase {
 
-  public CollectionTooManyReplicasTest() {
-    sliceCount = 1;
+  @BeforeClass
+  public static void setupCluster() throws Exception {
+    configureCluster(3)
+        .addConfig("conf", configset("cloud-minimal"))
+        .configure();
+  }
+
+  @Before
+  public void deleteCollections() throws Exception {
+    cluster.deleteAllCollections();
   }
 
   @Test
-  @ShardsFixed(num = 1)
   public void testAddTooManyReplicas() throws Exception {
-    String collectionName = "TooManyReplicasInSeveralFlavors";
-    CollectionAdminRequest.Create create = new CollectionAdminRequest.Create()
-        .setCollectionName(collectionName)
-        .setNumShards(2)
-        .setReplicationFactor(1)
-        .setMaxShardsPerNode(2)
-        .setStateFormat(2);
+    final String collectionName = "TooManyReplicasInSeveralFlavors";
+    CollectionAdminRequest.createCollection(collectionName, "conf", 2, 1)
+        .setMaxShardsPerNode(1)
+        .process(cluster.getSolrClient());
 
-    CollectionAdminResponse response = create.process(cloudClient);
-    assertEquals(0, response.getStatus());
-    assertTrue(response.isSuccess());
-    // Now I have the fixed Jetty plus the control instnace, I have two replicas, one for each shard
+    // I have two replicas, one for each shard
 
     // Curiously, I should be able to add a bunch of replicas if I specify the node, even more than maxShardsPerNode
     // Just get the first node any way we can.
     // Get a node to use for the "node" parameter.
-
     String nodeName = getAllNodeNames(collectionName).get(0);
+
     // Add a replica using the "node" parameter (no "too many replicas check")
     // this node should have 2 replicas on it
-    CollectionAdminRequest.AddReplica addReplicaNode = new CollectionAdminRequest.AddReplica()
-        .setCollectionName(collectionName)
-        .setShardName("shard1")
-        .setNode(nodeName);
-    response = addReplicaNode.process(cloudClient);
-    assertEquals(0, response.getStatus());
+    CollectionAdminRequest.addReplicaToShard(collectionName, "shard1")
+        .setNode(nodeName)
+        .process(cluster.getSolrClient());
 
     // Three replicas so far, should be able to create another one "normally"
-    CollectionAdminRequest.AddReplica addReplica = new CollectionAdminRequest.AddReplica()
-        .setCollectionName(collectionName)
-        .setShardName("shard1");
-
-    response = addReplica.process(cloudClient);
-    assertEquals(0, response.getStatus());
+    CollectionAdminRequest.addReplicaToShard(collectionName, "shard1")
+        .process(cluster.getSolrClient());
 
     // This one should fail though, no "node" parameter specified
-    try {
-      addReplica.process(cloudClient);
-      fail("Should have thrown an error because the nodes are full");
-    } catch (HttpSolrClient.RemoteSolrException se) {
-      assertTrue("Should have gotten the right error message back",
-          se.getMessage().contains("given the current number of live nodes and a maxShardsPerNode of"));
-    }
+    Exception e = expectThrows(Exception.class, () -> {
+      CollectionAdminRequest.addReplicaToShard(collectionName, "shard1")
+          .process(cluster.getSolrClient());
+    });
+
+    assertTrue("Should have gotten the right error message back",
+          e.getMessage().contains("given the current number of live nodes and a maxShardsPerNode of"));
+
 
     // Oddly, we should succeed next just because setting property.name will not check for nodes being "full up"
-    Properties props = new Properties();
-    props.setProperty("name", "bogus2");
-    addReplicaNode.setProperties(props);
-    response = addReplicaNode.process(cloudClient);
-    assertEquals(0, response.getStatus());
-
-    ZkStateReader zkStateReader = getCommonCloudSolrClient().getZkStateReader();
-    zkStateReader.forceUpdateCollection(collectionName);
-    Slice slice = zkStateReader.getClusterState().getSlicesMap(collectionName).get("shard1");
-
-    Replica rep = null;
-    for (Replica rep1 : slice.getReplicas()) { // Silly compiler
-      if (rep1.get("core").equals("bogus2")) {
-        rep = rep1;
-        break;
-      }
-    }
-    assertNotNull("Should have found a replica named 'bogus2'", rep);
-    assertEquals("Replica should have been put on correct core", nodeName, rep.getNodeName());
+    // TODO: Isn't this a bug?
+    CollectionAdminRequest.addReplicaToShard(collectionName, "shard1")
+        .withProperty("name", "bogus2")
+        .setNode(nodeName)
+        .process(cluster.getSolrClient());
+
+    DocCollection collectionState = getCollectionState(collectionName);
+    Slice slice = collectionState.getSlice("shard1");
+    Replica replica = getRandomReplica(slice, r -> r.getCoreName().equals("bogus2"));
+    assertNotNull("Should have found a replica named 'bogus2'", replica);
+    assertEquals("Replica should have been put on correct core", nodeName, replica.getNodeName());
 
     // Shard1 should have 4 replicas
     assertEquals("There should be 4 replicas for shard 1", 4, slice.getReplicas().size());
 
-    // And let's fail one more time because to insure that the math doesn't do weird stuff it we have more replicas
+    // And let's fail one more time to ensure that the math doesn't do weird stuff if we have more replicas
     // than simple calcs would indicate.
-    try {
-      addReplica.process(cloudClient);
-      fail("Should have thrown an error because the nodes are full");
-    } catch (HttpSolrClient.RemoteSolrException se) {
-      assertTrue("Should have gotten the right error message back",
-          se.getMessage().contains("given the current number of live nodes and a maxShardsPerNode of"));
-    }
+    Exception e2 = expectThrows(Exception.class, () -> {
+      CollectionAdminRequest.addReplicaToShard(collectionName, "shard1")
+          .process(cluster.getSolrClient());
+    });
+
+    assertTrue("Should have gotten the right error message back",
+        e2.getMessage().contains("given the current number of live nodes and a maxShardsPerNode of"));
+
+    // wait for recoveries to finish, for a clean shutdown - see SOLR-9645
+    waitForState("Expected to see all replicas active", collectionName, (n, c) -> {
+      for (Replica r : c.getReplicas()) {
+        if (r.getState() != Replica.State.ACTIVE)
+          return false;
+      }
+      return true;
+    });
   }
 
   @Test
-  @ShardsFixed(num = 2)
   public void testAddShard() throws Exception {
+
     String collectionName = "TooManyReplicasWhenAddingShards";
-    CollectionAdminRequest.Create create = new CollectionAdminRequest.Create()
-        .setCollectionName(collectionName)
-        .setReplicationFactor(2)
+    CollectionAdminRequest.createCollectionWithImplicitRouter(collectionName, "conf", "shardstart", 2)
         .setMaxShardsPerNode(2)
-        .setStateFormat(2)
-        .setRouterName("implicit")
-        .setShards("shardstart");
-
-    NamedList<Object> request = create.process(cloudClient).getResponse();
+        .process(cluster.getSolrClient());
 
-    assertTrue("Could not create the collection", request.get("success") != null);
     // We have two nodes, maxShardsPerNode is set to 2. Therefore, we should be able to add 2 shards each with
     // two replicas, but fail on the third.
-
-    CollectionAdminRequest.CreateShard createShard = new CollectionAdminRequest.CreateShard()
-        .setCollectionName(collectionName)
-        .setShardName("shard1");
-    CollectionAdminResponse resp = createShard.process(cloudClient);
-    assertEquals(0, resp.getStatus());
+    CollectionAdminRequest.createShard(collectionName, "shard1")
+        .process(cluster.getSolrClient());
 
     // Now we should have one replica on each Jetty, add another to reach maxShardsPerNode
-
-    createShard = new CollectionAdminRequest.CreateShard()
-        .setCollectionName(collectionName)
-        .setShardName("shard2");
-    resp = createShard.process(cloudClient);
-    assertEquals(0, resp.getStatus());
-
+    CollectionAdminRequest.createShard(collectionName, "shard2")
+        .process(cluster.getSolrClient());
 
     // Now fail to add the third as it should exceed maxShardsPerNode
-    createShard = new CollectionAdminRequest.CreateShard()
-        .setCollectionName(collectionName)
-        .setShardName("shard3");
-    try {
-      createShard.process(cloudClient);
-      fail("Should have exceeded the max number of replicas allowed");
-    } catch (HttpSolrClient.RemoteSolrException se) {
-      assertTrue("Should have gotten the right error message back",
-          se.getMessage().contains("given the current number of live nodes and a maxShardsPerNode of"));
-    }
+    Exception e = expectThrows(Exception.class, () -> {
+      CollectionAdminRequest.createShard(collectionName, "shard3")
+          .process(cluster.getSolrClient());
+    });
+    assertTrue("Should have gotten the right error message back",
+        e.getMessage().contains("given the current number of live nodes and a maxShardsPerNode of"));
 
     // Hmmm, providing a nodeset also overrides the checks for max replicas, so prove it.
     List<String> nodes = getAllNodeNames(collectionName);
 
-    createShard = new CollectionAdminRequest.CreateShard()
-        .setCollectionName(collectionName)
-        .setShardName("shard4")
-        .setNodeSet(StringUtils.join(nodes, ","));
-    resp = createShard.process(cloudClient);
-    assertEquals(0, resp.getStatus());
+    CollectionAdminRequest.createShard(collectionName, "shard4")
+        .setNodeSet(StringUtils.join(nodes, ","))
+        .process(cluster.getSolrClient());
 
    // And just for yucks, ensure we fail the "regular" one again.
-    createShard = new CollectionAdminRequest.CreateShard()
-        .setCollectionName(collectionName)
-        .setShardName("shard5");
-    try {
-      createShard.process(cloudClient);
-      fail("Should have exceeded the max number of replicas allowed");
-    } catch (HttpSolrClient.RemoteSolrException se) {
-      assertTrue("Should have gotten the right error message back",
-          se.getMessage().contains("given the current number of live nodes and a maxShardsPerNode of"));
-    }
+    Exception e2 = expectThrows(Exception.class, () -> {
+      CollectionAdminRequest.createShard(collectionName, "shard5")
+          .process(cluster.getSolrClient());
+    });
+    assertTrue("Should have gotten the right error message back",
+        e2.getMessage().contains("given the current number of live nodes and a maxShardsPerNode of"));
 
    // And finally, ensure that all the replicas we expect are there. We should have shards 1, 2 and 4 and each
     // should have exactly two replicas
-    ZkStateReader zkStateReader = getCommonCloudSolrClient().getZkStateReader();
-    zkStateReader.forceUpdateCollection(collectionName);
-    Map<String, Slice> slices = zkStateReader.getClusterState().getSlicesMap(collectionName);
+    waitForState("Expected shards shardstart, 1, 2 and 4, each with two active replicas", collectionName, (n, c) -> {
+      return DocCollection.isFullyActive(n, c, 4, 2);
+    });
+    Map<String, Slice> slices = getCollectionState(collectionName).getSlicesMap();
     assertEquals("There should be exaclty four slices", slices.size(), 4);
     assertNotNull("shardstart should exist", slices.get("shardstart"));
     assertNotNull("shard1 should exist", slices.get("shard1"));
@@ -209,82 +176,46 @@ public class CollectionTooManyReplicasTest extends AbstractFullDistribZkTestBase
   }
 
   @Test
-  @ShardsFixed(num = 2)
   public void testDownedShards() throws Exception {
     String collectionName = "TooManyReplicasWhenAddingDownedNode";
-    CollectionAdminRequest.Create create = new CollectionAdminRequest.Create()
-        .setCollectionName(collectionName)
-        .setReplicationFactor(1)
+    CollectionAdminRequest.createCollectionWithImplicitRouter(collectionName, "conf", "shardstart", 1)
         .setMaxShardsPerNode(2)
-        .setStateFormat(2)
-        .setRouterName("implicit")
-        .setShards("shardstart");
-
-    NamedList<Object> request = create.process(cloudClient).getResponse();
-
-    assertTrue("Could not create the collection", request.get("success") != null);
-    try (SolrZkClient zkClient = new SolrZkClient(zkServer.getZkAddress(),
-        AbstractZkTestCase.TIMEOUT)) {
+        .process(cluster.getSolrClient());
 
-      List<String> liveNodes = zkClient.getChildren("/live_nodes", null, true);
+    // Shut down a Jetty, I really don't care which
+    JettySolrRunner jetty = cluster.getRandomJetty(random());
+    String deadNode = jetty.getBaseUrl().toString();
+    cluster.stopJettySolrRunner(jetty);
 
-      // Shut down a Jetty, I really don't care which
-      JettySolrRunner downJetty = jettys.get(r.nextInt(2));
+    try {
 
-      downJetty.stop();
-      List<String> liveNodesNow = null;
-      for (int idx = 0; idx < 150; ++idx) {
-        liveNodesNow = zkClient.getChildren("/live_nodes", null, true);
-        if (liveNodesNow.size() != liveNodes.size()) break;
-        Thread.sleep(100);
-      }
-      List<String> deadNodes = new ArrayList<>(liveNodes);
-      assertTrue("Should be a downed node", deadNodes.removeAll(liveNodesNow));
-      liveNodes.removeAll(deadNodes);
-
-      //OK, we've killed a node. Insure we get errors when we ask to create a replica or shard that involves it.
-      // First try adding a  replica to the downed node.
-      CollectionAdminRequest.AddReplica addReplicaNode = new CollectionAdminRequest.AddReplica()
-          .setCollectionName(collectionName)
-          .setShardName("shardstart")
-          .setNode(deadNodes.get(0));
-
-      try {
-        addReplicaNode.process(cloudClient);
-        fail("Should have gotten an exception");
-      } catch (HttpSolrClient.RemoteSolrException se) {
-        assertTrue("Should have gotten a message about shard not ",
-            se.getMessage().contains("At least one of the node(s) specified are not currently active, no action taken."));
-      }
+      // Adding a replica on a dead node should fail
+      Exception e1 = expectThrows(Exception.class, () -> {
+        CollectionAdminRequest.addReplicaToShard(collectionName, "shardstart")
+            .setNode(deadNode)
+            .process(cluster.getSolrClient());
+      });
+      assertTrue("Should have gotten a message about shard not ",
+          e1.getMessage().contains("At least one of the node(s) specified are not currently active, no action taken."));
 
       // Should also die if we just add a shard
-      CollectionAdminRequest.CreateShard createShard = new CollectionAdminRequest.CreateShard()
-          .setCollectionName(collectionName)
-          .setShardName("shard1")
-          .setNodeSet(deadNodes.get(0));
-      try {
-        createShard.process(cloudClient);
-        fail("Should have gotten an exception");
-      } catch (HttpSolrClient.RemoteSolrException se) {
-        assertTrue("Should have gotten a message about shard not ",
-            se.getMessage().contains("At least one of the node(s) specified are not currently active, no action taken."));
-      }
-      //downJetty.start();
+      Exception e2 = expectThrows(Exception.class, () -> {
+        CollectionAdminRequest.createShard(collectionName, "shard1")
+            .setNodeSet(deadNode)
+            .process(cluster.getSolrClient());
+      });
+
+      assertTrue("Should have gotten a message about shard not ",
+          e2.getMessage().contains("At least one of the node(s) specified are not currently active, no action taken."));
+    }
+    finally {
+      cluster.startJettySolrRunner(jetty);
     }
   }
 
   private List<String> getAllNodeNames(String collectionName) throws KeeperException, InterruptedException {
-    ZkStateReader zkStateReader = getCommonCloudSolrClient().getZkStateReader();
-    zkStateReader.forceUpdateCollection(collectionName);
-    Slice slice = zkStateReader.getClusterState().getSlicesMap(collectionName).get("shard1");
-
-    List<String> nodes = new ArrayList<>();
-    for (Replica rep : slice.getReplicas()) {
-      nodes.add(rep.getNodeName());
-    }
-
-    assertTrue("Should have some nodes!", nodes.size() > 0);
-    return nodes;
+    DocCollection state = getCollectionState(collectionName);
+    return state.getReplicas().stream().map(Replica::getNodeName).distinct().collect(Collectors.toList());
   }
 
 }


[02/50] [abbrv] lucene-solr:apiv2: General cleanup to use Java 8 features

Posted by sa...@apache.org.
General cleanup to use Java 8 features


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/0feca1a9
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/0feca1a9
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/0feca1a9

Branch: refs/heads/apiv2
Commit: 0feca1a9742a5c945024e6e2bbc9098a003ab538
Parents: d6b6e74
Author: Noble Paul <no...@apache.org>
Authored: Thu Oct 27 15:03:05 2016 +0530
Committer: Noble Paul <no...@apache.org>
Committed: Thu Oct 27 15:05:07 2016 +0530

----------------------------------------------------------------------
 .../solr/handler/dataimport/DIHCache.java       | 16 ++++----
 .../solr/handler/dataimport/EventListener.java  |  2 +-
 .../dataimport/XPathEntityProcessor.java        | 40 ++++++++------------
 .../handler/dataimport/XPathRecordReader.java   | 15 +++-----
 .../AbstractDataImportHandlerTestCase.java      | 14 ++-----
 .../dataimport/TestXPathRecordReader.java       | 15 +++-----
 6 files changed, 38 insertions(+), 64 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0feca1a9/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DIHCache.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DIHCache.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DIHCache.java
index 7e72d4e..a67b3e4 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DIHCache.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DIHCache.java
@@ -35,7 +35,7 @@ public interface DIHCache extends Iterable<Map<String,Object>> {
    * includes any parameters needed by the cache impl. This must be called
    * before any read/write operations are permitted.
    */
-  public void open(Context context);
+  void open(Context context);
   
   /**
    * <p>
@@ -43,14 +43,14 @@ public interface DIHCache extends Iterable<Map<String,Object>> {
    * but not destroyed.
    * </p>
    */
-  public void close();
+  void close();
   
   /**
    * <p>
    * Persists any pending data to the cache
    * </p>
    */
-  public void flush();
+  void flush();
   
   /**
    * <p>
@@ -67,7 +67,7 @@ public interface DIHCache extends Iterable<Map<String,Object>> {
    * update a key's documents, first call delete(Object key).
    * </p>
    */
-  public void add(Map<String,Object> rec);
+  void add(Map<String, Object> rec);
   
   /**
    * <p>
@@ -76,7 +76,7 @@ public interface DIHCache extends Iterable<Map<String,Object>> {
    * </p>
    */
   @Override
-  public Iterator<Map<String,Object>> iterator();
+  Iterator<Map<String,Object>> iterator();
   
   /**
    * <p>
@@ -84,20 +84,20 @@ public interface DIHCache extends Iterable<Map<String,Object>> {
    * match the given key in insertion order.
    * </p>
    */
-  public Iterator<Map<String,Object>> iterator(Object key);
+  Iterator<Map<String,Object>> iterator(Object key);
   
   /**
    * <p>
    * Delete all documents associated with the given key
    * </p>
    */
-  public void delete(Object key);
+  void delete(Object key);
   
   /**
    * <p>
    * Delete all data from the cache,leaving the empty cache intact.
    * </p>
    */
-  public void deleteAll();
+  void deleteAll();
   
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0feca1a9/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EventListener.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EventListener.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EventListener.java
index 0f22513..0c43a0b 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EventListener.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EventListener.java
@@ -30,6 +30,6 @@ public interface EventListener {
    *
    * @param ctx the Context in which this event was called
    */
-  public void onEvent(Context ctx);
+  void onEvent(Context ctx);
 
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0feca1a9/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java
index cc28dc4..70b9aba 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java
@@ -323,13 +323,7 @@ public class XPathEntityProcessor extends EntityProcessorBase {
         rowIterator = getRowIterator(data, s);
       } else {
         try {
-          xpathReader.streamRecords(data, new XPathRecordReader.Handler() {
-            @Override
-            @SuppressWarnings("unchecked")
-            public void handle(Map<String, Object> record, String xpath) {
-              rows.add(readRow(record, xpath));
-            }
-          });
+          xpathReader.streamRecords(data, (record, xpath) -> rows.add(readRow(record, xpath)));
         } catch (Exception e) {
           String msg = "Parsing failed for xml, url:" + s + " rows processed:" + rows.size();
           if (rows.size() > 0) msg += " last row: " + rows.get(rows.size() - 1);
@@ -433,25 +427,21 @@ public class XPathEntityProcessor extends EntityProcessorBase {
       @Override
       public void run() {
         try {
-          xpathReader.streamRecords(data, new XPathRecordReader.Handler() {
-            @Override
-            @SuppressWarnings("unchecked")
-            public void handle(Map<String, Object> record, String xpath) {
-              if (isEnd.get()) {
-                throwExp.set(false);
-                //To end the streaming . otherwise the parsing will go on forever
-                //though consumer has gone away
-                throw new RuntimeException("BREAK");
-              }
-              Map<String, Object> row;
-              try {
-                row = readRow(record, xpath);
-              } catch (Exception e) {
-                isEnd.set(true);
-                return;
-              }
-              offer(row);
+          xpathReader.streamRecords(data, (record, xpath) -> {
+            if (isEnd.get()) {
+              throwExp.set(false);
+              //To end the streaming . otherwise the parsing will go on forever
+              //though consumer has gone away
+              throw new RuntimeException("BREAK");
+            }
+            Map<String, Object> row;
+            try {
+              row = readRow(record, xpath);
+            } catch (Exception e) {
+              isEnd.set(true);
+              return;
             }
+            offer(row);
           });
         } catch (Exception e) {
           if(throwExp.get()) exp.set(e);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0feca1a9/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java
index 266e047..69c6c57 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java
@@ -162,12 +162,7 @@ public class XPathRecordReader {
    */
   public List<Map<String, Object>> getAllRecords(Reader r) {
     final List<Map<String, Object>> results = new ArrayList<>();
-    streamRecords(r, new Handler() {
-      @Override
-      public void handle(Map<String, Object> record, String s) {
-        results.add(record);
-      }
-    });
+    streamRecords(r, (record, s) -> results.add(record));
     return results;
   }
 
@@ -182,8 +177,8 @@ public class XPathRecordReader {
   public void streamRecords(Reader r, Handler handler) {
     try {
       XMLStreamReader parser = factory.createXMLStreamReader(r);
-      rootNode.parse(parser, handler, new HashMap<String, Object>(),
-              new Stack<Set<String>>(), false);
+      rootNode.parse(parser, handler, new HashMap<>(),
+          new Stack<>(), false);
     } catch (Exception e) {
       throw new RuntimeException(e);
     }
@@ -657,7 +652,7 @@ public class XPathRecordReader {
   /**Implement this interface to stream records as and when one is found.
    *
    */
-  public static interface Handler {
+  public interface Handler {
     /**
      * @param record The record map. The key is the field name as provided in 
      * the addField() methods. The value can be a single String (for single 
@@ -666,7 +661,7 @@ public class XPathRecordReader {
      * If there is any change all parsing will be aborted and the Exception
      * is propagated up
      */
-    public void handle(Map<String, Object> record, String xpath);
+    void handle(Map<String, Object> record, String xpath);
   }
 
   private static final Pattern ATTRIB_PRESENT_WITHVAL = Pattern

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0feca1a9/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTestCase.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTestCase.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTestCase.java
index 60f0811..db02993 100644
--- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTestCase.java
+++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTestCase.java
@@ -20,7 +20,6 @@ import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.util.HashMap;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -28,6 +27,7 @@ import org.apache.commons.io.FileUtils;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SuppressForbidden;
+import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequest;
@@ -145,7 +145,7 @@ public abstract class AbstractDataImportHandlerTestCase extends
     if (resolver == null) resolver = new VariableResolver();
     final Context delegate = new ContextImpl(parent, resolver,
             parentDataSource, currProcess,
-            new HashMap<String, Object>(), null, null);
+        new HashMap<>(), null, null);
     return new TestContext(entityAttrs, delegate, entityFields, parent == null);
   }
 
@@ -155,15 +155,7 @@ public abstract class AbstractDataImportHandlerTestCase extends
    */
   @SuppressWarnings("unchecked")
   public static Map createMap(Object... args) {
-    Map result = new LinkedHashMap();
-
-    if (args == null || args.length == 0)
-      return result;
-
-    for (int i = 0; i < args.length - 1; i += 2)
-      result.put(args[i], args[i + 1]);
-
-    return result;
+   return Utils.makeMap(args);
   }
 
   @SuppressForbidden(reason = "Needs currentTimeMillis to set modified time for a file")
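
Since createMap() now just delegates to Utils.makeMap(), the varargs are still consumed as alternating key/value pairs; a quick illustrative call:

    Map<String, Object> row = createMap("id", 1, "desc", "some text");
    // equivalent to Utils.makeMap("id", 1, "desc", "some text")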

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0feca1a9/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java
index 5f9a5ca..d8e3cbe 100644
--- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java
+++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java
@@ -16,13 +16,13 @@
  */
 package org.apache.solr.handler.dataimport;
 
-import org.junit.Test;
-
 import java.io.StringReader;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
+import org.junit.Test;
+
 /**
  * <p> Test for XPathRecordReader </p>
  *
@@ -138,13 +138,10 @@ public class TestXPathRecordReader extends AbstractDataImportHandlerTestCase {
 
     final List<Map<String, Object>> a = new ArrayList<>();
     final List<Map<String, Object>> x = new ArrayList<>();
-    rr.streamRecords(new StringReader(xml), new XPathRecordReader.Handler() {
-      @Override
-      public void handle(Map<String, Object> record, String xpath) {
-        if (record == null) return;
-        if (xpath.equals("/root/a")) a.add(record);
-        if (xpath.equals("/root/x")) x.add(record);
-      }
+    rr.streamRecords(new StringReader(xml), (record, xpath) -> {
+      if (record == null) return;
+      if (xpath.equals("/root/a")) a.add(record);
+      if (xpath.equals("/root/x")) x.add(record);
     });
 
     assertEquals(1, a.size());
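
The Handler rewrite above is the standard Java 8 conversion of an anonymous class over a single-method callback into a lambda; a minimal, dependency-free sketch of the same shape (the Handler interface below is a hypothetical stand-in for XPathRecordReader.Handler):

import java.util.Collections;
import java.util.Map;

public class HandlerLambdaSketch {
  // Hypothetical single-abstract-method callback shaped like XPathRecordReader.Handler.
  interface Handler {
    void handle(Map<String, Object> record, String xpath);
  }

  static void streamRecords(Handler handler) {
    handler.handle(Collections.singletonMap("id", 1), "/root/a");
    handler.handle(null, "/root/x");  // a null record, as the test guards against
  }

  public static void main(String[] args) {
    // Lambda form, as in the patch; parameter types are inferred.
    streamRecords((record, xpath) -> {
      if (record == null) return;
      System.out.println(xpath + " -> " + record);
    });
  }
}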


[05/50] [abbrv] lucene-solr:apiv2: SOLR-9610: Fix typo in usage for assertTool timeout option. Use --timeout instead of --ms

Posted by sa...@apache.org.
SOLR-9610: Fix typo in usage for assertTool timeout option. Use --timeout instead of --ms


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/fa4e599d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/fa4e599d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/fa4e599d

Branch: refs/heads/apiv2
Commit: fa4e599d1dbedf8093eaddc4091ac0a7b9970928
Parents: af60048
Author: Jan Høydahl <ja...@apache.org>
Authored: Thu Oct 27 23:41:43 2016 +0200
Committer: Jan Høydahl <ja...@apache.org>
Committed: Thu Oct 27 23:41:43 2016 +0200

----------------------------------------------------------------------
 solr/core/src/java/org/apache/solr/util/SolrCLI.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/fa4e599d/solr/core/src/java/org/apache/solr/util/SolrCLI.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/SolrCLI.java b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
index 3750190..1975d8e 100644
--- a/solr/core/src/java/org/apache/solr/util/SolrCLI.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
@@ -3262,7 +3262,7 @@ public class SolrCLI {
               .create("m"),
           OptionBuilder
               .withDescription("Timeout in ms for commands supporting a timeout")
-              .withLongOpt("ms")
+              .withLongOpt("timeout")
               .hasArg(true)
               .withType(Long.class)
               .withArgName("ms")
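
For context, the Commons CLI long option now matches the documented --timeout usage rather than accidentally reusing the argument name "ms". A minimal parsing sketch under that definition (the parser wiring below is illustrative, not SolrCLI's actual setup):

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;

public class TimeoutOptionSketch {
  @SuppressWarnings("static-access")
  public static void main(String[] args) throws Exception {
    Option timeout = OptionBuilder
        .withDescription("Timeout in ms for commands supporting a timeout")
        .withLongOpt("timeout")       // the fix: long opt is "timeout", arg name stays "ms"
        .hasArg(true)
        .withArgName("ms")
        .create("m");
    Options options = new Options().addOption(timeout);
    CommandLine cli = new GnuParser().parse(options, new String[] {"--timeout", "1000"});
    System.out.println("timeout = " + Long.parseLong(cli.getOptionValue("timeout")) + " ms");
  }
}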


[16/50] [abbrv] lucene-solr:apiv2: SOLR-9621: Remove several Guava & Apache Commons calls in favor of java 8 alternatives.

Posted by sa...@apache.org.
SOLR-9621: Remove several Guava & Apache Commons calls in favor of java 8 alternatives.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/2e21511c
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/2e21511c
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/2e21511c

Branch: refs/heads/apiv2
Commit: 2e21511cd37310044e7d167fd80b5277cb942603
Parents: 3b49705
Author: David Smiley <ds...@apache.org>
Authored: Fri Oct 28 17:18:58 2016 -0400
Committer: David Smiley <ds...@apache.org>
Committed: Fri Oct 28 17:18:58 2016 -0400

----------------------------------------------------------------------
 lucene/tools/forbiddenApis/solr.txt             | 20 ++++++++++++++++
 solr/CHANGES.txt                                | 22 +++++++++++++++++
 .../analytics/accumulator/BasicAccumulator.java |  2 +-
 .../StatsCollectorSupplierFactory.java          |  2 +-
 .../hadoop/morphline/MorphlineMapRunner.java    |  5 ++--
 .../solr/morphlines/cell/SolrCellBuilder.java   | 14 ++++++-----
 .../solr/SanitizeUnknownSolrFieldsBuilder.java  | 17 +++++++------
 .../solr/morphlines/solr/SolrLocator.java       |  5 ++--
 .../morphlines/solr/TokenizeTextBuilder.java    |  8 +++----
 .../solr/AbstractSolrMorphlineTestBase.java     |  8 +++----
 .../solr/SolrMorphlineZkAvroTest.java           |  9 ++++---
 .../org/apache/solr/cloud/DistributedQueue.java |  4 ++--
 .../org/apache/solr/core/CoreContainer.java     |  4 ++--
 .../src/java/org/apache/solr/core/SolrCore.java |  6 ++---
 .../apache/solr/core/backup/BackupManager.java  | 11 +++++----
 .../repository/BackupRepositoryFactory.java     |  9 ++++---
 .../backup/repository/HdfsBackupRepository.java |  8 +++----
 .../repository/LocalFileSystemRepository.java   |  3 ++-
 .../snapshots/SolrSnapshotMetaDataManager.java  |  6 ++---
 .../apache/solr/handler/OldBackupDirectory.java |  7 +++---
 .../org/apache/solr/handler/SnapShooter.java    |  8 +++----
 .../solr/search/ExtendedDismaxQParser.java      | 25 ++------------------
 .../apache/solr/search/SolrIndexSearcher.java   | 14 ++++-------
 .../apache/solr/update/MergeIndexesCommand.java | 12 ++--------
 .../solrj/embedded/TestJettySolrRunner.java     |  4 ++--
 .../apache/solr/cloud/DistributedQueueTest.java | 13 +++++-----
 .../apache/solr/cloud/KerberosTestServices.java | 14 ++++-------
 .../org/apache/solr/core/TestLazyCores.java     | 16 ++++++-------
 .../handler/admin/CoreAdminHandlerTest.java     |  4 ++--
 .../solr/schema/ChangedSchemaMergeTest.java     |  8 +++----
 .../org/apache/solr/schema/TestBinaryField.java |  4 ++--
 .../security/TestAuthorizationFramework.java    |  4 ++--
 .../solr/common/cloud/TestZkConfigManager.java  | 10 ++++----
 .../java/org/apache/solr/SolrTestCaseHS.java    |  4 ++--
 .../java/org/apache/solr/SolrTestCaseJ4.java    |  9 ++++---
 .../apache/solr/cloud/MiniSolrCloudCluster.java |  4 ++--
 .../org/apache/solr/cloud/ZkTestServer.java     | 11 ++++-----
 37 files changed, 167 insertions(+), 167 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/lucene/tools/forbiddenApis/solr.txt
----------------------------------------------------------------------
diff --git a/lucene/tools/forbiddenApis/solr.txt b/lucene/tools/forbiddenApis/solr.txt
index 38cf667..af0cca6 100644
--- a/lucene/tools/forbiddenApis/solr.txt
+++ b/lucene/tools/forbiddenApis/solr.txt
@@ -34,3 +34,23 @@ java.util.logging.**
 
 @defaultMessage Use RTimer/TimeOut/System.nanoTime for time comparisons, and `new Date()` output/debugging/stats of timestamps. If for some miscellaneous reason, you absolutely need to use this, use a SuppressForbidden.
 java.lang.System#currentTimeMillis()
+
+@defaultMessage Use corresponding Java 8 functional/streaming interfaces
+com.google.common.base.Function
+com.google.common.base.Joiner
+com.google.common.base.Predicate
+com.google.common.base.Supplier
+
+@defaultMessage Use java.nio.charset.StandardCharsets instead
+com.google.common.base.Charsets
+org.apache.commons.codec.Charsets
+
+@defaultMessage Use methods in java.util.Objects instead
+com.google.common.base.Objects#equal(java.lang.Object,java.lang.Object)
+com.google.common.base.Objects#hashCode(java.lang.Object[])
+com.google.common.base.Preconditions#checkNotNull(java.lang.Object)
+com.google.common.base.Preconditions#checkNotNull(java.lang.Object,java.lang.Object)
+
+@defaultMessage Use methods in java.util.Comparator instead
+com.google.common.collect.Ordering
+

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index b1daf1b..f81ff75 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -63,6 +63,28 @@ Optimizations
   creation of a Lucene FieldType every time a field is indexed. (John Call, yonik) 
 
 
+==================  6.4.0 ==================
+
+Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
+
+Versions of Major Components
+---------------------
+Apache Tika 1.13
+Carrot2 3.12.0
+Velocity 1.7 and Velocity Tools 2.0
+Apache UIMA 2.3.1
+Apache ZooKeeper 3.4.6
+Jetty 9.3.8.v20160314
+
+Detailed Change List
+----------------------
+
+Other Changes
+----------------------
+
+* SOLR-9621: Remove several Guava & Apache Commons calls in favor of java 8 alternatives.
+  (Michael Braun via David Smiley)
+
 ==================  6.3.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/contrib/analytics/src/java/org/apache/solr/analytics/accumulator/BasicAccumulator.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/accumulator/BasicAccumulator.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/accumulator/BasicAccumulator.java
index bd21209..cbd8078 100644
--- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/accumulator/BasicAccumulator.java
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/accumulator/BasicAccumulator.java
@@ -23,8 +23,8 @@ import java.util.Collections;
 import java.util.Date;
 import java.util.List;
 import java.util.Set;
+import java.util.function.Supplier;
 
-import com.google.common.base.Supplier;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.solr.analytics.expression.Expression;
 import org.apache.solr.analytics.expression.ExpressionFactory;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/contrib/analytics/src/java/org/apache/solr/analytics/statistics/StatsCollectorSupplierFactory.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/statistics/StatsCollectorSupplierFactory.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/statistics/StatsCollectorSupplierFactory.java
index bd9f65d..30baec4 100644
--- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/statistics/StatsCollectorSupplierFactory.java
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/statistics/StatsCollectorSupplierFactory.java
@@ -23,8 +23,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
+import java.util.function.Supplier;
 
-import com.google.common.base.Supplier;
 import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.queries.function.valuesource.BytesRefFieldSource;
 import org.apache.lucene.queries.function.valuesource.DoubleFieldSource;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/morphline/MorphlineMapRunner.java
----------------------------------------------------------------------
diff --git a/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/morphline/MorphlineMapRunner.java b/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/morphline/MorphlineMapRunner.java
index 9e46203..372c48b 100644
--- a/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/morphline/MorphlineMapRunner.java
+++ b/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/morphline/MorphlineMapRunner.java
@@ -25,6 +25,7 @@ import java.lang.invoke.MethodHandles;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.TreeMap;
+import java.util.stream.Collectors;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
@@ -52,7 +53,6 @@ import org.kitesdk.morphline.base.Notifications;
 import com.codahale.metrics.MetricRegistry;
 import com.codahale.metrics.Timer;
 import com.google.common.annotations.Beta;
-import com.google.common.base.Joiner;
 import com.typesafe.config.Config;
 import com.typesafe.config.ConfigFactory;
 
@@ -109,7 +109,8 @@ public final class MorphlineMapRunner {
       for (Map.Entry<String,String> entry : configuration) {
         map.put(entry.getKey(), entry.getValue());
       }
-      LOG.trace("Configuration:\n{}", Joiner.on("\n").join(map.entrySet()));
+      LOG.trace("Configuration:\n" +
+          map.entrySet().stream().map(Object::toString).collect(Collectors.joining("\n")));
     }
     
     FaultTolerance faultTolerance = new FaultTolerance(
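
The trace-logging change above is representative of the Joiner removals in this commit: Joiner.on(sep).join(iterable) becomes a stream over the elements joined with Collectors.joining. A dependency-free sketch (the config keys are made up):

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Collectors;

public class JoiningSketch {
  public static void main(String[] args) {
    Map<String, String> map = new LinkedHashMap<>();
    map.put("fs.defaultFS", "hdfs://nn:8020");
    map.put("mapreduce.framework.name", "yarn");
    // Before: Joiner.on("\n").join(map.entrySet())
    // After:  map entries, rendered via toString(), joined on newlines.
    String joined = map.entrySet().stream()
        .map(Object::toString)
        .collect(Collectors.joining("\n"));
    System.out.println(joined);
  }
}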

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/contrib/morphlines-cell/src/java/org/apache/solr/morphlines/cell/SolrCellBuilder.java
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-cell/src/java/org/apache/solr/morphlines/cell/SolrCellBuilder.java b/solr/contrib/morphlines-cell/src/java/org/apache/solr/morphlines/cell/SolrCellBuilder.java
index 00045b2..d4483a5 100644
--- a/solr/contrib/morphlines-cell/src/java/org/apache/solr/morphlines/cell/SolrCellBuilder.java
+++ b/solr/contrib/morphlines-cell/src/java/org/apache/solr/morphlines/cell/SolrCellBuilder.java
@@ -27,10 +27,9 @@ import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.TreeMap;
+import java.util.Objects;
+import java.util.stream.Collectors;
 
-import com.google.common.base.Joiner;
-import com.google.common.base.Preconditions;
 import com.google.common.collect.ArrayListMultimap;
 import com.google.common.collect.ListMultimap;
 import com.google.common.io.Closeables;
@@ -114,9 +113,12 @@ public final class SolrCellBuilder implements CommandBuilder {
       Config solrLocatorConfig = getConfigs().getConfig(config, "solrLocator");
       SolrLocator locator = new SolrLocator(solrLocatorConfig, context);
       LOG.debug("solrLocator: {}", locator);
-      this.schema = locator.getIndexSchema();
-      Preconditions.checkNotNull(schema);
-      LOG.trace("Solr schema: \n{}", Joiner.on("\n").join(new TreeMap<>(schema.getFields()).values()));
+      this.schema = Objects.requireNonNull(locator.getIndexSchema());
+      if (LOG.isTraceEnabled()) {
+        LOG.trace("Solr schema: \n" + schema.getFields().entrySet().stream()
+                .sorted(Map.Entry.comparingByKey()).map(Map.Entry::getValue).map(Object::toString)
+                .collect(Collectors.joining("\n")));
+      }
 
       ListMultimap<String, String> cellParams = ArrayListMultimap.create();
       String uprefix = getConfigs().getString(config, ExtractingParams.UNKNOWN_FIELD_PREFIX, null);
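
Note the new LOG.isTraceEnabled() guard: the stream pipeline builds (sorts and joins) the message eagerly, and a parameterized LOG.trace("{}", arg) would not avoid that cost because the argument itself is the expensive part. A generic slf4j sketch of the pattern:

import java.util.Map;
import java.util.TreeMap;
import java.util.stream.Collectors;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class TraceGuardSketch {
  private static final Logger LOG = LoggerFactory.getLogger(TraceGuardSketch.class);

  static void traceFields(Map<String, Object> fields) {
    // Guard: without it, the sort/map/join below runs even when TRACE is off.
    if (LOG.isTraceEnabled()) {
      LOG.trace("Solr schema: \n" + fields.entrySet().stream()
          .sorted(Map.Entry.comparingByKey())
          .map(Map.Entry::getValue)
          .map(Object::toString)
          .collect(Collectors.joining("\n")));
    }
  }

  public static void main(String[] args) {
    Map<String, Object> fields = new TreeMap<>();
    fields.put("id", "StrField");
    fields.put("text", "TextField");
    traceFields(fields);
  }
}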

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/SanitizeUnknownSolrFieldsBuilder.java
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/SanitizeUnknownSolrFieldsBuilder.java b/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/SanitizeUnknownSolrFieldsBuilder.java
index a11e45d..9ede714 100644
--- a/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/SanitizeUnknownSolrFieldsBuilder.java
+++ b/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/SanitizeUnknownSolrFieldsBuilder.java
@@ -20,7 +20,8 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Map;
-import java.util.TreeMap;
+import java.util.Objects;
+import java.util.stream.Collectors;
 
 import org.apache.solr.schema.IndexSchema;
 
@@ -29,8 +30,6 @@ import org.kitesdk.morphline.api.CommandBuilder;
 import org.kitesdk.morphline.api.MorphlineContext;
 import org.kitesdk.morphline.api.Record;
 import org.kitesdk.morphline.base.AbstractCommand;
-import com.google.common.base.Joiner;
-import com.google.common.base.Preconditions;
 import com.typesafe.config.Config;
 
 /**
@@ -68,10 +67,14 @@ public final class SanitizeUnknownSolrFieldsBuilder implements CommandBuilder {
       Config solrLocatorConfig = getConfigs().getConfig(config, "solrLocator");
       SolrLocator locator = new SolrLocator(solrLocatorConfig, context);
       LOG.debug("solrLocator: {}", locator);
-      this.schema = locator.getIndexSchema();
-      Preconditions.checkNotNull(schema);
-      LOG.trace("Solr schema: \n{}", Joiner.on("\n").join(new TreeMap(schema.getFields()).values()));
-      
+      this.schema = Objects.requireNonNull(locator.getIndexSchema());
+      if (LOG.isTraceEnabled()) {
+        LOG.trace("Solr schema: \n" +
+            schema.getFields().entrySet().stream().sorted(Map.Entry.comparingByKey())
+                .map(Map.Entry::getValue).map(Object::toString).collect(Collectors.joining("\n"))
+        );
+      }
+
       String str = getConfigs().getString(config, "renameToPrefix", "").trim();
       this.renameToPrefix = str.length() > 0 ? str : null;  
       validateArguments();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/SolrLocator.java
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/SolrLocator.java b/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/SolrLocator.java
index 39cee54..1d177a6 100644
--- a/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/SolrLocator.java
+++ b/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/SolrLocator.java
@@ -21,8 +21,8 @@ import java.io.File;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.nio.file.Paths;
+import java.util.Objects;
 
-import com.google.common.base.Preconditions;
 import com.google.common.io.Files;
 import com.typesafe.config.Config;
 import com.typesafe.config.ConfigFactory;
@@ -64,8 +64,7 @@ public class SolrLocator {
   private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   protected SolrLocator(MorphlineContext context) {
-    Preconditions.checkNotNull(context);
-    this.context = context;
+    this.context = Objects.requireNonNull(context);
   }
 
   public SolrLocator(Config config, MorphlineContext context) {
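
Like Guava's checkNotNull, Objects.requireNonNull returns its argument, so the null check and the field assignment collapse into one statement throughout this commit. A minimal sketch:

import java.util.Objects;

public class RequireNonNullSketch {
  private final Object context;

  RequireNonNullSketch(Object context) {
    // Before: Preconditions.checkNotNull(context); this.context = context;
    this.context = Objects.requireNonNull(context, "context may not be null");
  }

  public static void main(String[] args) {
    System.out.println(new RequireNonNullSketch("ctx").context);
    try {
      new RequireNonNullSketch(null);
    } catch (NullPointerException expected) {
      System.out.println("rejected: " + expected.getMessage());
    }
  }
}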

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/TokenizeTextBuilder.java
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/TokenizeTextBuilder.java b/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/TokenizeTextBuilder.java
index 6e1a969..7c96f3f 100644
--- a/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/TokenizeTextBuilder.java
+++ b/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/TokenizeTextBuilder.java
@@ -21,6 +21,7 @@ import java.io.Reader;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
+import java.util.Objects;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenStream;
@@ -35,7 +36,6 @@ import org.kitesdk.morphline.api.MorphlineContext;
 import org.kitesdk.morphline.api.MorphlineRuntimeException;
 import org.kitesdk.morphline.api.Record;
 import org.kitesdk.morphline.base.AbstractCommand;
-import com.google.common.base.Preconditions;
 import com.typesafe.config.Config;
 
 /**
@@ -79,11 +79,9 @@ public final class TokenizeTextBuilder implements CommandBuilder {
       if (fieldType == null) {
         throw new MorphlineCompilationException("Missing Solr field type in schema.xml for name: " + solrFieldType, config);
       }
-      this.analyzer = fieldType.getIndexAnalyzer();
-      Preconditions.checkNotNull(analyzer);
+      this.analyzer = Objects.requireNonNull(fieldType.getIndexAnalyzer());
       // register CharTermAttribute for later (implicit) reuse
-      this.token = analyzer.tokenStream("content", reader).addAttribute(CharTermAttribute.class);
-      Preconditions.checkNotNull(token);
+      this.token = Objects.requireNonNull(analyzer.tokenStream("content", reader).addAttribute(CharTermAttribute.class));
       validateArguments();
     }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineTestBase.java
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineTestBase.java b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineTestBase.java
index 6fd7cdb..9a5791e 100644
--- a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineTestBase.java
+++ b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineTestBase.java
@@ -17,7 +17,6 @@
 package org.apache.solr.morphlines.solr;
 
 import com.codahale.metrics.MetricRegistry;
-import com.google.common.base.Joiner;
 import com.google.common.io.Files;
 import com.typesafe.config.Config;
 import org.apache.commons.io.FileUtils;
@@ -108,11 +107,10 @@ public class AbstractSolrMorphlineTestBase extends SolrTestCaseJ4 {
   }
 
   protected static void myInitCore(String baseDirName) throws Exception {
-    Joiner joiner = Joiner.on(File.separator);
+    String solrHome = RESOURCES_DIR + File.separator + baseDirName;
     initCore(
-        "solrconfig.xml", "schema.xml",
-        joiner.join(RESOURCES_DIR, baseDirName)
-    );    
+        "solrconfig.xml", "schema.xml", solrHome
+    );
   }
   
   @Before

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/SolrMorphlineZkAvroTest.java
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/SolrMorphlineZkAvroTest.java b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/SolrMorphlineZkAvroTest.java
index 4f3b27f..49e2a0e 100644
--- a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/SolrMorphlineZkAvroTest.java
+++ b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/SolrMorphlineZkAvroTest.java
@@ -24,9 +24,9 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Objects;
 
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
-import com.google.common.base.Preconditions;
 import org.apache.avro.file.DataFileReader;
 import org.apache.avro.file.FileReader;
 import org.apache.avro.generic.GenericData;
@@ -103,8 +103,7 @@ public class SolrMorphlineZkAvroTest extends AbstractSolrMorphlineZkTestBase {
     for (int i = 0; i < records.size(); i++) {  
       // verify morphline spat out expected data
       Record actual = collector.getRecords().get(i);
-      GenericData.Record expected = records.get(i);
-      Preconditions.checkNotNull(expected);
+      GenericData.Record expected = Objects.requireNonNull(records.get(i));
       assertTweetEquals(expected, actual, i);
       
       // verify Solr result set contains expected data
@@ -119,8 +118,8 @@ public class SolrMorphlineZkAvroTest extends AbstractSolrMorphlineZkTestBase {
   }
   
   private void assertTweetEquals(GenericData.Record expected, Record actual, int i) {
-    Preconditions.checkNotNull(expected);
-    Preconditions.checkNotNull(actual);
+    Objects.requireNonNull(expected);
+    Objects.requireNonNull(actual);
 //    System.out.println("\n\nexpected: " + toString(expected));
 //    System.out.println("actual:   " + actual);
     String[] fieldNames = new String[] { 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java b/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java
index 7576ae5..145d236 100644
--- a/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java
+++ b/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java
@@ -25,10 +25,10 @@ import java.util.TreeSet;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.Condition;
 import java.util.concurrent.locks.ReentrantLock;
+import java.util.function.Predicate;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Predicate;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.cloud.SolrZkClient;
@@ -330,7 +330,7 @@ public class DistributedQueue {
       updateLock.lockInterruptibly();
       try {
         for (String child : knownChildren) {
-          if (acceptFilter.apply(child)) {
+          if (acceptFilter.test(child)) {
             foundChildren.add(child);
           }
         }
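
Moving from com.google.common.base.Predicate to java.util.function.Predicate changes only the call site, apply() becoming test(); lambdas satisfy both. A dependency-free sketch (the child names are made up):

import java.util.Arrays;
import java.util.List;
import java.util.function.Predicate;

public class PredicateSketch {
  public static void main(String[] args) {
    List<String> children = Arrays.asList("qn-01", "qn-02", "other");
    // Guava's Predicate used apply(); the JDK interface uses test().
    Predicate<String> acceptFilter = s -> s.startsWith("qn-");
    for (String child : children) {
      if (acceptFilter.test(child)) {
        System.out.println("accepted " + child);
      }
    }
  }
}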

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/core/src/java/org/apache/solr/core/CoreContainer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index e641b2c..ad4560e 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -78,7 +78,7 @@ import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static com.google.common.base.Preconditions.checkNotNull;
+import static java.util.Objects.requireNonNull;
 import static org.apache.solr.common.params.CommonParams.AUTHC_PATH;
 import static org.apache.solr.common.params.CommonParams.AUTHZ_PATH;
 import static org.apache.solr.common.params.CommonParams.COLLECTIONS_HANDLER_PATH;
@@ -246,7 +246,7 @@ public class CoreContainer {
   public CoreContainer(NodeConfig config, Properties properties, CoresLocator locator, boolean asyncSolrCoreLoad) {
     this.loader = config.getSolrResourceLoader();
     this.solrHome = loader.getInstancePath().toString();
-    this.cfg = checkNotNull(config);
+    this.cfg = requireNonNull(config);
     this.coresLocator = locator;
     this.containerProperties = new Properties(properties);
     this.asyncSolrCoreLoad = asyncSolrCoreLoad;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/core/src/java/org/apache/solr/core/SolrCore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index 96f8738..5019cd0 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -39,6 +39,7 @@ import java.util.LinkedHashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Optional;
 import java.util.Properties;
 import java.util.Set;
@@ -152,7 +153,6 @@ import org.apache.zookeeper.data.Stat;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static com.google.common.base.Preconditions.checkNotNull;
 import static org.apache.solr.common.params.CommonParams.NAME;
 import static org.apache.solr.common.params.CommonParams.PATH;
 
@@ -816,9 +816,7 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
     
     assert ObjectReleaseTracker.track(searcherExecutor); // ensure that in unclean shutdown tests we still close this
     
-    checkNotNull(coreDescriptor, "coreDescriptor cannot be null");
-    
-    this.coreDescriptor = coreDescriptor;
+    this.coreDescriptor = Objects.requireNonNull(coreDescriptor, "coreDescriptor cannot be null");
     setName(name);
     MDCLoggingContext.setCore(this);
     

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/core/src/java/org/apache/solr/core/backup/BackupManager.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/backup/BackupManager.java b/solr/core/src/java/org/apache/solr/core/backup/BackupManager.java
index e650553..c80b2b7 100644
--- a/solr/core/src/java/org/apache/solr/core/backup/BackupManager.java
+++ b/solr/core/src/java/org/apache/solr/core/backup/BackupManager.java
@@ -27,6 +27,7 @@ import java.net.URI;
 import java.nio.charset.StandardCharsets;
 import java.util.Collections;
 import java.util.List;
+import java.util.Objects;
 import java.util.Properties;
 
 import com.google.common.base.Preconditions;
@@ -68,8 +69,8 @@ public class BackupManager {
   protected final BackupRepository repository;
 
   public BackupManager(BackupRepository repository, ZkStateReader zkStateReader, String collectionName) {
-    this.repository = Preconditions.checkNotNull(repository);
-    this.zkStateReader = Preconditions.checkNotNull(zkStateReader);
+    this.repository = Objects.requireNonNull(repository);
+    this.zkStateReader = Objects.requireNonNull(zkStateReader);
   }
 
   /**
@@ -88,8 +89,8 @@ public class BackupManager {
    * @throws IOException In case of errors.
    */
   public Properties readBackupProperties(URI backupLoc, String backupId) throws IOException {
-    Preconditions.checkNotNull(backupLoc);
-    Preconditions.checkNotNull(backupId);
+    Objects.requireNonNull(backupLoc);
+    Objects.requireNonNull(backupId);
 
     // Backup location
     URI backupPath = repository.resolve(backupLoc, backupId);
@@ -129,7 +130,7 @@ public class BackupManager {
    * @throws IOException in case of errors.
    */
   public DocCollection readCollectionState(URI backupLoc, String backupId, String collectionName) throws IOException {
-    Preconditions.checkNotNull(collectionName);
+    Objects.requireNonNull(collectionName);
 
     URI zkStateDir = repository.resolve(backupLoc, backupId, ZK_STATE_DIR);
     try (IndexInput is = repository.openInput(zkStateDir, COLLECTION_PROPS_FILE, IOContext.DEFAULT)) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepositoryFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepositoryFactory.java b/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepositoryFactory.java
index aaccceb..22a4895 100644
--- a/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepositoryFactory.java
+++ b/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepositoryFactory.java
@@ -20,6 +20,7 @@ package org.apache.solr.core.backup.repository;
 import java.lang.invoke.MethodHandles;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.Objects;
 
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
@@ -29,8 +30,6 @@ import org.apache.solr.core.SolrResourceLoader;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Preconditions;
-
 public class BackupRepositoryFactory {
   private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
@@ -67,9 +66,9 @@ public class BackupRepositoryFactory {
   }
 
   public BackupRepository newInstance(SolrResourceLoader loader, String name) {
-    Preconditions.checkNotNull(loader);
-    Preconditions.checkNotNull(name);
-    PluginInfo repo = Preconditions.checkNotNull(backupRepoPluginByName.get(name),
+    Objects.requireNonNull(loader);
+    Objects.requireNonNull(name);
+    PluginInfo repo = Objects.requireNonNull(backupRepoPluginByName.get(name),
         "Could not find a backup repository with name " + name);
 
     BackupRepository result = loader.newInstance(repo.className, BackupRepository.class);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/core/src/java/org/apache/solr/core/backup/repository/HdfsBackupRepository.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/backup/repository/HdfsBackupRepository.java b/solr/core/src/java/org/apache/solr/core/backup/repository/HdfsBackupRepository.java
index f465765..99f858a 100644
--- a/solr/core/src/java/org/apache/solr/core/backup/repository/HdfsBackupRepository.java
+++ b/solr/core/src/java/org/apache/solr/core/backup/repository/HdfsBackupRepository.java
@@ -21,7 +21,9 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.util.Objects;
 
+import com.google.common.base.Preconditions;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -39,8 +41,6 @@ import org.apache.solr.core.HdfsDirectoryFactory;
 import org.apache.solr.store.hdfs.HdfsDirectory;
 import org.apache.solr.store.hdfs.HdfsDirectory.HdfsIndexInput;
 
-import com.google.common.base.Preconditions;
-
 public class HdfsBackupRepository implements BackupRepository {
   private static final String HDFS_UMASK_MODE_PARAM = "solr.hdfs.permissions.umask-mode";
 
@@ -67,7 +67,7 @@ public class HdfsBackupRepository implements BackupRepository {
       this.hdfsConfig.set(FsPermission.UMASK_LABEL, umaskVal);
     }
 
-    String hdfsSolrHome = (String) Preconditions.checkNotNull(args.get(HdfsDirectoryFactory.HDFS_HOME),
+    String hdfsSolrHome = (String) Objects.requireNonNull(args.get(HdfsDirectoryFactory.HDFS_HOME),
         "Please specify " + HdfsDirectoryFactory.HDFS_HOME + " property.");
     Path path = new Path(hdfsSolrHome);
     while (path != null) { // Compute the path of root file-system (without requiring an additional system property).
@@ -99,7 +99,7 @@ public class HdfsBackupRepository implements BackupRepository {
 
   @Override
   public URI createURI(String location) {
-    Preconditions.checkNotNull(location);
+    Objects.requireNonNull(location);
 
     URI result = null;
     try {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/core/src/java/org/apache/solr/core/backup/repository/LocalFileSystemRepository.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/backup/repository/LocalFileSystemRepository.java b/solr/core/src/java/org/apache/solr/core/backup/repository/LocalFileSystemRepository.java
index 4ac2558..01810f6 100644
--- a/solr/core/src/java/org/apache/solr/core/backup/repository/LocalFileSystemRepository.java
+++ b/solr/core/src/java/org/apache/solr/core/backup/repository/LocalFileSystemRepository.java
@@ -27,6 +27,7 @@ import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.nio.file.SimpleFileVisitor;
 import java.nio.file.attribute.BasicFileAttributes;
+import java.util.Objects;
 
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
@@ -60,7 +61,7 @@ public class LocalFileSystemRepository implements BackupRepository {
 
   @Override
   public URI createURI(String location) {
-    Preconditions.checkNotNull(location);
+    Objects.requireNonNull(location);
 
     URI result = null;
     try {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotMetaDataManager.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotMetaDataManager.java b/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotMetaDataManager.java
index 54feddc..8e4d185 100644
--- a/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotMetaDataManager.java
+++ b/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotMetaDataManager.java
@@ -26,10 +26,10 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Objects;
 import java.util.Optional;
 import java.util.stream.Collectors;
 
-import com.google.common.base.Preconditions;
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexCommit;
@@ -169,7 +169,7 @@ public class SolrSnapshotMetaDataManager {
    * @throws IOException in case of I/O errors.
    */
   public synchronized void snapshot(String name, String indexDirPath, long gen) throws IOException {
-    Preconditions.checkNotNull(name);
+    Objects.requireNonNull(name);
 
     log.info("Creating the snapshot named {} for core {} associated with index commit with generation {} in directory {}"
         , name, solrCore.getName(), gen, indexDirPath);
@@ -205,7 +205,7 @@ public class SolrSnapshotMetaDataManager {
    */
   public synchronized Optional<SnapshotMetaData> release(String name) throws IOException {
     log.info("Deleting the snapshot named {} for core {}", name, solrCore.getName());
-    SnapshotMetaData result = nameToDetailsMapping.remove(Preconditions.checkNotNull(name));
+    SnapshotMetaData result = nameToDetailsMapping.remove(Objects.requireNonNull(name));
     if(result != null) {
       boolean success = false;
       try {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/core/src/java/org/apache/solr/handler/OldBackupDirectory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/OldBackupDirectory.java b/solr/core/src/java/org/apache/solr/handler/OldBackupDirectory.java
index 79c5f09..ee78efe 100644
--- a/solr/core/src/java/org/apache/solr/handler/OldBackupDirectory.java
+++ b/solr/core/src/java/org/apache/solr/handler/OldBackupDirectory.java
@@ -21,12 +21,11 @@ import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.Locale;
+import java.util.Objects;
 import java.util.Optional;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import com.google.common.base.Preconditions;
-
 class OldBackupDirectory implements Comparable<OldBackupDirectory> {
   private static final Pattern dirNamePattern = Pattern.compile("^snapshot[.](.*)$");
 
@@ -35,8 +34,8 @@ class OldBackupDirectory implements Comparable<OldBackupDirectory> {
   private Optional<Date> timestamp = Optional.empty();
 
   public OldBackupDirectory(URI basePath, String dirName) {
-    this.dirName = Preconditions.checkNotNull(dirName);
-    this.basePath = Preconditions.checkNotNull(basePath);
+    this.dirName = Objects.requireNonNull(dirName);
+    this.basePath = Objects.requireNonNull(basePath);
     Matcher m = dirNamePattern.matcher(dirName);
     if (m.find()) {
       try {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/core/src/java/org/apache/solr/handler/SnapShooter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/SnapShooter.java b/solr/core/src/java/org/apache/solr/handler/SnapShooter.java
index 52f4889..bf02e4c 100644
--- a/solr/core/src/java/org/apache/solr/handler/SnapShooter.java
+++ b/solr/core/src/java/org/apache/solr/handler/SnapShooter.java
@@ -27,10 +27,10 @@ import java.util.Collections;
 import java.util.Date;
 import java.util.List;
 import java.util.Locale;
+import java.util.Objects;
 import java.util.Optional;
 import java.util.function.Consumer;
 
-import com.google.common.base.Preconditions;
 import org.apache.lucene.index.IndexCommit;
 import org.apache.lucene.store.Directory;
 import org.apache.solr.common.SolrException;
@@ -84,8 +84,8 @@ public class SnapShooter {
   }
 
   private void initialize(BackupRepository backupRepo, SolrCore core, URI location, String snapshotName, String commitName) {
-    this.solrCore = Preconditions.checkNotNull(core);
-    this.backupRepo = Preconditions.checkNotNull(backupRepo);
+    this.solrCore = Objects.requireNonNull(core);
+    this.backupRepo = Objects.requireNonNull(backupRepo);
     this.baseSnapDirPath = location;
     this.snapshotName = snapshotName;
     if (snapshotName != null) {
@@ -111,7 +111,7 @@ public class SnapShooter {
   }
 
   public void validateDeleteSnapshot() {
-    Preconditions.checkNotNull(this.snapshotName);
+    Objects.requireNonNull(this.snapshotName);
 
     boolean dirFound = false;
     String[] paths;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java b/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java
index 0d0a302..d15b1c1 100644
--- a/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java
+++ b/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java
@@ -55,7 +55,6 @@ import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.schema.FieldType;
 import org.apache.solr.util.SolrPluginUtils;
 
-import com.google.common.base.Function;
 import com.google.common.collect.Multimap;
 import com.google.common.collect.Multimaps;
 
@@ -72,26 +71,6 @@ public class ExtendedDismaxQParser extends QParser {
    */
   private static String IMPOSSIBLE_FIELD_NAME = "\uFFFC\uFFFC\uFFFC";
 
-  /**
-   * Helper function which returns the specified {@link FieldParams}' {@link FieldParams#getWordGrams()} value.
-   */
-  private static final Function<FieldParams, Integer> WORD_GRAM_EXTRACTOR = new Function<FieldParams, Integer>() {
-    @Override
-    public Integer apply(FieldParams input) {
-      return input.getWordGrams();
-    }
-  };
-
-  /**
-   * Helper function which returns the specified {@link FieldParams}' {@link FieldParams#getSlop()} value.
-   */
-  private static final Function<FieldParams, Integer> PHRASE_SLOP_EXTRACTOR = new Function<FieldParams, Integer>() {
-    @Override
-    public Integer apply(FieldParams input) {
-      return input.getSlop();
-    }
-  };
-
   /** shorten the class references for utilities */
   private static class U extends SolrPluginUtils {
     /* :NOOP */
@@ -246,7 +225,7 @@ public class ExtendedDismaxQParser extends QParser {
       }
 
       // create a map of {wordGram, [phraseField]}
-      Multimap<Integer, FieldParams> phraseFieldsByWordGram = Multimaps.index(allPhraseFields, WORD_GRAM_EXTRACTOR);
+      Multimap<Integer, FieldParams> phraseFieldsByWordGram = Multimaps.index(allPhraseFields, FieldParams::getWordGrams);
 
       // for each {wordGram, [phraseField]} entry, create and add shingled field queries to the main user query
       for (Map.Entry<Integer, Collection<FieldParams>> phraseFieldsByWordGramEntry : phraseFieldsByWordGram.asMap().entrySet()) {
@@ -254,7 +233,7 @@ public class ExtendedDismaxQParser extends QParser {
         // group the fields within this wordGram collection by their associated slop (it's possible that the same
         // field appears multiple times for the same wordGram count but with different slop values. In this case, we
         // should take the *sum* of those phrase queries, rather than the max across them).
-        Multimap<Integer, FieldParams> phraseFieldsBySlop = Multimaps.index(phraseFieldsByWordGramEntry.getValue(), PHRASE_SLOP_EXTRACTOR);
+        Multimap<Integer, FieldParams> phraseFieldsBySlop = Multimaps.index(phraseFieldsByWordGramEntry.getValue(), FieldParams::getSlop);
         for (Map.Entry<Integer, Collection<FieldParams>> phraseFieldsBySlopEntry : phraseFieldsBySlop.asMap().entrySet()) {
           addShingledPhraseQueries(query, normalClauses, phraseFieldsBySlopEntry.getValue(),
               phraseFieldsByWordGramEntry.getKey(), config.tiebreaker, phraseFieldsBySlopEntry.getKey());
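
The two deleted helper Functions are replaced by method references passed straight to Guava's Multimaps.index. For a pure-JDK view of the same {wordGram -> [phraseField]} grouping, Collectors.groupingBy does the equivalent (FieldParams below is a reduced, hypothetical stand-in):

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class GroupBySketch {
  // Stand-in for FieldParams, reduced to the one accessor the grouping uses.
  static class FieldParams {
    final String field;
    final int wordGrams;
    FieldParams(String field, int wordGrams) { this.field = field; this.wordGrams = wordGrams; }
    int getWordGrams() { return wordGrams; }
  }

  public static void main(String[] args) {
    List<FieldParams> allPhraseFields = Arrays.asList(
        new FieldParams("title", 2), new FieldParams("body", 2), new FieldParams("body", 3));
    // The patch passes FieldParams::getWordGrams straight to Multimaps.index;
    // this is the pure-JDK equivalent of that {wordGram -> [phraseField]} index.
    Map<Integer, List<FieldParams>> byWordGram = allPhraseFields.stream()
        .collect(Collectors.groupingBy(FieldParams::getWordGrams));
    byWordGram.forEach((k, v) -> System.out.println(k + " -> " + v.size() + " field(s)"));
  }
}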

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index 242cd2a..bf381f4 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@ -32,6 +32,7 @@ import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Set;
 
 import java.util.concurrent.TimeUnit;
@@ -110,8 +111,6 @@ import org.apache.solr.update.SolrIndexConfig;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Function;
-import com.google.common.base.Objects;
 import com.google.common.collect.Iterables;
 
 /**
@@ -525,12 +524,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
    * Returns a collection of all field names the index reader knows about.
    */
   public Iterable<String> getFieldNames() {
-    return Iterables.transform(fieldInfos, new Function<FieldInfo,String>() {
-      @Override
-      public String apply(FieldInfo fieldInfo) {
-        return fieldInfo.name;
-      }
-    });
+    return Iterables.transform(fieldInfos, fieldInfo -> fieldInfo.name);
   }
 
   public SolrCache<Query,DocSet> getFilterCache() {
@@ -2674,8 +2668,8 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
     }
 
     private boolean equalsTo(FilterImpl other) {
-      return Objects.equal(this.topFilter, other.topFilter) &&
-             Objects.equal(this.weights, other.weights);
+      return Objects.equals(this.topFilter, other.topFilter) &&
+             Objects.equals(this.weights, other.weights);
     }
 
     @Override
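
Guava's Objects.equal and the JDK's Objects.equals are both null-safe two-argument equality checks, so the swap above is mechanical. A quick sketch:

import java.util.Objects;

public class NullSafeEqualsSketch {
  public static void main(String[] args) {
    System.out.println(Objects.equals(null, null));  // true
    System.out.println(Objects.equals("a", null));   // false
    System.out.println(Objects.equals("a", "a"));    // true
  }
}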

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/core/src/java/org/apache/solr/update/MergeIndexesCommand.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/MergeIndexesCommand.java b/solr/core/src/java/org/apache/solr/update/MergeIndexesCommand.java
index f330a5c..e0abba0 100644
--- a/solr/core/src/java/org/apache/solr/update/MergeIndexesCommand.java
+++ b/solr/core/src/java/org/apache/solr/update/MergeIndexesCommand.java
@@ -16,13 +16,11 @@
  */
 package org.apache.solr.update;
 
-import com.google.common.base.Function;
-import com.google.common.base.Joiner;
-import com.google.common.collect.Iterables;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.solr.request.SolrQueryRequest;
 
 import java.util.List;
+import java.util.stream.Collectors;
 
 /**
  * A merge indexes command encapsulated in an object.
@@ -46,13 +44,7 @@ public class MergeIndexesCommand extends UpdateCommand {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder(super.toString());
-    Joiner joiner = Joiner.on(",");
-    Iterable<String> directories = Iterables.transform(readers, new Function<DirectoryReader, String>() {
-      public String apply(DirectoryReader reader) {
-        return reader.directory().toString();
-      }
-    });
-    joiner.skipNulls().join(sb, directories);
+    sb.append(readers.stream().map(reader-> reader.directory().toString()).collect(Collectors.joining(",")));
     sb.append('}');
     return sb.toString();
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestJettySolrRunner.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestJettySolrRunner.java b/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestJettySolrRunner.java
index 9ded73b..6b21aa1 100644
--- a/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestJettySolrRunner.java
+++ b/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestJettySolrRunner.java
@@ -16,12 +16,12 @@
  */
 package org.apache.solr.client.solrj.embedded;
 
-import com.google.common.base.Charsets;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.request.CoreAdminRequest;
 import org.junit.Test;
 
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
@@ -44,7 +44,7 @@ public class TestJettySolrRunner extends SolrTestCaseJ4 {
         = "<solr><str name=\"configSetBaseDir\">CONFIGSETS</str><str name=\"coreRootDirectory\">COREROOT</str></solr>"
         .replace("CONFIGSETS", configsets.toString())
         .replace("COREROOT", coresDir.toString());
-    Files.write(solrHome.resolve("solr.xml"), solrxml.getBytes(Charsets.UTF_8));
+    Files.write(solrHome.resolve("solr.xml"), solrxml.getBytes(StandardCharsets.UTF_8));
 
     JettyConfig jettyConfig = buildJettyConfig("/solr");
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/core/src/test/org/apache/solr/cloud/DistributedQueueTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/DistributedQueueTest.java b/solr/core/src/test/org/apache/solr/cloud/DistributedQueueTest.java
index cb904a7..d1192a1 100644
--- a/solr/core/src/test/org/apache/solr/cloud/DistributedQueueTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/DistributedQueueTest.java
@@ -22,6 +22,7 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
+import java.util.function.Predicate;
 
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.cloud.SolrZkClient;
@@ -31,9 +32,6 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import static com.google.common.base.Predicates.alwaysFalse;
-import static com.google.common.base.Predicates.alwaysTrue;
-
 public class DistributedQueueTest extends SolrTestCaseJ4 {
 
   private static final Charset UTF8 = Charset.forName("UTF-8");
@@ -151,17 +149,20 @@ public class DistributedQueueTest extends SolrTestCaseJ4 {
     dq.offer(data);
     dq.offer(data);
 
+    Predicate<String> alwaysTrue = s -> true;
+    Predicate<String> alwaysFalse = s -> false;
+
     // Should be able to get 0, 1, 2, or 3 instantly
     for (int i = 0; i <= 3; ++i) {
-      assertEquals(i, dq.peekElements(i, 0, alwaysTrue()).size());
+      assertEquals(i, dq.peekElements(i, 0, alwaysTrue).size());
     }
 
     // Asking for more should return only 3.
-    assertEquals(3, dq.peekElements(4, 0, alwaysTrue()).size());
+    assertEquals(3, dq.peekElements(4, 0, alwaysTrue).size());
 
     // If we filter everything out, we should block for the full time.
     long start = System.nanoTime();
-    assertEquals(0, dq.peekElements(4, 1000, alwaysFalse()).size());
+    assertEquals(0, dq.peekElements(4, 1000, alwaysFalse).size());
     assertTrue(System.nanoTime() - start >= TimeUnit.MILLISECONDS.toNanos(500));
 
     // If someone adds a new matching element while we're waiting, we should return immediately.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/core/src/test/org/apache/solr/cloud/KerberosTestServices.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/KerberosTestServices.java b/solr/core/src/test/org/apache/solr/cloud/KerberosTestServices.java
index 6295ddd..ab8761a 100644
--- a/solr/core/src/test/org/apache/solr/cloud/KerberosTestServices.java
+++ b/solr/core/src/test/org/apache/solr/cloud/KerberosTestServices.java
@@ -24,9 +24,9 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Properties;
 
-import com.google.common.base.Preconditions;
 import org.apache.hadoop.minikdc.MiniKdc;
 import org.apache.solr.client.solrj.impl.Krb5HttpClientBuilder;
 
@@ -193,10 +193,8 @@ public class KerberosTestServices {
 
     public Builder withJaasConfiguration(String clientPrincipal, File clientKeytab,
                                          String serverPrincipal, File serverKeytab) {
-      Preconditions.checkNotNull(clientPrincipal);
-      Preconditions.checkNotNull(clientKeytab);
-      this.clientPrincipal = clientPrincipal;
-      this.clientKeytab = clientKeytab;
+      this.clientPrincipal = Objects.requireNonNull(clientPrincipal);
+      this.clientKeytab = Objects.requireNonNull(clientKeytab);
       this.serverPrincipal = serverPrincipal;
       this.serverKeytab = serverKeytab;
       this.appName = null;
@@ -204,10 +202,8 @@ public class KerberosTestServices {
     }
 
     public Builder withJaasConfiguration(String principal, File keytab, String appName) {
-      Preconditions.checkNotNull(principal);
-      Preconditions.checkNotNull(keytab);
-      this.clientPrincipal = principal;
-      this.clientKeytab = keytab;
+      this.clientPrincipal = Objects.requireNonNull(principal);
+      this.clientKeytab = Objects.requireNonNull(keytab);
       this.serverPrincipal = null;
       this.serverKeytab = null;
       this.appName = appName;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/core/src/test/org/apache/solr/core/TestLazyCores.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestLazyCores.java b/solr/core/src/test/org/apache/solr/core/TestLazyCores.java
index 33d6934..34cd306 100644
--- a/solr/core/src/test/org/apache/solr/core/TestLazyCores.java
+++ b/solr/core/src/test/org/apache/solr/core/TestLazyCores.java
@@ -18,6 +18,7 @@ package org.apache.solr.core;
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -27,7 +28,6 @@ import java.util.Map;
 import java.util.regex.Pattern;
 
 import com.google.common.collect.ImmutableList;
-import org.apache.commons.codec.Charsets;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.solr.SolrTestCaseJ4;
@@ -537,13 +537,13 @@ public class TestLazyCores extends SolrTestCaseJ4 {
     // Write the file for core discovery
     FileUtils.writeStringToFile(new File(coreRoot, "core.properties"), "name=" + coreName +
         System.getProperty("line.separator") + "transient=true" +
-        System.getProperty("line.separator") + "loadOnStartup=true", Charsets.UTF_8.toString());
+        System.getProperty("line.separator") + "loadOnStartup=true", StandardCharsets.UTF_8);
 
-    FileUtils.writeStringToFile(new File(subHome, "solrconfig.snippet.randomindexconfig.xml"), rand_snip, Charsets.UTF_8.toString());
+    FileUtils.writeStringToFile(new File(subHome, "solrconfig.snippet.randomindexconfig.xml"), rand_snip, StandardCharsets.UTF_8);
 
-    FileUtils.writeStringToFile(new File(subHome, "solrconfig.xml"), config, Charsets.UTF_8.toString());
+    FileUtils.writeStringToFile(new File(subHome, "solrconfig.xml"), config, StandardCharsets.UTF_8);
 
-    FileUtils.writeStringToFile(new File(subHome, "schema.xml"), schema, Charsets.UTF_8.toString());
+    FileUtils.writeStringToFile(new File(subHome, "schema.xml"), schema, StandardCharsets.UTF_8);
   }
 
   // Write out the cores' config files, both bad schema files, bad config files as well as some good cores.
@@ -565,11 +565,11 @@ public class TestLazyCores extends SolrTestCaseJ4 {
     // Collect the files that we'll write to the config directories.
     String top = SolrTestCaseJ4.TEST_HOME() + "/collection1/conf";
     String min_schema = FileUtils.readFileToString(new File(top, "schema-tiny.xml"),
-        Charsets.UTF_8.toString());
+        StandardCharsets.UTF_8);
     String min_config = FileUtils.readFileToString(new File(top, "solrconfig-minimal.xml"),
-        Charsets.UTF_8.toString());
+        StandardCharsets.UTF_8);
     String rand_snip = FileUtils.readFileToString(new File(top, "solrconfig.snippet.randomindexconfig.xml"),
-        Charsets.UTF_8.toString());
+        StandardCharsets.UTF_8);
 
     // Now purposely mess up the config files, introducing stupid syntax errors.
     String bad_config = min_config.replace("<requestHandler", "<reqsthalr");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminHandlerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminHandlerTest.java
index af5bbc9..04bc3bd 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminHandlerTest.java
@@ -17,12 +17,12 @@
 package org.apache.solr.handler.admin;
 
 import java.io.File;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.Map;
 
 import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule;
-import org.apache.commons.codec.Charsets;
 import org.apache.commons.io.FileUtils;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
@@ -250,7 +250,7 @@ public class CoreAdminHandlerTest extends SolrTestCaseJ4 {
     solrHomeDirectory.mkdirs();
     copySolrHomeToTemp(solrHomeDirectory, "corex");
     File corex = new File(solrHomeDirectory, "corex");
-    FileUtils.write(new File(corex, "core.properties"), "", Charsets.UTF_8.toString());
+    FileUtils.write(new File(corex, "core.properties"), "", StandardCharsets.UTF_8);
     JettySolrRunner runner = new JettySolrRunner(solrHomeDirectory.getAbsolutePath(), buildJettyConfig("/solr"));
     runner.start();
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/core/src/test/org/apache/solr/schema/ChangedSchemaMergeTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/ChangedSchemaMergeTest.java b/solr/core/src/test/org/apache/solr/schema/ChangedSchemaMergeTest.java
index 70fcc69..d56382e 100644
--- a/solr/core/src/test/org/apache/solr/schema/ChangedSchemaMergeTest.java
+++ b/solr/core/src/test/org/apache/solr/schema/ChangedSchemaMergeTest.java
@@ -19,8 +19,8 @@ package org.apache.solr.schema;
 import java.io.File;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.codec.Charsets;
 import org.apache.commons.io.FileUtils;
 
 import org.apache.lucene.search.similarities.Similarity;
@@ -90,11 +90,11 @@ public class ChangedSchemaMergeTest extends SolrTestCaseJ4 {
     copyMinConf(changed, "name=changed");
     // Overlay with my local schema
     schemaFile = new File(new File(changed, "conf"), "schema.xml");
-    FileUtils.writeStringToFile(schemaFile, withWhich, Charsets.UTF_8.toString());
+    FileUtils.writeStringToFile(schemaFile, withWhich, StandardCharsets.UTF_8);
 
     String discoveryXml = "<solr></solr>";
     File solrXml = new File(solrHomeDirectory, "solr.xml");
-    FileUtils.write(solrXml, discoveryXml, Charsets.UTF_8.toString());
+    FileUtils.write(solrXml, discoveryXml, StandardCharsets.UTF_8);
 
     final CoreContainer cores = new CoreContainer(solrHomeDirectory.getAbsolutePath());
     cores.load();
@@ -133,7 +133,7 @@ public class ChangedSchemaMergeTest extends SolrTestCaseJ4 {
       changed.getUpdateHandler().commit(new CommitUpdateCommand(req, false));
 
       // write the new schema out and make it current
-      FileUtils.writeStringToFile(schemaFile, withoutWhich, Charsets.UTF_8.toString());
+      FileUtils.writeStringToFile(schemaFile, withoutWhich, StandardCharsets.UTF_8);
 
       IndexSchema iSchema = IndexSchemaFactory.buildIndexSchema("schema.xml", changed.getSolrConfig());
       changed.setLatestSchema(iSchema);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/core/src/test/org/apache/solr/schema/TestBinaryField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/TestBinaryField.java b/solr/core/src/test/org/apache/solr/schema/TestBinaryField.java
index 595a9a7..9b42749 100644
--- a/solr/core/src/test/org/apache/solr/schema/TestBinaryField.java
+++ b/solr/core/src/test/org/apache/solr/schema/TestBinaryField.java
@@ -16,7 +16,6 @@
  */
 package org.apache.solr.schema;
 
-import com.google.common.base.Charsets;
 import org.apache.commons.io.FileUtils;
 import org.apache.solr.SolrJettyTestBase;
 import org.apache.solr.SolrTestCaseJ4;
@@ -34,6 +33,7 @@ import java.io.File;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.util.List;
 import java.util.Properties;
@@ -64,7 +64,7 @@ public class TestBinaryField extends SolrJettyTestBase {
     FileUtils.copyFile(new File(src_dir, "solrconfig.snippet.randomindexconfig.xml"), 
                        new File(confDir, "solrconfig.snippet.randomindexconfig.xml"));
 
-    try (Writer w = new OutputStreamWriter(Files.newOutputStream(collDir.toPath().resolve("core.properties")), Charsets.UTF_8)) {
+    try (Writer w = new OutputStreamWriter(Files.newOutputStream(collDir.toPath().resolve("core.properties")), StandardCharsets.UTF_8)) {
       Properties coreProps = new Properties();
       coreProps.put("name", "collection1");
       coreProps.store(w, "");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/core/src/test/org/apache/solr/security/TestAuthorizationFramework.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/security/TestAuthorizationFramework.java b/solr/core/src/test/org/apache/solr/security/TestAuthorizationFramework.java
index 4c4b52e..acdf578 100644
--- a/solr/core/src/test/org/apache/solr/security/TestAuthorizationFramework.java
+++ b/solr/core/src/test/org/apache/solr/security/TestAuthorizationFramework.java
@@ -18,12 +18,12 @@ package org.apache.solr.security;
 
 import java.lang.invoke.MethodHandles;
 
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.function.Predicate;
 
-import org.apache.commons.io.Charsets;
 import org.apache.http.client.HttpClient;
 import org.apache.http.client.methods.HttpGet;
 import org.apache.http.util.EntityUtils;
@@ -49,7 +49,7 @@ public class TestAuthorizationFramework extends AbstractFullDistribZkTestBase {
     try (ZkStateReader zkStateReader = new ZkStateReader(zkServer.getZkAddress(),
         TIMEOUT, TIMEOUT)) {
       zkStateReader.getZkClient().create(ZkStateReader.SOLR_SECURITY_CONF_PATH,
-          "{\"authorization\":{\"class\":\"org.apache.solr.security.MockAuthorizationPlugin\"}}".getBytes(Charsets.UTF_8),
+          "{\"authorization\":{\"class\":\"org.apache.solr.security.MockAuthorizationPlugin\"}}".getBytes(StandardCharsets.UTF_8),
           CreateMode.PERSISTENT, true);
     }
   }
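
Worth noting: getBytes here was already using the Charset overload of String.getBytes, since commons-io's Charsets.UTF_8 is itself a java.nio.charset.Charset. Behavior is unchanged; only the source of the constant moves to the JDK. A sketch of the call shape, payload abbreviated:

    import java.nio.charset.StandardCharsets;

    // String.getBytes(Charset) cannot throw UnsupportedEncodingException,
    // unlike the name-based getBytes(String) overload.
    byte[] data = "{\"authorization\": ...}".getBytes(StandardCharsets.UTF_8);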

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/solrj/src/test/org/apache/solr/common/cloud/TestZkConfigManager.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/common/cloud/TestZkConfigManager.java b/solr/solrj/src/test/org/apache/solr/common/cloud/TestZkConfigManager.java
index e9b5913..159ff82 100644
--- a/solr/solrj/src/test/org/apache/solr/common/cloud/TestZkConfigManager.java
+++ b/solr/solrj/src/test/org/apache/solr/common/cloud/TestZkConfigManager.java
@@ -16,7 +16,6 @@
  */
 package org.apache.solr.common.cloud;
 
-import com.google.common.base.Charsets;
 import com.google.common.base.Throwables;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.cloud.ZkTestServer;
@@ -30,6 +29,7 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.security.NoSuchAlgorithmException;
@@ -69,7 +69,7 @@ public class TestZkConfigManager extends SolrTestCaseJ4 {
       ZkConfigManager configManager = new ZkConfigManager(zkClient);
       assertEquals(0, configManager.listConfigs().size());
 
-      byte[] testdata = "test data".getBytes(Charsets.UTF_8);
+      byte[] testdata = "test data".getBytes(StandardCharsets.UTF_8);
 
       Path tempConfig = createTempDir("config");
       Files.createFile(tempConfig.resolve("file1"));
@@ -102,7 +102,7 @@ public class TestZkConfigManager extends SolrTestCaseJ4 {
       assertArrayEquals(testdata, checkdata);
 
       // uploading to the same config overwrites
-      byte[] overwritten = "new test data".getBytes(Charsets.UTF_8);
+      byte[] overwritten = "new test data".getBytes(StandardCharsets.UTF_8);
       Files.write(tempConfig.resolve("file1"), overwritten);
       configManager.uploadConfigDir(tempConfig, "testconfig");
 
@@ -147,7 +147,7 @@ public class TestZkConfigManager extends SolrTestCaseJ4 {
       @Override
       protected Collection<ZkCredentials> createCredentials() {
         List<ZkCredentials> credentials = new ArrayList<>();
-        credentials.add(new ZkCredentials("digest", (readOnlyUsername + ":" + readOnlyPassword).getBytes(Charsets.UTF_8)));
+        credentials.add(new ZkCredentials("digest", (readOnlyUsername + ":" + readOnlyPassword).getBytes(StandardCharsets.UTF_8)));
         return credentials;
       }
     };
@@ -156,7 +156,7 @@ public class TestZkConfigManager extends SolrTestCaseJ4 {
       @Override
       protected Collection<ZkCredentials> createCredentials() {
         List<ZkCredentials> credentials = new ArrayList<>();
-        credentials.add(new ZkCredentials("digest", (writeableUsername + ":" + writeablePassword).getBytes(Charsets.UTF_8)));
+        credentials.add(new ZkCredentials("digest", (writeableUsername + ":" + writeablePassword).getBytes(StandardCharsets.UTF_8)));
         return credentials;
       }
     };

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/test-framework/src/java/org/apache/solr/SolrTestCaseHS.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseHS.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseHS.java
index aba2603..a27fbf2 100644
--- a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseHS.java
+++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseHS.java
@@ -21,6 +21,7 @@ import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.lang.invoke.MethodHandles;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -34,7 +35,6 @@ import java.util.Properties;
 import java.util.Random;
 import java.util.Set;
 
-import com.google.common.base.Charsets;
 import org.apache.commons.io.FileUtils;
 import org.apache.lucene.util.IOUtils;
 import org.apache.solr.client.solrj.SolrClient;
@@ -434,7 +434,7 @@ public class SolrTestCaseHS extends SolrTestCaseJ4 {
       copyConfFile(baseDir, collection, schemaFile);
 
       File collDir = new File(baseDir, collection);
-      try (Writer w = new OutputStreamWriter(Files.newOutputStream(collDir.toPath().resolve("core.properties")), Charsets.UTF_8)) {
+      try (Writer w = new OutputStreamWriter(Files.newOutputStream(collDir.toPath().resolve("core.properties")), StandardCharsets.UTF_8)) {
         Properties coreProps = new Properties();
         coreProps.put("name", "collection1");
         coreProps.put("config", solrconfigFile);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
index 53aecb1..3adad49 100644
--- a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
+++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
@@ -55,7 +55,6 @@ import java.util.Properties;
 import com.carrotsearch.randomizedtesting.RandomizedContext;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
 import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule;
-import org.apache.commons.codec.Charsets;
 import org.apache.commons.io.FileUtils;
 import org.apache.http.client.HttpClient;
 import org.apache.lucene.analysis.MockAnalyzer;
@@ -127,7 +126,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.xml.sax.SAXException;
 
-import static com.google.common.base.Preconditions.checkNotNull;
+import static java.util.Objects.requireNonNull;
 
 /**
  * A junit4 Solr test harness that extends LuceneTestCaseJ4. To change which core is used when loading the schema and solrconfig.xml, simply
@@ -639,7 +638,7 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
   }
 
   public static CoreContainer createCoreContainer(Path solrHome, String solrXML) {
-    testSolrHome = checkNotNull(solrHome);
+    testSolrHome = requireNonNull(solrHome);
     h = new TestHarness(solrHome, solrXML);
     lrf = h.getRequestFactory("standard", 0, 20, CommonParams.VERSION, "2.2");
     return h.getCoreContainer();
@@ -661,7 +660,7 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
   }
 
   public static CoreContainer createDefaultCoreContainer(Path solrHome) {
-    testSolrHome = checkNotNull(solrHome);
+    testSolrHome = requireNonNull(solrHome);
     h = new TestHarness("collection1", initCoreDataDir.getAbsolutePath(), "solrconfig.xml", "schema.xml");
     lrf = h.getRequestFactory("standard", 0, 20, CommonParams.VERSION, "2.2");
     return h.getCoreContainer();
@@ -1870,7 +1869,7 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
     }
     Files.createFile(dstRoot.toPath().resolve("core.properties"));
     if (propertiesContent != null) {
-      FileUtils.writeStringToFile(new File(dstRoot, "core.properties"), propertiesContent, Charsets.UTF_8.toString());
+      FileUtils.writeStringToFile(new File(dstRoot, "core.properties"), propertiesContent, StandardCharsets.UTF_8);
     }
     String top = SolrTestCaseJ4.TEST_HOME() + "/collection1/conf";
     FileUtils.copyFile(new File(top, "schema-tiny.xml"), new File(subHome, "schema.xml"));
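
Besides the charset swap, this hunk replaces Guava's Preconditions.checkNotNull with java.util.Objects.requireNonNull. Both throw NullPointerException when handed null and otherwise return the argument unchanged, so the substitution is behavior-preserving and dependency-free:

    import static java.util.Objects.requireNonNull;

    // Throws NullPointerException if solrHome is null, else returns it as-is.
    testSolrHome = requireNonNull(solrHome);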

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java b/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
index 2c1ae3b..5ebdfb7 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
@@ -20,6 +20,7 @@ import javax.servlet.Filter;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.ArrayList;
@@ -37,7 +38,6 @@ import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import com.google.common.base.Charsets;
 import org.apache.solr.client.solrj.embedded.JettyConfig;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.embedded.SSLConfig;
@@ -200,7 +200,7 @@ public class MiniSolrCloudCluster {
     try (SolrZkClient zkClient = new SolrZkClient(zkServer.getZkHost(), AbstractZkTestCase.TIMEOUT)) {
       zkClient.makePath("/solr/solr.xml", solrXml.getBytes(Charset.defaultCharset()), true);
       if (jettyConfig.sslConfig != null && jettyConfig.sslConfig.isSSLMode()) {
-        zkClient.makePath("/solr" + ZkStateReader.CLUSTER_PROPS, "{'urlScheme':'https'}".getBytes(Charsets.UTF_8), true);
+        zkClient.makePath("/solr" + ZkStateReader.CLUSTER_PROPS, "{'urlScheme':'https'}".getBytes(StandardCharsets.UTF_8), true);
       }
     }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e21511c/solr/test-framework/src/java/org/apache/solr/cloud/ZkTestServer.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/ZkTestServer.java b/solr/test-framework/src/java/org/apache/solr/cloud/ZkTestServer.java
index 4082c4b..f437863 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/ZkTestServer.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/ZkTestServer.java
@@ -16,7 +16,6 @@
  */
 package org.apache.solr.cloud;
 
-import com.google.common.collect.Ordering;
 import com.google.common.util.concurrent.AtomicLongMap;
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.util.TimeOut;
@@ -150,18 +149,18 @@ public class ZkTestServer {
       }
 
       private String reportLimitViolations() {
-        Object[] maxKeys = maxCounters.keySet().toArray();
-        Arrays.sort(maxKeys, new Comparator<Object>() {
-          private final Comparator<Long> valComp = Ordering.natural().reverse();
+        String[] maxKeys = maxCounters.keySet().toArray(new String[maxCounters.size()]);
+        Arrays.sort(maxKeys, new Comparator<String>() {
+          private final Comparator<Long> valComp = Comparator.<Long>naturalOrder().reversed();
           @Override
-          public int compare(Object o1, Object o2) {
+          public int compare(String o1, String o2) {
             return valComp.compare(maxCounters.get(o1), maxCounters.get(o2));
           }
         });
 
         StringBuilder sb = new StringBuilder();
         boolean first = true;
-        for (Object key : maxKeys) {
+        for (String key : maxKeys) {
           long value = maxCounters.get(key);
           if (value <= limit) continue;
           if (first) {
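
The reportLimitViolations change trades Guava's Ordering.natural().reverse() for the JDK-8 equivalent Comparator.<Long>naturalOrder().reversed(), and types the key array as String[] so the comparator no longer has to operate over Object. Since maxCounters maps String keys to primitive long counts, the same descending-by-count sort could also be expressed with a key-extracting comparator; a sketch of that alternative, not what the commit does:

    import java.util.Arrays;
    import java.util.Comparator;

    // Sort keys so the largest counter value comes first.
    Arrays.sort(maxKeys,
        Comparator.comparingLong((String k) -> maxCounters.get(k)).reversed());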


[36/50] [abbrv] lucene-solr:apiv2: SOLR-8542: Adds Solr Learning to Rank (LTR) plugin for reranking results with machine learning models. (Michael Nilsson, Diego Ceccarelli, Joshua Pantony, Jon Dorando, Naveen Santhapuri, Alessandro Benedetti, David Groh

Posted by sa...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestRankingFeature.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestRankingFeature.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestRankingFeature.java
new file mode 100644
index 0000000..437e10d
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestRankingFeature.java
@@ -0,0 +1,123 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.feature;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.ltr.TestRerankBase;
+import org.apache.solr.ltr.model.LinearModel;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+
+public class TestRankingFeature extends TestRerankBase {
+
+  @BeforeClass
+  public static void before() throws Exception {
+    setuptest("solrconfig-ltr.xml", "schema.xml");
+
+    assertU(adoc("id", "1", "title", "w1", "description", "w1", "popularity",
+        "1"));
+    assertU(adoc("id", "2", "title", "w2 2asd asdd didid", "description",
+        "w2 2asd asdd didid", "popularity", "2"));
+    assertU(adoc("id", "3", "title", "w3", "description", "w3", "popularity",
+        "3"));
+    assertU(adoc("id", "4", "title", "w4", "description", "w4", "popularity",
+        "4"));
+    assertU(adoc("id", "5", "title", "w5", "description", "w5", "popularity",
+        "5"));
+    assertU(adoc("id", "6", "title", "w1 w2", "description", "w1 w2",
+        "popularity", "6"));
+    assertU(adoc("id", "7", "title", "w1 w2 w3 w4 w5", "description",
+        "w1 w2 w3 w4 w5 w8", "popularity", "7"));
+    assertU(adoc("id", "8", "title", "w1 w1 w1 w2 w2 w8", "description",
+        "w1 w1 w1 w2 w2", "popularity", "8"));
+    assertU(commit());
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    aftertest();
+  }
+
+  @Test
+  public void testRankingSolrFeature() throws Exception {
+    // before();
+    loadFeature("powpularityS", SolrFeature.class.getCanonicalName(),
+        "{\"q\":\"{!func}pow(popularity,2)\"}");
+    loadFeature("unpopularityS", SolrFeature.class.getCanonicalName(),
+        "{\"q\":\"{!func}div(1,popularity)\"}");
+
+    loadModel("powpularityS-model", LinearModel.class.getCanonicalName(),
+        new String[] {"powpularityS"}, "{\"weights\":{\"powpularityS\":1.0}}");
+    loadModel("unpopularityS-model", LinearModel.class.getCanonicalName(),
+        new String[] {"unpopularityS"}, "{\"weights\":{\"unpopularityS\":1.0}}");
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("title:w1");
+    query.add("fl", "*, score");
+    query.add("rows", "4");
+
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==4");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='1'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='8'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='6'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='7'");
+    // Normal term match
+
+    query.add("rq", "{!ltr model=powpularityS-model reRankDocs=4}");
+    query.set("debugQuery", "on");
+
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==4");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='8'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==64.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='7'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/score==49.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='6'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/score==36.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='1'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/score==1.0");
+
+    query.remove("rq");
+    query.add("rq", "{!ltr model=unpopularityS-model reRankDocs=4}");
+
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==4");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='1'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==1.0");
+
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='6'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='7'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='8'");
+
+    //bad solr ranking feature
+    loadFeature("powdesS", SolrFeature.class.getCanonicalName(),
+        "{\"q\":\"{!func}pow(description,2)\"}");
+    loadModel("powdesS-model", LinearModel.class.getCanonicalName(),
+        new String[] {"powdesS"}, "{\"weights\":{\"powdesS\":1.0}}");
+
+    query.remove("rq");
+    query.add("rq", "{!ltr model=powdesS-model reRankDocs=4}");
+
+    assertJQ("/query" + query.toQueryString(),
+        "/error/msg/=='"+FeatureException.class.getCanonicalName()+": " +
+        "java.lang.UnsupportedOperationException: " +
+        "Unable to extract feature for powdesS'");
+    // aftertest();
+
+  }
+
+}
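
For orientation, the request shape this test exercises is an ordinary query plus an rq parameter naming an uploaded model. Roughly the HTTP form of the powpularityS case (the /solr/collection1 path is an assumption; it depends on the deployment):

    /solr/collection1/query?q=title:w1&fl=*,score&rows=4
        &rq={!ltr model=powpularityS-model reRankDocs=4}

The top reRankDocs hits of the base query are rescored by the model, so with weight 1.0 on pow(popularity,2) document 8 scores 64.0 and moves to the front, exactly as the assertions check.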

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestUserTermScoreWithQ.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestUserTermScoreWithQ.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestUserTermScoreWithQ.java
new file mode 100644
index 0000000..754409a
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestUserTermScoreWithQ.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.feature;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.ltr.TestRerankBase;
+import org.apache.solr.ltr.model.LinearModel;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestUserTermScoreWithQ extends TestRerankBase {
+
+  @BeforeClass
+  public static void before() throws Exception {
+    setuptest("solrconfig-ltr.xml", "schema.xml");
+
+    assertU(adoc("id", "1", "title", "w1", "description", "w1", "popularity",
+        "1"));
+    assertU(adoc("id", "2", "title", "w2 2asd asdd didid", "description",
+        "w2 2asd asdd didid", "popularity", "2"));
+    assertU(adoc("id", "3", "title", "w3", "description", "w3", "popularity",
+        "3"));
+    assertU(adoc("id", "4", "title", "w4", "description", "w4", "popularity",
+        "4"));
+    assertU(adoc("id", "5", "title", "w5", "description", "w5", "popularity",
+        "5"));
+    assertU(adoc("id", "6", "title", "w1 w2", "description", "w1 w2",
+        "popularity", "6"));
+    assertU(adoc("id", "7", "title", "w1 w2 w3 w4 w5", "description",
+        "w1 w2 w3 w4 w5 w8", "popularity", "7"));
+    assertU(adoc("id", "8", "title", "w1 w1 w1 w2 w2 w8", "description",
+        "w1 w1 w1 w2 w2", "popularity", "8"));
+    assertU(commit());
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    aftertest();
+  }
+
+  @Test
+  public void testUserTermScoreWithQ() throws Exception {
+    // before();
+    loadFeature("SomeTermQ", SolrFeature.class.getCanonicalName(),
+        "{\"q\":\"{!terms f=popularity}88888\"}");
+    loadModel("Term-modelQ", LinearModel.class.getCanonicalName(),
+        new String[] {"SomeTermQ"}, "{\"weights\":{\"SomeTermQ\":1.0}}");
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("title:w1");
+    query.add("fl", "*, score");
+    query.add("rows", "4");
+    query.add("rq", "{!ltr model=Term-modelQ reRankDocs=4}");
+    query.set("debugQuery", "on");
+
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==4");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==0.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/score==0.0");
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestUserTermScorerQuery.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestUserTermScorerQuery.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestUserTermScorerQuery.java
new file mode 100644
index 0000000..c79207c
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestUserTermScorerQuery.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.feature;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.ltr.TestRerankBase;
+import org.apache.solr.ltr.model.LinearModel;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestUserTermScorerQuery extends TestRerankBase  {
+
+  @BeforeClass
+  public static void before() throws Exception {
+    setuptest("solrconfig-ltr.xml", "schema.xml");
+
+    assertU(adoc("id", "1", "title", "w1", "description", "w1", "popularity",
+        "1"));
+    assertU(adoc("id", "2", "title", "w2 2asd asdd didid", "description",
+        "w2 2asd asdd didid", "popularity", "2"));
+    assertU(adoc("id", "3", "title", "w3", "description", "w3", "popularity",
+        "3"));
+    assertU(adoc("id", "4", "title", "w4", "description", "w4", "popularity",
+        "4"));
+    assertU(adoc("id", "5", "title", "w5", "description", "w5", "popularity",
+        "5"));
+    assertU(adoc("id", "6", "title", "w1 w2", "description", "w1 w2",
+        "popularity", "6"));
+    assertU(adoc("id", "7", "title", "w1 w2 w3 w4 w5", "description",
+        "w1 w2 w3 w4 w5 w8", "popularity", "7"));
+    assertU(adoc("id", "8", "title", "w1 w1 w1 w2 w2 w8", "description",
+        "w1 w1 w1 w2 w2", "popularity", "8"));
+    assertU(commit());
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    aftertest();
+  }
+
+  @Test
+  public void testUserTermScorerQuery() throws Exception {
+    // before();
+    loadFeature("matchedTitleDFExt", SolrFeature.class.getCanonicalName(),
+        "{\"q\":\"${user_query}\",\"df\":\"title\"}");
+    loadModel("Term-matchedTitleDFExt", LinearModel.class.getCanonicalName(),
+        new String[] {"matchedTitleDFExt"},
+        "{\"weights\":{\"matchedTitleDFExt\":1.1}}");
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("title:w1");
+    query.add("fl", "*, score");
+    query.add("rows", "4");
+    query.add("rq",
+        "{!ltr model=Term-matchedTitleDFExt reRankDocs=4 efi.user_query=w8}");
+
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==4");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='8'");
+  }
+}
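
The ${user_query} placeholder in the matchedTitleDFExt feature above is resolved per request from external feature info, the efi.* parameters on the rerank query, so a single stored feature can score against whatever the caller supplies:

    rq={!ltr model=Term-matchedTitleDFExt reRankDocs=4 efi.user_query=w8}

With efi.user_query=w8 and df=title, the feature becomes a title:w8 match that only document 8 satisfies, hence the [0]/id=='8' assertion.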

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestUserTermScorereQDF.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestUserTermScorereQDF.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestUserTermScorereQDF.java
new file mode 100644
index 0000000..f47a883
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestUserTermScorereQDF.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.feature;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.ltr.TestRerankBase;
+import org.apache.solr.ltr.model.LinearModel;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestUserTermScorereQDF extends TestRerankBase {
+
+  @BeforeClass
+  public static void before() throws Exception {
+    setuptest("solrconfig-ltr.xml", "schema.xml");
+
+    assertU(adoc("id", "1", "title", "w1", "description", "w1", "popularity",
+        "1"));
+    assertU(adoc("id", "2", "title", "w2 2asd asdd didid", "description",
+        "w2 2asd asdd didid", "popularity", "2"));
+    assertU(adoc("id", "3", "title", "w3", "description", "w3", "popularity",
+        "3"));
+    assertU(adoc("id", "4", "title", "w4", "description", "w4", "popularity",
+        "4"));
+    assertU(adoc("id", "5", "title", "w5", "description", "w5", "popularity",
+        "5"));
+    assertU(adoc("id", "6", "title", "w1 w2", "description", "w1 w2",
+        "popularity", "6"));
+    assertU(adoc("id", "7", "title", "w1 w2 w3 w4 w5", "description",
+        "w1 w2 w3 w4 w5 w8", "popularity", "7"));
+    assertU(adoc("id", "8", "title", "w1 w1 w1 w2 w2 w8", "description",
+        "w1 w1 w1 w2 w2", "popularity", "8"));
+    assertU(commit());
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    aftertest();
+  }
+
+  @Test
+  public void testUserTermScorerQWithDF() throws Exception {
+    // before();
+    loadFeature("matchedTitleDF", SolrFeature.class.getCanonicalName(),
+        "{\"q\":\"w5\",\"df\":\"title\"}");
+    loadModel("Term-matchedTitleDF", LinearModel.class.getCanonicalName(),
+        new String[] {"matchedTitleDF"},
+        "{\"weights\":{\"matchedTitleDF\":1.0}}");
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("title:w1");
+    query.add("fl", "*, score");
+    query.add("rows", "2");
+    query.add("rq", "{!ltr model=Term-matchedTitleDF reRankDocs=4}");
+    query.set("debugQuery", "on");
+
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==4");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='7'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/score==0.0");
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestValueFeature.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestValueFeature.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestValueFeature.java
new file mode 100644
index 0000000..084da4a
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestValueFeature.java
@@ -0,0 +1,165 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.feature;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.ltr.TestRerankBase;
+import org.apache.solr.ltr.model.LinearModel;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestValueFeature extends TestRerankBase {
+
+  @BeforeClass
+  public static void before() throws Exception {
+    setuptest("solrconfig-ltr.xml", "schema.xml");
+
+    assertU(adoc("id", "1", "title", "w1"));
+    assertU(adoc("id", "2", "title", "w2"));
+    assertU(adoc("id", "3", "title", "w3"));
+    assertU(adoc("id", "4", "title", "w4"));
+    assertU(adoc("id", "5", "title", "w5"));
+    assertU(adoc("id", "6", "title", "w1 w2"));
+    assertU(adoc("id", "7", "title", "w1 w2 w3 w4 w5"));
+    assertU(adoc("id", "8", "title", "w1 w1 w1 w2 w2"));
+    assertU(commit());
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    aftertest();
+  }
+
+  @Test
+  public void testValueFeatureWithEmptyValue() throws Exception {
+    final RuntimeException expectedException =
+        new RuntimeException("mismatch: '0'!='500' @ responseHeader/status");
+    try {
+        loadFeature("c2", ValueFeature.class.getCanonicalName(), "{\"value\":\"\"}");
+        fail("testValueFeatureWithEmptyValue failed to throw exception: "+expectedException);
+    } catch (RuntimeException actualException) {
+      assertEquals(expectedException.toString(), actualException.toString());
+    }
+  }
+
+  @Test
+  public void testValueFeatureWithWhitespaceValue() throws Exception {
+    final RuntimeException expectedException =
+        new RuntimeException("mismatch: '0'!='500' @ responseHeader/status");
+    try {
+        loadFeature("c2", ValueFeature.class.getCanonicalName(),
+              "{\"value\":\" \"}");
+        fail("testValueFeatureWithWhitespaceValue failed to throw exception: "+expectedException);
+    } catch (RuntimeException actualException) {
+      assertEquals(expectedException.toString(), actualException.toString());
+    }
+  }
+
+  @Test
+  public void testRerankingWithConstantValueFeatureReplacesDocScore() throws Exception {
+    loadFeature("c3", ValueFeature.class.getCanonicalName(), "c3",
+        "{\"value\":2}");
+    loadModel("m3", LinearModel.class.getCanonicalName(), new String[] {"c3"},
+        "c3", "{\"weights\":{\"c3\":1.0}}");
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("title:w1");
+    query.add("fl", "*, score");
+    query.add("rows", "4");
+    query.add("wt", "json");
+    query.add("rq", "{!ltr model=m3 reRankDocs=4}");
+
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==2.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/score==2.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/score==2.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/score==2.0");
+  }
+
+  @Test
+  public void testRerankingWithEfiValueFeatureReplacesDocScore() throws Exception {
+    loadFeature("c6", ValueFeature.class.getCanonicalName(), "c6",
+        "{\"value\":\"${val6}\"}");
+    loadModel("m6", LinearModel.class.getCanonicalName(), new String[] {"c6"},
+        "c6", "{\"weights\":{\"c6\":1.0}}");
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("title:w1");
+    query.add("fl", "*, score");
+    query.add("rows", "4");
+    query.add("wt", "json");
+    query.add("rq", "{!ltr model=m6 reRankDocs=4 efi.val6='2'}");
+
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==2.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/score==2.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/score==2.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/score==2.0");
+  }
+
+
+  @Test
+  public void testValueFeatureImplicitlyNotRequiredShouldReturnOkStatusCode() throws Exception {
+    loadFeature("c5", ValueFeature.class.getCanonicalName(), "c5",
+        "{\"value\":\"${val6}\"}");
+    loadModel("m5", LinearModel.class.getCanonicalName(), new String[] {"c5"},
+        "c5", "{\"weights\":{\"c5\":1.0}}");
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("title:w1");
+    query.add("fl", "*, score,fvonly:[fvonly]");
+    query.add("rows", "4");
+    query.add("wt", "json");
+    query.add("rq", "{!ltr model=m5 reRankDocs=4}");
+
+    assertJQ("/query" + query.toQueryString(), "/responseHeader/status==0");
+  }
+
+  @Test
+  public void testValueFeatureExplictlyNotRequiredShouldReturnOkStatusCode() throws Exception {
+    loadFeature("c7", ValueFeature.class.getCanonicalName(), "c7",
+        "{\"value\":\"${val7}\",\"required\":false}");
+    loadModel("m7", LinearModel.class.getCanonicalName(), new String[] {"c7"},
+        "c7", "{\"weights\":{\"c7\":1.0}}");
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("title:w1");
+    query.add("fl", "*, score,fvonly:[fvonly]");
+    query.add("rows", "4");
+    query.add("wt", "json");
+    query.add("rq", "{!ltr model=m7 reRankDocs=4}");
+
+    assertJQ("/query" + query.toQueryString(), "/responseHeader/status==0");
+  }
+
+  @Test
+  public void testValueFeatureRequiredShouldReturn400StatusCode() throws Exception {
+    loadFeature("c8", ValueFeature.class.getCanonicalName(), "c8",
+        "{\"value\":\"${val8}\",\"required\":true}");
+    loadModel("m8", LinearModel.class.getCanonicalName(), new String[] {"c8"},
+        "c8", "{\"weights\":{\"c8\":1.0}}");
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("title:w1");
+    query.add("fl", "*, score,fvonly:[fvonly]");
+    query.add("rows", "4");
+    query.add("wt", "json");
+    query.add("rq", "{!ltr model=m8 reRankDocs=4}");
+
+    assertJQ("/query" + query.toQueryString(), "/responseHeader/status==400");
+  }
+
+}
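
Taken together these cases pin down ValueFeature's contract for templated values: if the efi substitution is missing and the feature is not required (the default, or "required":false), the request still answers with status 0, while "required":true turns the same missing value into a 400. The failing definition, exactly as the last test loads it:

    {"value":"${val8}","required":true}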

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestLinearModel.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestLinearModel.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestLinearModel.java
new file mode 100644
index 0000000..e8ee224
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestLinearModel.java
@@ -0,0 +1,207 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.model;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.solr.common.SolrException;
+import org.apache.solr.ltr.TestRerankBase;
+import org.apache.solr.ltr.feature.Feature;
+import org.apache.solr.ltr.norm.IdentityNormalizer;
+import org.apache.solr.ltr.norm.Normalizer;
+import org.apache.solr.ltr.store.FeatureStore;
+import org.apache.solr.ltr.store.rest.ManagedModelStore;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestLinearModel extends TestRerankBase {
+
+  public static LTRScoringModel createLinearModel(String name, List<Feature> features,
+      List<Normalizer> norms,
+      String featureStoreName, List<Feature> allFeatures,
+      Map<String,Object> params) throws ModelException {
+    final LTRScoringModel model = LTRScoringModel.getInstance(solrResourceLoader,
+        LinearModel.class.getCanonicalName(),
+        name,
+        features, norms, featureStoreName, allFeatures, params);
+    return model;
+  }
+
+  static ManagedModelStore store = null;
+  static FeatureStore fstore = null;
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    setuptest();
+    // loadFeatures("features-store-test-model.json");
+    store = getManagedModelStore();
+    fstore = getManagedFeatureStore().getFeatureStore("test");
+
+  }
+
+  @Test
+  public void getInstanceTest() {
+    final Map<String,Object> weights = new HashMap<>();
+    weights.put("constant1", 1d);
+    weights.put("constant5", 1d);
+
+    Map<String,Object> params = new HashMap<String,Object>();
+    final List<Feature> features = getFeatures(new String[] {
+        "constant1", "constant5"});
+    final List<Normalizer> norms =
+        new ArrayList<Normalizer>(
+            Collections.nCopies(features.size(),IdentityNormalizer.INSTANCE));
+    params.put("weights", weights);
+    final LTRScoringModel ltrScoringModel = createLinearModel("test1",
+        features, norms, "test", fstore.getFeatures(),
+        params);
+
+    store.addModel(ltrScoringModel);
+    final LTRScoringModel m = store.getModel("test1");
+    assertEquals(ltrScoringModel, m);
+  }
+
+  @Test
+  public void nullFeatureWeightsTest() {
+    final ModelException expectedException =
+        new ModelException("Model test2 doesn't contain any weights");
+    try {
+      final List<Feature> features = getFeatures(new String[]
+          {"constant1", "constant5"});
+      final List<Normalizer> norms =
+        new ArrayList<Normalizer>(
+            Collections.nCopies(features.size(),IdentityNormalizer.INSTANCE));
+      createLinearModel("test2",
+          features, norms, "test", fstore.getFeatures(), null);
+      fail("unexpectedly got here instead of catching "+expectedException);
+    } catch (ModelException actualException) {
+      assertEquals(expectedException.toString(), actualException.toString());
+    }
+  }
+
+  @Test
+  public void existingNameTest() {
+    final SolrException expectedException =
+        new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+            ModelException.class.getCanonicalName()+": model 'test3' already exists. Please use a different name");
+    try {
+      final List<Feature> features = getFeatures(new String[]
+          {"constant1", "constant5"});
+      final List<Normalizer> norms =
+        new ArrayList<Normalizer>(
+            Collections.nCopies(features.size(),IdentityNormalizer.INSTANCE));
+      final Map<String,Object> weights = new HashMap<>();
+      weights.put("constant1", 1d);
+      weights.put("constant5", 1d);
+
+      Map<String,Object> params = new HashMap<String,Object>();
+      params.put("weights", weights);
+      final LTRScoringModel ltrScoringModel = createLinearModel("test3",
+          features, norms, "test", fstore.getFeatures(),
+              params);
+      store.addModel(ltrScoringModel);
+      final LTRScoringModel m = store.getModel("test3");
+      assertEquals(ltrScoringModel, m);
+      store.addModel(ltrScoringModel);
+      fail("unexpectedly got here instead of catching "+expectedException);
+    } catch (SolrException actualException) {
+      assertEquals(expectedException.toString(), actualException.toString());
+    }
+  }
+
+  @Test
+  public void duplicateFeatureTest() {
+    final ModelException expectedException =
+        new ModelException("duplicated feature constant1 in model test4");
+    try {
+      final List<Feature> features = getFeatures(new String[]
+          {"constant1", "constant1"});
+      final List<Normalizer> norms =
+        new ArrayList<Normalizer>(
+            Collections.nCopies(features.size(),IdentityNormalizer.INSTANCE));
+      final Map<String,Object> weights = new HashMap<>();
+      weights.put("constant1", 1d);
+      weights.put("constant5", 1d);
+
+      Map<String,Object> params = new HashMap<String,Object>();
+      params.put("weights", weights);
+      final LTRScoringModel ltrScoringModel = createLinearModel("test4",
+          features, norms, "test", fstore.getFeatures(),
+              params);
+      store.addModel(ltrScoringModel);
+      fail("unexpectedly got here instead of catching "+expectedException);
+    } catch (ModelException actualException) {
+      assertEquals(expectedException.toString(), actualException.toString());
+    }
+
+  }
+
+  @Test
+  public void missingFeatureWeightTest() {
+    final ModelException expectedException =
+        new ModelException("Model test5 lacks weight(s) for [constant5]");
+    try {
+      final List<Feature> features = getFeatures(new String[]
+          {"constant1", "constant5"});
+      final List<Normalizer> norms =
+        new ArrayList<Normalizer>(
+            Collections.nCopies(features.size(),IdentityNormalizer.INSTANCE));
+      final Map<String,Object> weights = new HashMap<>();
+      weights.put("constant1", 1d);
+      weights.put("constant5missing", 1d);
+
+      Map<String,Object> params = new HashMap<String,Object>();
+      params.put("weights", weights);
+      createLinearModel("test5",
+          features, norms, "test", fstore.getFeatures(),
+              params);
+      fail("unexpectedly got here instead of catching "+expectedException);
+    } catch (ModelException actualException) {
+      assertEquals(expectedException.toString(), actualException.toString());
+    }
+  }
+
+  @Test
+  public void emptyFeaturesTest() {
+    final ModelException expectedException =
+        new ModelException("no features declared for model test6");
+    try {
+      final List<Feature> features = getFeatures(new String[] {});
+      final List<Normalizer> norms =
+        new ArrayList<Normalizer>(
+            Collections.nCopies(features.size(),IdentityNormalizer.INSTANCE));
+      final Map<String,Object> weights = new HashMap<>();
+      weights.put("constant1", 1d);
+      weights.put("constant5missing", 1d);
+
+      Map<String,Object> params = new HashMap<String,Object>();
+      params.put("weights", weights);
+      final LTRScoringModel ltrScoringModel = createLinearModel("test6",
+          features, norms, "test", fstore.getFeatures(),
+          params);
+      store.addModel(ltrScoringModel);
+      fail("unexpectedly got here instead of catching "+expectedException);
+    } catch (ModelException actualException) {
+      assertEquals(expectedException.toString(), actualException.toString());
+    }
+  }
+
+}
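
As the error cases above imply, a LinearModel is a weighted sum over its declared features, configured through a params map whose "weights" entry supplies a weight for each feature. A sketch of the corresponding JSON, with the field names inferred from the createLinearModel arguments rather than copied from a store file in this patch:

    {
      "class": "org.apache.solr.ltr.model.LinearModel",
      "name": "test1",
      "features": [ { "name": "constant1" }, { "name": "constant5" } ],
      "params": { "weights": { "constant1": 1.0, "constant5": 1.0 } }
    }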

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestMultipleAdditiveTreesModel.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestMultipleAdditiveTreesModel.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestMultipleAdditiveTreesModel.java
new file mode 100644
index 0000000..3748331
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestMultipleAdditiveTreesModel.java
@@ -0,0 +1,246 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.model;
+
+//import static org.junit.internal.matchers.StringContains.containsString;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.ltr.TestRerankBase;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+
+public class TestMultipleAdditiveTreesModel extends TestRerankBase {
+
+
+  @BeforeClass
+  public static void before() throws Exception {
+    setuptest("solrconfig-ltr.xml", "schema.xml");
+
+    assertU(adoc("id", "1", "title", "w1", "description", "w1", "popularity","1"));
+    assertU(adoc("id", "2", "title", "w2", "description", "w2", "popularity","2"));
+    assertU(adoc("id", "3", "title", "w3", "description", "w3", "popularity","3"));
+    assertU(adoc("id", "4", "title", "w4", "description", "w4", "popularity","4"));
+    assertU(adoc("id", "5", "title", "w5", "description", "w5", "popularity","5"));
+    assertU(commit());
+
+    loadFeatures("multipleadditivetreesmodel_features.json"); // currently needed to force
+    // scoring on all docs
+    loadModels("multipleadditivetreesmodel.json");
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    aftertest();
+  }
+
+
+  @Test
+  public void testMultipleAdditiveTreesScoringWithAndWithoutEfiFeatureMatches() throws Exception {
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("*:*");
+    query.add("rows", "3");
+    query.add("fl", "*,score");
+
+    // Regular scores
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==1.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/score==1.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/score==1.0");
+
+    // No match scores since user_query not passed in to external feature info
+    // and feature depended on it.
+    query.add("rq", "{!ltr reRankDocs=3 model=multipleadditivetreesmodel efi.user_query=dsjkafljjk}");
+
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==-120.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/score==-120.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/score==-120.0");
+
+    // Matched user query since it was passed in
+    query.remove("rq");
+    query.add("rq", "{!ltr reRankDocs=3 model=multipleadditivetreesmodel efi.user_query=w3}");
+
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='3'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==-20.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/score==-120.0");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/score==-120.0");
+  }
+
+  @Ignore
+  @Test
+  public void multipleAdditiveTreesTestExplain() throws Exception {
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("*:*");
+    query.add("fl", "*,score,[fv]");
+    query.add("rows", "3");
+
+    query.add("rq",
+        "{!ltr reRankDocs=3 model=multipleadditivetreesmodel efi.user_query=w3}");
+
+    // test out the explain feature, make sure it returns something
+    query.setParam("debugQuery", "on");
+    String qryResult = JQ("/query" + query.toQueryString());
+
+    qryResult = qryResult.replaceAll("\n", " ");
+    // FIXME containsString doesn't exist.
+    // assertThat(qryResult, containsString("\"debug\":{"));
+    // qryResult = qryResult.substring(qryResult.indexOf("debug"));
+    //
+    // assertThat(qryResult, containsString("\"explain\":{"));
+    // qryResult = qryResult.substring(qryResult.indexOf("explain"));
+    //
+    // assertThat(qryResult, containsString("multipleadditivetreesmodel"));
+    // assertThat(qryResult,
+    // containsString(MultipleAdditiveTreesModel.class.getCanonicalName()));
+    //
+    // assertThat(qryResult, containsString("-100.0 = tree 0"));
+    // assertThat(qryResult, containsString("50.0 = tree 0"));
+    // assertThat(qryResult, containsString("-20.0 = tree 1"));
+    // assertThat(qryResult, containsString("'matchedTitle':1.0 > 0.5"));
+    // assertThat(qryResult, containsString("'matchedTitle':0.0 <= 0.5"));
+    //
+    // assertThat(qryResult, containsString(" Go Right "));
+    // assertThat(qryResult, containsString(" Go Left "));
+    // assertThat(qryResult,
+    // containsString("'this_feature_doesnt_exist' does not exist in FV"));
+  }
+
+  @Test
+  public void multipleAdditiveTreesTestNoParams() throws Exception {
+    final ModelException expectedException =
+        new ModelException("no trees declared for model multipleadditivetreesmodel_no_params");
+    try {
+        createModelFromFiles("multipleadditivetreesmodel_no_params.json",
+              "multipleadditivetreesmodel_features.json");
+        fail("multipleAdditiveTreesTestNoParams failed to throw exception: "+expectedException);
+    } catch (Exception actualException) {
+      Throwable rootError = getRootCause(actualException);
+      assertEquals(expectedException.toString(), rootError.toString());
+    }
+
+  }
+
+  @Test
+  public void multipleAdditiveTreesTestEmptyParams() throws Exception {
+    final ModelException expectedException =
+        new ModelException("no trees declared for model multipleadditivetreesmodel_no_trees");
+    try {
+        createModelFromFiles("multipleadditivetreesmodel_no_trees.json",
+            "multipleadditivetreesmodel_features.json");
+        fail("multipleAdditiveTreesTestEmptyParams failed to throw exception: "+expectedException);
+    } catch (Exception actualException) {
+      Throwable rootError = getRootCause(actualException);
+      assertEquals(expectedException.toString(), rootError.toString());
+    }
+  }
+
+  @Test
+  public void multipleAdditiveTreesTestNoWeight() throws Exception {
+    final ModelException expectedException =
+        new ModelException("MultipleAdditiveTreesModel tree doesn't contain a weight");
+    try {
+        createModelFromFiles("multipleadditivetreesmodel_no_weight.json",
+            "multipleadditivetreesmodel_features.json");
+        fail("multipleAdditiveTreesTestNoWeight failed to throw exception: "+expectedException);
+    } catch (Exception actualException) {
+      Throwable rootError = getRootCause(actualException);
+      assertEquals(expectedException.toString(), rootError.toString());
+    }
+  }
+
+  @Test
+  public void multipleAdditiveTreesTestTreesParamDoesNotContainTree() throws Exception {
+    final ModelException expectedException =
+        new ModelException("MultipleAdditiveTreesModel tree doesn't contain a tree");
+    try {
+        createModelFromFiles("multipleadditivetreesmodel_no_tree.json",
+            "multipleadditivetreesmodel_features.json");
+        fail("multipleAdditiveTreesTestTreesParamDoesNotContatinTree failed to throw exception: "+expectedException);
+    } catch (Exception actualException) {
+      Throwable rootError = getRootCause(actualException);
+      assertEquals(expectedException.toString(), rootError.toString());
+    }
+  }
+
+  @Test
+  public void multipleAdditiveTreesTestNoFeaturesSpecified() throws Exception {
+    final ModelException expectedException =
+        new ModelException("no features declared for model multipleadditivetreesmodel_no_features");
+    try {
+        createModelFromFiles("multipleadditivetreesmodel_no_features.json",
+            "multipleadditivetreesmodel_features.json");
+        fail("multipleAdditiveTreesTestNoFeaturesSpecified failed to throw exception: "+expectedException);
+    } catch (ModelException actualException) {
+      assertEquals(expectedException.toString(), actualException.toString());
+    }
+  }
+
+  @Test
+  public void multipleAdditiveTreesTestNoRight() throws Exception {
+    final ModelException expectedException =
+        new ModelException("MultipleAdditiveTreesModel tree node is missing right");
+    try {
+        createModelFromFiles("multipleadditivetreesmodel_no_right.json",
+            "multipleadditivetreesmodel_features.json");
+        fail("multipleAdditiveTreesTestNoRight failed to throw exception: "+expectedException);
+    } catch (Exception actualException) {
+      Throwable rootError = getRootCause(actualException);
+      assertEquals(expectedException.toString(), rootError.toString());
+    }
+  }
+
+  @Test
+  public void multipleAdditiveTreesTestNoLeft() throws Exception {
+    final ModelException expectedException =
+        new ModelException("MultipleAdditiveTreesModel tree node is missing left");
+    try {
+        createModelFromFiles("multipleadditivetreesmodel_no_left.json",
+            "multipleadditivetreesmodel_features.json");
+        fail("multipleAdditiveTreesTestNoLeft failed to throw exception: "+expectedException);
+    } catch (Exception actualException) {
+      Throwable rootError = getRootCause(actualException);
+      assertEquals(expectedException.toString(), rootError.toString());
+    }
+  }
+
+  @Test
+  public void multipleAdditiveTreesTestNoThreshold() throws Exception {
+    final ModelException expectedException =
+        new ModelException("MultipleAdditiveTreesModel tree node is missing threshold");
+    try {
+        createModelFromFiles("multipleadditivetreesmodel_no_threshold.json",
+            "multipleadditivetreesmodel_features.json");
+        fail("multipleAdditiveTreesTestNoThreshold failed to throw exception: "+expectedException);
+    } catch (Exception actualException) {
+      Throwable rootError = getRootCause(actualException);
+      assertEquals(expectedException.toString(), rootError.toString());
+    }
+  }
+
+  @Test
+  public void multipleAdditiveTreesTestMissingTreeFeature() throws Exception {
+    final ModelException expectedException =
+        new ModelException("MultipleAdditiveTreesModel tree node is leaf with left=-100.0 and right=75.0");
+    try {
+        createModelFromFiles("multipleadditivetreesmodel_no_feature.json",
+              "multipleadditivetreesmodel_features.json");
+        fail("multipleAdditiveTreesTestMissingTreeFeature failed to throw exception: "+expectedException);
+    } catch (ModelException actualException) {
+      assertEquals(expectedException.toString(), actualException.toString());
+    }
+  }
+}
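[Editorial note] The validation errors exercised above map one-to-one onto required keys in the model JSON ("trees", per-tree "weight" and "tree", per-node "feature", "threshold", "left" and "right"). For orientation only -- this sketch follows the LTR reference examples rather than any file shipped in this patch -- a minimal well-formed MultipleAdditiveTreesModel, whose feature could consume the efi.user_query value passed by the scoring test, looks like:

    {
      "class"    : "org.apache.solr.ltr.model.MultipleAdditiveTreesModel",
      "name"     : "multipleadditivetreesmodel",
      "features" : [ { "name" : "matchedTitle" } ],
      "params"   : {
        "trees" : [ {
          "weight" : 1,
          "tree"   : {
            "feature"   : "matchedTitle",
            "threshold" : 0.5,
            "left"      : { "value" : -100 },
            "right"     : { "value" : 75 }
          }
        } ]
      }
    }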

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestMinMaxNormalizer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestMinMaxNormalizer.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestMinMaxNormalizer.java
new file mode 100644
index 0000000..055b3bc
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestMinMaxNormalizer.java
@@ -0,0 +1,120 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.norm;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.solr.core.SolrResourceLoader;
+import org.junit.Test;
+
+public class TestMinMaxNormalizer {
+
+  private final SolrResourceLoader solrResourceLoader = new SolrResourceLoader();
+
+  private Normalizer implTestMinMax(Map<String,Object> params,
+      float expectedMin, float expectedMax) {
+    final Normalizer n = Normalizer.getInstance(
+        solrResourceLoader,
+        MinMaxNormalizer.class.getCanonicalName(),
+        params);
+    assertTrue(n instanceof MinMaxNormalizer);
+    final MinMaxNormalizer mmn = (MinMaxNormalizer)n;
+    assertEquals(expectedMin, mmn.getMin(), 0.0);
+    assertEquals(expectedMax, mmn.getMax(), 0.0);
+    return n;
+  }
+
+  @Test
+  public void testInvalidMinMaxNoParams() {
+    implTestMinMax(new HashMap<String,Object>(),
+        Float.NEGATIVE_INFINITY,
+        Float.POSITIVE_INFINITY);
+  }
+
+  @Test
+  public void testInvalidMinMaxMissingMax() {
+    final Map<String,Object> params = new HashMap<String,Object>();
+    params.put("min", "0.0f");
+    implTestMinMax(params,
+        0.0f,
+        Float.POSITIVE_INFINITY);
+  }
+
+  @Test
+  public void testInvalidMinMaxMissingMin() {
+    final Map<String,Object> params = new HashMap<String,Object>();
+    params.put("max", "0.0f");
+    implTestMinMax(params,
+        Float.NEGATIVE_INFINITY,
+        0.0f);
+  }
+
+  @Test
+  public void testMinMaxNormalizerMinLargerThanMax() {
+    final Map<String,Object> params = new HashMap<String,Object>();
+    params.put("min", "10.0f");
+    params.put("max", "0.0f");
+    implTestMinMax(params,
+        10.0f,
+        0.0f);
+  }
+
+  @Test
+  public void testMinMaxNormalizerMinEqualToMax() {
+    final Map<String,Object> params = new HashMap<String,Object>();
+    params.put("min", "10.0f");
+    params.put("max", "10.0f");
+    final NormalizerException expectedException =
+        new NormalizerException("MinMax Normalizer delta must not be zero "
+            + "| min = 10.0,max = 10.0,delta = 0.0");
+    try {
+        implTestMinMax(params,
+              10.0f,
+              10.0f);
+        fail("testMinMaxNormalizerMinEqualToMax failed to throw exception: "+expectedException);
+    } catch(NormalizerException actualException) {
+        assertEquals(expectedException.toString(), actualException.toString());
+    }
+  }
+
+  @Test
+  public void testNormalizer() {
+    final Map<String,Object> params = new HashMap<String,Object>();
+    params.put("min", "5.0f");
+    params.put("max", "10.0f");
+    final Normalizer n =
+        implTestMinMax(params,
+            5.0f,
+            10.0f);
+
+    float value = 8;
+    assertEquals((value - 5f) / (10f - 5f), n.normalize(value), 0.0001);
+    value = 100;
+    assertEquals((value - 5f) / (10f - 5f), n.normalize(value), 0.0001);
+    value = 150;
+    assertEquals((value - 5f) / (10f - 5f), n.normalize(value), 0.0001);
+    value = -1;
+    assertEquals((value - 5f) / (10f - 5f), n.normalize(value), 0.0001);
+    value = 5;
+    assertEquals((value - 5f) / (10f - 5f), n.normalize(value), 0.0001);
+  }
+}
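[Editorial note] The assertions above encode plain min-max scaling with no clamping; a minimal sketch of the arithmetic (the shipped normalizer additionally rejects a zero delta, as testMinMaxNormalizerMinEqualToMax shows):

    float min = 5.0f, max = 10.0f;
    float delta = max - min;                  // must not be zero
    float normalized = (8.0f - min) / delta;  // 0.6; out-of-range inputs
                                              // such as 150 or -1 are not clamped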

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestStandardNormalizer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestStandardNormalizer.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestStandardNormalizer.java
new file mode 100644
index 0000000..10fa972
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestStandardNormalizer.java
@@ -0,0 +1,132 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.norm;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.solr.core.SolrResourceLoader;
+import org.junit.Test;
+
+public class TestStandardNormalizer {
+
+  private final SolrResourceLoader solrResourceLoader = new SolrResourceLoader();
+
+  private Normalizer implTestStandard(Map<String,Object> params,
+      float expectedAvg, float expectedStd) {
+    final Normalizer n = Normalizer.getInstance(
+        solrResourceLoader,
+        StandardNormalizer.class.getCanonicalName(),
+        params);
+    assertTrue(n instanceof StandardNormalizer);
+    final StandardNormalizer sn = (StandardNormalizer)n;
+    assertEquals(expectedAvg, sn.getAvg(), 0.0);
+    assertEquals(expectedStd, sn.getStd(), 0.0);
+    return n;
+  }
+
+  @Test
+  public void testNormalizerNoParams() {
+    implTestStandard(new HashMap<String,Object>(),
+        0.0f,
+        1.0f);
+  }
+
+  @Test
+  public void testInvalidSTD() {
+    final Map<String,Object> params = new HashMap<String,Object>();
+    params.put("std", "0f");
+    final NormalizerException expectedException =
+        new NormalizerException("Standard Normalizer standard deviation must be positive "
+            + "| avg = 0.0,std = 0.0");
+    try {
+        implTestStandard(params,
+              0.0f,
+              0.0f);
+        fail("testInvalidSTD failed to throw exception: "+expectedException);
+    } catch(NormalizerException actualException) {
+      assertEquals(expectedException.toString(), actualException.toString());
+    }
+  }
+
+  @Test
+  public void testInvalidSTD2() {
+    final Map<String,Object> params = new HashMap<String,Object>();
+    params.put("std", "-1f");
+    final NormalizerException expectedException =
+        new NormalizerException("Standard Normalizer standard deviation must be positive "
+            + "| avg = 0.0,std = -1.0");
+    try {
+        implTestStandard(params,
+              0.0f,
+              -1f);
+        fail("testInvalidSTD2 failed to throw exception: "+expectedException);
+    } catch(NormalizerException actualException) {
+      assertEquals(expectedException.toString(), actualException.toString());
+    }
+  }
+
+  @Test
+  public void testInvalidSTD3() {
+    final Map<String,Object> params = new HashMap<String,Object>();
+    params.put("avg", "1f");
+    params.put("std", "0f");
+    final NormalizerException expectedException =
+        new NormalizerException("Standard Normalizer standard deviation must be positive "
+            + "| avg = 1.0,std = 0.0");
+    try {
+        implTestStandard(params,
+              1f,
+              0f);
+        fail("testInvalidSTD3 failed to throw exception: "+expectedException);
+    } catch(NormalizerException actualException) {
+      assertEquals(expectedException.toString(), actualException.toString());
+    }
+  }
+
+  @Test
+  public void testNormalizer() {
+    Map<String,Object> params = new HashMap<String,Object>();
+    params.put("avg", "0f");
+    params.put("std", "1f");
+    final Normalizer identity =
+        implTestStandard(params,
+            0f,
+            1f);
+
+    float value = 8;
+    assertEquals(value, identity.normalize(value), 0.0001);
+    value = 150;
+    assertEquals(value, identity.normalize(value), 0.0001);
+    params = new HashMap<String,Object>();
+    params.put("avg", "10f");
+    params.put("std", "1.5f");
+    final Normalizer norm = Normalizer.getInstance(
+        solrResourceLoader,
+        StandardNormalizer.class.getCanonicalName(),
+        params);
+
+    for (final float v : new float[] {10f, 20f, 25f, 30f, 31f, 40f, 42f, 100f,
+        10000000f}) {
+      assertEquals((v - 10f) / (1.5f), norm.normalize(v), 0.0001);
+    }
+  }
+}
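[Editorial note] The expected arithmetic here is z-score standardization, with avg = 0 and std = 1 acting as the identity; a minimal sketch (the shipped normalizer additionally requires std > 0, as the testInvalidSTD cases show):

    float avg = 10.0f, std = 1.5f;
    float normalized = (20.0f - avg) / std;   // 6.6667; with avg=0, std=1
                                              // every value maps to itself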

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/store/rest/TestManagedFeatureStore.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/store/rest/TestManagedFeatureStore.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/store/rest/TestManagedFeatureStore.java
new file mode 100644
index 0000000..14373fb
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/store/rest/TestManagedFeatureStore.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.store.rest;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.lucene.util.LuceneTestCase;
+
+public class TestManagedFeatureStore extends LuceneTestCase {
+
+  public static Map<String,Object> createMap(String name, String className, Map<String,Object> params) {
+    final Map<String,Object> map = new HashMap<String,Object>();
+    map.put(ManagedFeatureStore.NAME_KEY, name);
+    map.put(ManagedFeatureStore.CLASS_KEY, className);
+    if (params != null) {
+      map.put(ManagedFeatureStore.PARAMS_KEY, params);
+    }
+    return map;
+  }
+
+}
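[Editorial note] A hypothetical use of this helper, mirroring the JSON shape the feature-store endpoint accepts elsewhere in this patch (feature name and params are illustrative):

    Map<String,Object> params = new HashMap<>();
    params.put("value", 1);
    Map<String,Object> feature =
        createMap("test1", "org.apache.solr.ltr.feature.ValueFeature", params);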

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/store/rest/TestModelManager.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/store/rest/TestModelManager.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/store/rest/TestModelManager.java
new file mode 100644
index 0000000..855f053
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/store/rest/TestModelManager.java
@@ -0,0 +1,163 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.store.rest;
+
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.ltr.TestRerankBase;
+import org.apache.solr.ltr.feature.FieldValueFeature;
+import org.apache.solr.ltr.feature.ValueFeature;
+import org.apache.solr.ltr.model.LinearModel;
+import org.apache.solr.rest.ManagedResource;
+import org.apache.solr.rest.ManagedResourceStorage;
+import org.apache.solr.rest.RestManager;
+import org.apache.solr.search.LTRQParserPlugin;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestModelManager extends TestRerankBase {
+
+  @BeforeClass
+  public static void init() throws Exception {
+    setuptest();
+  }
+
+  @Before
+  public void restart() throws Exception {
+    restTestHarness.delete(ManagedFeatureStore.REST_END_POINT + "/*");
+    restTestHarness.delete(ManagedModelStore.REST_END_POINT + "/*");
+
+  }
+
+  @Test
+  public void test() throws Exception {
+    final SolrResourceLoader loader = new SolrResourceLoader(
+        tmpSolrHome.toPath());
+
+    final RestManager.Registry registry = loader.getManagedResourceRegistry();
+    assertNotNull(
+        "Expected a non-null RestManager.Registry from the SolrResourceLoader!",
+        registry);
+
+    final String resourceId = "/schema/fstore1";
+    registry.registerManagedResource(resourceId, ManagedFeatureStore.class,
+        new LTRQParserPlugin());
+
+    final String resourceId2 = "/schema/mstore1";
+    registry.registerManagedResource(resourceId2, ManagedModelStore.class,
+        new LTRQParserPlugin());
+
+    final NamedList<String> initArgs = new NamedList<>();
+
+    final RestManager restManager = new RestManager();
+    restManager.init(loader, initArgs,
+        new ManagedResourceStorage.InMemoryStorageIO());
+
+    final ManagedResource res = restManager.getManagedResource(resourceId);
+    assertTrue(res instanceof ManagedFeatureStore);
+    assertEquals(res.getResourceId(), resourceId);
+
+  }
+
+  @Test
+  public void testRestManagerEndpoints() throws Exception {
+    // relies on these ManagedResources being activated in the
+    // schema-rest.xml used by this test
+    assertJQ("/schema/managed", "/responseHeader/status==0");
+
+    final String valueFeatureClassName = ValueFeature.class.getCanonicalName();
+
+    // Add features
+    String feature = "{\"name\": \"test1\", \"class\": \""+valueFeatureClassName+"\", \"params\": {\"value\": 1} }";
+    assertJPut(ManagedFeatureStore.REST_END_POINT, feature,
+        "/responseHeader/status==0");
+
+    feature = "{\"name\": \"test2\", \"class\": \""+valueFeatureClassName+"\", \"params\": {\"value\": 1} }";
+    assertJPut(ManagedFeatureStore.REST_END_POINT, feature,
+        "/responseHeader/status==0");
+
+    feature = "{\"name\": \"test3\", \"class\": \""+valueFeatureClassName+"\", \"params\": {\"value\": 1} }";
+    assertJPut(ManagedFeatureStore.REST_END_POINT, feature,
+        "/responseHeader/status==0");
+
+    feature = "{\"name\": \"test33\", \"store\": \"TEST\", \"class\": \""+valueFeatureClassName+"\", \"params\": {\"value\": 1} }";
+    assertJPut(ManagedFeatureStore.REST_END_POINT, feature,
+        "/responseHeader/status==0");
+
+    final String multipleFeatures = "[{\"name\": \"test4\", \"class\": \""+valueFeatureClassName+"\", \"params\": {\"value\": 1} }"
+        + ",{\"name\": \"test5\", \"class\": \""+valueFeatureClassName+"\", \"params\": {\"value\": 1} } ]";
+    assertJPut(ManagedFeatureStore.REST_END_POINT, multipleFeatures,
+        "/responseHeader/status==0");
+
+    final String fieldValueFeatureClassName = FieldValueFeature.class.getCanonicalName();
+
+    // Add a bad feature (wrong params); "corrresponding" (sic) below is the expected error message verbatim
+    final String badfeature = "{\"name\": \"fvalue\", \"class\": \""+fieldValueFeatureClassName+"\", \"params\": {\"value\": 1} }";
+    assertJPut(ManagedFeatureStore.REST_END_POINT, badfeature,
+        "/error/msg/=='No setter corrresponding to \\'value\\' in "+fieldValueFeatureClassName+"'");
+
+    final String linearModelClassName = LinearModel.class.getCanonicalName();
+
+    // Add models
+    String model = "{ \"name\":\"testmodel1\", \"class\":\""+linearModelClassName+"\", \"features\":[] }";
+    // fails since it does not have features
+    assertJPut(ManagedModelStore.REST_END_POINT, model,
+        "/responseHeader/status==400");
+    // fails since it does not have weights
+    model = "{ \"name\":\"testmodel2\", \"class\":\""+linearModelClassName+"\", \"features\":[{\"name\":\"test1\"}, {\"name\":\"test2\"}] }";
+    assertJPut(ManagedModelStore.REST_END_POINT, model,
+        "/responseHeader/status==400");
+    // success
+    model = "{ \"name\":\"testmodel3\", \"class\":\""+linearModelClassName+"\", \"features\":[{\"name\":\"test1\"}, {\"name\":\"test2\"}],\"params\":{\"weights\":{\"test1\":1.5,\"test2\":2.0}}}";
+    assertJPut(ManagedModelStore.REST_END_POINT, model,
+        "/responseHeader/status==0");
+    // success
+    final String multipleModels = "[{ \"name\":\"testmodel4\", \"class\":\""+linearModelClassName+"\", \"features\":[{\"name\":\"test1\"}, {\"name\":\"test2\"}],\"params\":{\"weights\":{\"test1\":1.5,\"test2\":2.0}} }\n"
+        + ",{ \"name\":\"testmodel5\", \"class\":\""+linearModelClassName+"\", \"features\":[{\"name\":\"test1\"}, {\"name\":\"test2\"}],\"params\":{\"weights\":{\"test1\":1.5,\"test2\":2.0}} } ]";
+    assertJPut(ManagedModelStore.REST_END_POINT, multipleModels,
+        "/responseHeader/status==0");
+    final String qryResult = JQ(ManagedModelStore.REST_END_POINT);
+
+    assertTrue(qryResult.contains("\"name\":\"testmodel3\"")
+        && qryResult.contains("\"name\":\"testmodel4\"")
+        && qryResult.contains("\"name\":\"testmodel5\""));
+    /*
+     * assertJQ(LTRParams.MSTORE_END_POINT, "/models/[0]/name=='testmodel3'");
+     * assertJQ(LTRParams.MSTORE_END_POINT, "/models/[1]/name=='testmodel4'");
+     * assertJQ(LTRParams.MSTORE_END_POINT, "/models/[2]/name=='testmodel5'");
+     */
+    assertJQ(ManagedFeatureStore.REST_END_POINT,
+        "/featureStores==['TEST','_DEFAULT_']");
+    assertJQ(ManagedFeatureStore.REST_END_POINT + "/_DEFAULT_",
+        "/features/[0]/name=='test1'");
+    assertJQ(ManagedFeatureStore.REST_END_POINT + "/TEST",
+        "/features/[0]/name=='test33'");
+  }
+
+  @Test
+  public void testEndpointsFromFile() throws Exception {
+    loadFeatures("features-linear.json");
+    loadModels("linear-model.json");
+
+    assertJQ(ManagedModelStore.REST_END_POINT,
+        "/models/[0]/name=='6029760550880411648'");
+    assertJQ(ManagedFeatureStore.REST_END_POINT + "/_DEFAULT_",
+        "/features/[1]/name=='description'");
+  }
+
+}
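[Editorial note] Pretty-printed for readability, the testmodel3 payload that succeeds above is:

    {
      "name"     : "testmodel3",
      "class"    : "org.apache.solr.ltr.model.LinearModel",
      "features" : [ { "name" : "test1" }, { "name" : "test2" } ],
      "params"   : { "weights" : { "test1" : 1.5, "test2" : 2.0 } }
    }

The two 400 responses before it fail precisely because "features" is empty in the first model and the "weights" params are absent in the second.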

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/store/rest/TestModelManagerPersistence.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/store/rest/TestModelManagerPersistence.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/store/rest/TestModelManagerPersistence.java
new file mode 100644
index 0000000..66c26fd
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/store/rest/TestModelManagerPersistence.java
@@ -0,0 +1,121 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.store.rest;
+
+import java.util.ArrayList;
+import java.util.Map;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.solr.ltr.TestRerankBase;
+import org.apache.solr.ltr.feature.ValueFeature;
+import org.apache.solr.ltr.model.LinearModel;
+import org.junit.Before;
+import org.junit.Test;
+import org.noggit.ObjectBuilder;
+
+public class TestModelManagerPersistence extends TestRerankBase {
+
+  @Before
+  public void init() throws Exception {
+    setupPersistenttest();
+  }
+
+  // executed first
+  @Test
+  public void testFeaturePersistence() throws Exception {
+
+    loadFeature("feature", ValueFeature.class.getCanonicalName(), "test",
+        "{\"value\":2}");
+    assertJQ(ManagedFeatureStore.REST_END_POINT + "/test",
+        "/features/[0]/name=='feature'");
+    restTestHarness.reload();
+    assertJQ(ManagedFeatureStore.REST_END_POINT + "/test",
+        "/features/[0]/name=='feature'");
+    loadFeature("feature1", ValueFeature.class.getCanonicalName(), "test1",
+        "{\"value\":2}");
+    loadFeature("feature2", ValueFeature.class.getCanonicalName(), "test",
+        "{\"value\":2}");
+    loadFeature("feature3", ValueFeature.class.getCanonicalName(), "test2",
+        "{\"value\":2}");
+    assertJQ(ManagedFeatureStore.REST_END_POINT + "/test",
+        "/features/[0]/name=='feature'");
+    assertJQ(ManagedFeatureStore.REST_END_POINT + "/test",
+        "/features/[1]/name=='feature2'");
+    assertJQ(ManagedFeatureStore.REST_END_POINT + "/test1",
+        "/features/[0]/name=='feature1'");
+    assertJQ(ManagedFeatureStore.REST_END_POINT + "/test2",
+        "/features/[0]/name=='feature3'");
+    restTestHarness.reload();
+    assertJQ(ManagedFeatureStore.REST_END_POINT + "/test",
+        "/features/[0]/name=='feature'");
+    assertJQ(ManagedFeatureStore.REST_END_POINT + "/test",
+        "/features/[1]/name=='feature2'");
+    assertJQ(ManagedFeatureStore.REST_END_POINT + "/test1",
+        "/features/[0]/name=='feature1'");
+    assertJQ(ManagedFeatureStore.REST_END_POINT + "/test2",
+        "/features/[0]/name=='feature3'");
+    loadModel("test-model", LinearModel.class.getCanonicalName(),
+        new String[] {"feature"}, "test", "{\"weights\":{\"feature\":1.0}}");
+    loadModel("test-model2", LinearModel.class.getCanonicalName(),
+        new String[] {"feature1"}, "test1", "{\"weights\":{\"feature1\":1.0}}");
+    final String fstorecontent = FileUtils
+        .readFileToString(fstorefile, "UTF-8");
+    final String mstorecontent = FileUtils
+        .readFileToString(mstorefile, "UTF-8");
+
+    //check feature/model stores on deletion
+    final ArrayList<Object> fStore = (ArrayList<Object>) ((Map<String,Object>)
+        ObjectBuilder.fromJSON(fstorecontent)).get("managedList");
+    for (int idx = 0;idx < fStore.size(); ++ idx) {
+      String store = (String) ((Map<String,Object>)fStore.get(idx)).get("store");
+      assertTrue(store.equals("test") || store.equals("test2") || store.equals("test1"));
+    }
+
+    final ArrayList<Object> mStore = (ArrayList<Object>) ((Map<String,Object>)
+        ObjectBuilder.fromJSON(mstorecontent)).get("managedList");
+    for (int idx = 0;idx < mStore.size(); ++ idx) {
+      String store = (String) ((Map<String,Object>)mStore.get(idx)).get("store");
+      assertTrue(store.equals("test") || store.equals("test1"));
+    }
+
+    assertJDelete(ManagedFeatureStore.REST_END_POINT + "/test2",
+        "/responseHeader/status==0");
+    assertJDelete(ManagedModelStore.REST_END_POINT + "/test-model2",
+        "/responseHeader/status==0");
+    assertJQ(ManagedFeatureStore.REST_END_POINT + "/test2",
+        "/features/==[]");
+    assertJQ(ManagedModelStore.REST_END_POINT + "/test-model2",
+        "/models/[0]/name=='test-model'");
+    restTestHarness.reload();
+    assertJQ(ManagedFeatureStore.REST_END_POINT + "/test2",
+        "/features/==[]");
+    assertJQ(ManagedModelStore.REST_END_POINT + "/test-model2",
+        "/models/[0]/name=='test-model'");
+
+    assertJDelete(ManagedModelStore.REST_END_POINT + "/*",
+        "/responseHeader/status==0");
+    assertJDelete(ManagedFeatureStore.REST_END_POINT + "/*",
+        "/responseHeader/status==0");
+    assertJQ(ManagedFeatureStore.REST_END_POINT + "/test1",
+        "/features/==[]");
+    restTestHarness.reload();
+    assertJQ(ManagedFeatureStore.REST_END_POINT + "/test1",
+        "/features/==[]");
+
+  }
+
+}
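[Editorial note] The persisted stores read back above are ManagedResource JSON files; judging only by the keys this test inspects ("managedList" and each entry's "store"), the feature-store file plausibly contains entries along these lines (a sketch, not the literal file):

    {
      "initArgs"    : {},
      "managedList" : [
        { "name"  : "feature",
          "class" : "org.apache.solr.ltr.feature.ValueFeature",
          "store" : "test",
          "params": { "value" : 2 } }
      ]
    }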


[44/50] [abbrv] lucene-solr:apiv2: LUCENE-7501: fix back-compat bug; add test

Posted by sa...@apache.org.
LUCENE-7501: fix back-compat bug; add test


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/69e654b3
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/69e654b3
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/69e654b3

Branch: refs/heads/apiv2
Commit: 69e654b3737a97fea7ffc9f57c8fad5e85f72452
Parents: 5a66b3b
Author: Mike McCandless <mi...@apache.org>
Authored: Wed Nov 2 05:36:00 2016 -0400
Committer: Mike McCandless <mi...@apache.org>
Committed: Wed Nov 2 05:48:19 2016 -0400

----------------------------------------------------------------------
 .../index/TestBackwardsCompatibility.java       |   1 +
 .../lucene/index/TestManyPointsInOldIndex.java  |  74 +++++++++++++++++++
 .../org/apache/lucene/index/manypointsindex.zip | Bin 0 -> 3739 bytes
 .../org/apache/lucene/index/CheckIndex.java     |  31 +++++---
 .../org/apache/lucene/util/bkd/BKDReader.java   |  48 ++++++++++--
 5 files changed, 137 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/69e654b3/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
index 2371b01..8bf7141 100644
--- a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
@@ -157,6 +157,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     for(int i=0;i<50;i++) {
       writer.addDocument(docs.nextDoc());
     }
+    docs.close();
     writer.close();
     dir.close();
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/69e654b3/lucene/backward-codecs/src/test/org/apache/lucene/index/TestManyPointsInOldIndex.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestManyPointsInOldIndex.java b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestManyPointsInOldIndex.java
new file mode 100644
index 0000000..043979b
--- /dev/null
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestManyPointsInOldIndex.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.index;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.IntPoint;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.store.BaseDirectoryWrapper;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.TestUtil;
+
+// LUCENE-7501: with BKDWriter's default maxPointsInLeafNode of 1024, the 1025
+// documents written below force a non-leaf BKD node in the generated index,
+// exercising the 1D splitDim back-compat decoding in BKDReader.
+public class TestManyPointsInOldIndex extends LuceneTestCase {
+
+// To regenerate the back index zip:
+//
+// Compile:
+//   1) temporarily remove 'extends LuceneTestCase' above (else java doesn't see our static void main)
+//   2) ant compile-test
+//
+// Run:
+//   1) java -cp ../build/backward-codecs/classes/test:../build/core/classes/java org.apache.lucene.index.TestManyPointsInOldIndex
+//
+//  cd manypointsindex
+//  zip manypointsindex.zip *
+
+  public static void main(String[] args) throws IOException {
+    Directory dir = FSDirectory.open(Paths.get("manypointsindex"));
+    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig());
+    for(int i=0;i<1025;i++) {
+      Document doc = new Document();
+      doc.add(new IntPoint("intpoint", 1025-i));
+      w.addDocument(doc);
+    }
+    w.close();
+    dir.close();
+  }
+
+  public void testCheckOldIndex() throws IOException {
+    Path path = createTempDir("manypointsindex");
+    InputStream resource = getClass().getResourceAsStream("manypointsindex.zip");
+    assertNotNull("manypointsindex not found", resource);
+    TestUtil.unzip(resource, path);
+    BaseDirectoryWrapper dir = newFSDirectory(path);
+    // disable default checking...
+    dir.setCheckIndexOnClose(false);
+
+    // ... because we check ourselves here:
+    TestUtil.checkIndex(dir, false, true, null);
+    dir.close();
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/69e654b3/lucene/backward-codecs/src/test/org/apache/lucene/index/manypointsindex.zip
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/manypointsindex.zip b/lucene/backward-codecs/src/test/org/apache/lucene/index/manypointsindex.zip
new file mode 100644
index 0000000..c7c0bf7
Binary files /dev/null and b/lucene/backward-codecs/src/test/org/apache/lucene/index/manypointsindex.zip differ

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/69e654b3/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
index f3d3562..7bc08f3 100644
--- a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
+++ b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
@@ -771,8 +771,10 @@ public final class CheckIndex implements Closeable {
             throw new RuntimeException("Stored Field test failed");
           } else if (segInfoStat.termVectorStatus.error != null) {
             throw new RuntimeException("Term Vector test failed");
-          }  else if (segInfoStat.docValuesStatus.error != null) {
+          } else if (segInfoStat.docValuesStatus.error != null) {
             throw new RuntimeException("DocValues test failed");
+          } else if (segInfoStat.pointsStatus.error != null) {
+            throw new RuntimeException("Points test failed");
           }
         }
 
@@ -1865,12 +1867,12 @@ public final class CheckIndex implements Closeable {
                                    // Compare to last cell:
                                    if (StringHelper.compare(bytesPerDim, packedValue, offset, lastMinPackedValue, offset) < 0) {
                                      // This doc's point, in this dimension, is lower than the minimum value of the last cell checked:
-                                     throw new RuntimeException("packed points value " + Arrays.toString(packedValue) + " for docID=" + docID + " is out-of-bounds of the last cell min=" + Arrays.toString(lastMinPackedValue) + " max=" + Arrays.toString(lastMaxPackedValue) + " dim=" + dim);
+                                     throw new RuntimeException("packed points value " + Arrays.toString(packedValue) + " for field=\"" + fieldInfo.name + "\", docID=" + docID + " is out-of-bounds of the last cell min=" + Arrays.toString(lastMinPackedValue) + " max=" + Arrays.toString(lastMaxPackedValue) + " dim=" + dim);
                                    }
 
                                    if (StringHelper.compare(bytesPerDim, packedValue, offset, lastMaxPackedValue, offset) > 0) {
                                      // This doc's point, in this dimension, is greater than the maximum value of the last cell checked:
-                                     throw new RuntimeException("packed points value " + Arrays.toString(packedValue) + " for docID=" + docID + " is out-of-bounds of the last cell min=" + Arrays.toString(lastMinPackedValue) + " max=" + Arrays.toString(lastMaxPackedValue) + " dim=" + dim);
+                                     throw new RuntimeException("packed points value " + Arrays.toString(packedValue) + " for field=\"" + fieldInfo.name + "\", docID=" + docID + " is out-of-bounds of the last cell min=" + Arrays.toString(lastMinPackedValue) + " max=" + Arrays.toString(lastMaxPackedValue) + " dim=" + dim);
                                    }
                                  }
 
@@ -1879,10 +1881,10 @@ public final class CheckIndex implements Closeable {
                                  if (dimCount == 1) {
                                    int cmp = StringHelper.compare(bytesPerDim, lastPackedValue, 0, packedValue, 0);
                                    if (cmp > 0) {
-                                     throw new RuntimeException("packed points value " + Arrays.toString(packedValue) + " for docID=" + docID + " is out-of-order vs the previous document's value " + Arrays.toString(lastPackedValue));
+                                     throw new RuntimeException("packed points value " + Arrays.toString(packedValue) + " for field=\"" + fieldInfo.name + "\", for docID=" + docID + " is out-of-order vs the previous document's value " + Arrays.toString(lastPackedValue));
                                    } else if (cmp == 0) {
                                      if (docID < lastDocID) {
-                                       throw new RuntimeException("packed points value is the same, but docID=" + docID + " is out of order vs previous docID=" + lastDocID);
+                                       throw new RuntimeException("packed points value is the same, but docID=" + docID + " is out of order vs previous docID=" + lastDocID + ", field=\"" + fieldInfo.name + "\"");
                                      }
                                    }
                                    System.arraycopy(packedValue, 0, lastPackedValue, 0, bytesPerDim);
@@ -1902,24 +1904,29 @@ public final class CheckIndex implements Closeable {
                                  for(int dim=0;dim<dimCount;dim++) {
                                    int offset = bytesPerDim * dim;
 
+                                   if (StringHelper.compare(bytesPerDim, minPackedValue, offset, maxPackedValue, offset) > 0) {
+                                     throw new RuntimeException("packed points cell minPackedValue " + Arrays.toString(minPackedValue) +
+                                                                " is out-of-bounds of the cell's maxPackedValue " + Arrays.toString(maxPackedValue) + " dim=" + dim + " field=\"" + fieldInfo.name + "\"");
+                                   }
+
                                    // Make sure this cell is not outside of the global min/max:
                                    if (StringHelper.compare(bytesPerDim, minPackedValue, offset, globalMinPackedValue, offset) < 0) {
                                      throw new RuntimeException("packed points cell minPackedValue " + Arrays.toString(minPackedValue) +
-                                                                " is out-of-bounds of the global minimum " + Arrays.toString(globalMinPackedValue) + " dim=" + dim);
+                                                                " is out-of-bounds of the global minimum " + Arrays.toString(globalMinPackedValue) + " dim=" + dim + " field=\"" + fieldInfo.name + "\"");
                                    }
 
                                    if (StringHelper.compare(bytesPerDim, maxPackedValue, offset, globalMinPackedValue, offset) < 0) {
-                                     throw new RuntimeException("packed points cell maxPackedValue " + Arrays.toString(minPackedValue) +
-                                                                " is out-of-bounds of the global minimum " + Arrays.toString(globalMinPackedValue) + " dim=" + dim);
+                                     throw new RuntimeException("packed points cell maxPackedValue " + Arrays.toString(maxPackedValue) +
+                                                                " is out-of-bounds of the global minimum " + Arrays.toString(globalMinPackedValue) + " dim=" + dim + " field=\"" + fieldInfo.name + "\"");
                                    }
 
                                    if (StringHelper.compare(bytesPerDim, minPackedValue, offset, globalMaxPackedValue, offset) > 0) {
                                      throw new RuntimeException("packed points cell minPackedValue " + Arrays.toString(minPackedValue) +
-                                                                " is out-of-bounds of the global maximum " + Arrays.toString(globalMaxPackedValue) + " dim=" + dim);
+                                                                " is out-of-bounds of the global maximum " + Arrays.toString(globalMaxPackedValue) + " dim=" + dim + " field=\"" + fieldInfo.name + "\"");
                                    }
                                    if (StringHelper.compare(bytesPerDim, maxPackedValue, offset, globalMaxPackedValue, offset) > 0) {
                                      throw new RuntimeException("packed points cell maxPackedValue " + Arrays.toString(maxPackedValue) +
-                                                                " is out-of-bounds of the global maximum " + Arrays.toString(globalMaxPackedValue) + " dim=" + dim);
+                                                                " is out-of-bounds of the global maximum " + Arrays.toString(globalMaxPackedValue) + " dim=" + dim + " field=\"" + fieldInfo.name + "\"");
                                    }
                                  }                                   
 
@@ -1930,11 +1937,11 @@ public final class CheckIndex implements Closeable {
 
                                private void checkPackedValue(String desc, byte[] packedValue, int docID) {
                                  if (packedValue == null) {
-                                   throw new RuntimeException(desc + " is null for docID=" + docID);
+                                   throw new RuntimeException(desc + " is null for docID=" + docID + " field=\"" + fieldInfo.name + "\"");
                                  }
 
                                  if (packedValue.length != packedBytesCount) {
-                                   throw new RuntimeException(desc + " has incorrect length=" + packedValue.length + " vs expected=" + packedBytesCount + " for docID=" + docID);
+                                   throw new RuntimeException(desc + " has incorrect length=" + packedValue.length + " vs expected=" + packedBytesCount + " for docID=" + docID + " field=\"" + fieldInfo.name + "\"");
                                  }
                                }
                              });

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/69e654b3/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java
index 1ddb566..6bf7dfc 100644
--- a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java
+++ b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java
@@ -69,6 +69,12 @@ public class BKDReader extends PointValues implements Accountable {
     in.readBytes(minPackedValue, 0, packedBytesLength);
     in.readBytes(maxPackedValue, 0, packedBytesLength);
 
+    for(int dim=0;dim<numDims;dim++) {
+      if (StringHelper.compare(bytesPerDim, minPackedValue, dim*bytesPerDim, maxPackedValue, dim*bytesPerDim) > 0) {
+        throw new CorruptIndexException("minPackedValue " + new BytesRef(minPackedValue) + " is > maxPackedValue " + new BytesRef(maxPackedValue) + " for dim=" + dim, in);
+      }
+    }
+    
     pointCount = in.readVLong();
     docCount = in.readVInt();
 
@@ -137,6 +143,7 @@ public class BKDReader extends PointValues implements Accountable {
     this.numDims = numDims;
     this.maxPointsInLeafNode = maxPointsInLeafNode;
     this.bytesPerDim = bytesPerDim;
+    // no version check here because callers of this API (SimpleText) have no back compat:
     bytesPerIndexEntry = numDims == 1 ? bytesPerDim : bytesPerDim + 1;
     packedBytesLength = numDims * bytesPerDim;
     this.leafNodeOffset = leafBlockFPs.length;
@@ -238,7 +245,18 @@ public class BKDReader extends PointValues implements Accountable {
       // Non-leaf node:
 
       int address = nodeID * bytesPerIndexEntry;
-      int splitDim = numDims == 1 ? 0 : splitPackedValues[address++] & 0xff;
+      int splitDim;
+      if (numDims == 1) {
+        splitDim = 0;
+        if (version < BKDWriter.VERSION_IMPLICIT_SPLIT_DIM_1D) {
+          // skip over wastefully encoded 0 splitDim:
+          assert splitPackedValues[address] == 0;
+          address++;
+        }
+      } else {
+        splitDim = splitPackedValues[address++] & 0xff;
+      }
+      
       assert splitDim < numDims;
 
       byte[] splitPackedValue = new byte[packedBytesLength];
@@ -459,14 +477,23 @@ public class BKDReader extends PointValues implements Accountable {
       
       // Non-leaf node: recurse on the split left and right nodes
 
-      // TODO: save the unused 1 byte prefix (it's always 0) in the 1d case here:
       int address = nodeID * bytesPerIndexEntry;
-      int splitDim = numDims == 1 ? 0 : splitPackedValues[address++] & 0xff;
+      int splitDim;
+      if (numDims == 1) {
+        splitDim = 0;
+        if (version < BKDWriter.VERSION_IMPLICIT_SPLIT_DIM_1D) {
+          // skip over wastefully encoded 0 splitDim:
+          assert splitPackedValues[address] == 0;
+          address++;
+        }
+      } else {
+        splitDim = splitPackedValues[address++] & 0xff;
+      }
+      
       assert splitDim < numDims;
 
       // TODO: can we alloc & reuse this up front?
 
-      // TODO: can we alloc & reuse this up front?
       byte[] splitPackedValue = new byte[packedBytesLength];
 
       // Recurse on left sub-tree:
@@ -488,7 +515,18 @@ public class BKDReader extends PointValues implements Accountable {
   /** Copies the split value for this node into the provided byte array */
   public void copySplitValue(int nodeID, byte[] splitPackedValue) {
     int address = nodeID * bytesPerIndexEntry;
-    int splitDim = numDims == 1 ? 0 : splitPackedValues[address++] & 0xff;
+    int splitDim;
+    if (numDims == 1) {
+      splitDim = 0;
+      if (version < BKDWriter.VERSION_IMPLICIT_SPLIT_DIM_1D) {
+        // skip over wastefully encoded 0 splitDim:
+        assert splitPackedValues[address] == 0;
+        address++;
+      }
+    } else {
+      splitDim = splitPackedValues[address++] & 0xff;
+    }
+    
     assert splitDim < numDims;
     System.arraycopy(splitPackedValues, address, splitPackedValue, splitDim*bytesPerDim, bytesPerDim);
   }
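[Editorial note] The version-aware splitDim decoding above is now repeated verbatim in three methods; a possible follow-up (not part of this commit) could centralize it along these lines:

    // hypothetical helpers, not in the patch:
    private int splitDim(int nodeID) {
      return numDims == 1 ? 0 : splitPackedValues[nodeID * bytesPerIndexEntry] & 0xff;
    }

    private int splitValueAddress(int nodeID) {
      int address = nodeID * bytesPerIndexEntry;
      if (numDims > 1 || version < BKDWriter.VERSION_IMPLICIT_SPLIT_DIM_1D) {
        address++; // skip the explicit splitDim byte (always 0 in old 1D indices)
      }
      return address;
    }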


[32/50] [abbrv] lucene-solr:apiv2: SOLR-9481: Move changes entry to 6.4

Posted by sa...@apache.org.
SOLR-9481: Move changes entry to 6.4


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/22aa34e0
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/22aa34e0
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/22aa34e0

Branch: refs/heads/apiv2
Commit: 22aa34e017bec1c8e8fd517e2969b1311c545c25
Parents: 5120816
Author: Jan Høydahl <ja...@apache.org>
Authored: Tue Nov 1 12:31:40 2016 +0100
Committer: Jan Høydahl <ja...@apache.org>
Committed: Tue Nov 1 12:31:40 2016 +0100

----------------------------------------------------------------------
 solr/CHANGES.txt | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/22aa34e0/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 7f067e9..fd4d2af 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -89,6 +89,10 @@ New Features
 * SOLR-9442: Adds Array of NamedValuePair (json.nl=arrnvp) style to JSONResponseWriter.
   (Jonny Marks, Christine Poerschke)
 
+* SOLR-9481: Authentication and Authorization plugins now work in standalone mode if security.json is placed in
+  SOLR_HOME on every node. Editing config through API is supported but affects only that one node. 
+  (janhoy)
+
 Optimizations
 ----------------------
 * SOLR-9704: Facet Module / JSON Facet API: Optimize blockChildren facets that have
@@ -205,10 +209,6 @@ New Features
 * SOLR-9654: Add "overrequest" parameter to JSON Facet API to control amount of overrequest
   on a distributed terms facet. (yonik)
 
-* SOLR-9481: Authentication and Authorization plugins now work in standalone mode if security.json is placed in
-  SOLR_HOME on every node. Editing config through API is supported but affects only that one node. 
-  (janhoy)
-
 * SOLR-2212: Add a factory class corresponding to Lucene's NoMergePolicy. (Lance Norskog, Cao Manh Dat via shalin)
 
 * SOLR-9670: Support SOLR_AUTHENTICATION_OPTS in solr.cmd (janhoy)


[34/50] [abbrv] lucene-solr:apiv2: SOLR-9681: move "filter" inside "domain" block

Posted by sa...@apache.org.
SOLR-9681: move "filter" inside "domain" block


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/359f981b
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/359f981b
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/359f981b

Branch: refs/heads/apiv2
Commit: 359f981b0e2737c3d019d0097e5be3bf76874407
Parents: 4383bec
Author: yonik <yo...@apache.org>
Authored: Tue Nov 1 11:59:51 2016 -0400
Committer: yonik <yo...@apache.org>
Committed: Tue Nov 1 12:01:04 2016 -0400

----------------------------------------------------------------------
 .../solr/search/facet/FacetProcessor.java       | 30 ++++++++-------
 .../apache/solr/search/facet/FacetRequest.java  | 39 +++++++++++---------
 .../solr/search/facet/TestJsonFacets.java       | 20 +++++-----
 3 files changed, 48 insertions(+), 41 deletions(-)
----------------------------------------------------------------------
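[Editorial note] In request terms, this commit moves the "filter" key of the JSON Facet API from the top level of a facet into its "domain" block, alongside "excludeTags", "blockParent" and "blockChildren"; after the change a request looks like (field names illustrative):

    {
      "categories" : {
        "type"   : "terms",
        "field"  : "cat",
        "domain" : { "filter" : "inStock:true" }
      }
    }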


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/359f981b/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java b/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
index a8914f1..c1d2882 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
@@ -85,21 +85,15 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
   }
 
   public void process() throws IOException {
-    // Check filters... if we do have filters they apply after domain changes.
-    // We still calculate them first because we can use it in a parent->child domain change.
-    evalFilters();
-    boolean appliedFilters = handleDomainChanges();
-    if (filter != null && !appliedFilters) {
-      fcontext.base = fcontext.base.intersection( filter );
-    }
+    handleDomainChanges();
   }
 
   private void evalFilters() throws IOException {
-    if (freq.filters == null || freq.filters.isEmpty()) return;
+    if (freq.domain.filters == null || freq.domain.filters.isEmpty()) return;
 
-    List<Query> qlist = new ArrayList<>(freq.filters.size());
+    List<Query> qlist = new ArrayList<>(freq.domain.filters.size());
     // TODO: prevent parsing filters each time!
-    for (Object rawFilter : freq.filters) {
+    for (Object rawFilter : freq.domain.filters) {
       Query symbolicFilter;
       if (rawFilter instanceof String) {
         QParser parser = null;
@@ -119,11 +113,19 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
     this.filter = fcontext.searcher.getDocSet(qlist);
   }
 
-  private boolean handleDomainChanges() throws IOException {
-    if (freq.domain == null) return false;
+  private void handleDomainChanges() throws IOException {
+    if (freq.domain == null) return;
     handleFilterExclusions();
+
+    // Check filters... if we do have filters they apply after domain changes.
+    // We still calculate them first because we can use it in a parent->child domain change.
+    evalFilters();
+
     boolean appliedFilters = handleBlockJoin();
-    return appliedFilters;
+
+    if (this.filter != null && !appliedFilters) {
+      fcontext.base = fcontext.base.intersection( filter );
+    }
   }
 
   private void handleFilterExclusions() throws IOException {
@@ -187,7 +189,7 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
     fcontext.base = fcontext.searcher.getDocSet(qlist);
   }
 
-  // returns "true" if filters have already been applied.
+  // returns "true" if filters were applied to fcontext.base already
   private boolean handleBlockJoin() throws IOException {
     boolean appliedFilters = false;
     if (!(freq.domain.toChildren || freq.domain.toParent)) return appliedFilters;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/359f981b/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
index d3c8722..273466c 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
@@ -78,7 +78,6 @@ public abstract class FacetRequest {
 
   protected Map<String,AggValueSource> facetStats;  // per-bucket statistics
   protected Map<String,FacetRequest> subFacets;     // per-bucket sub-facets
-  protected List<Object> filters;
   protected boolean processEmpty;
   protected Domain domain;
 
@@ -87,7 +86,8 @@ public abstract class FacetRequest {
     public List<String> excludeTags;
     public boolean toParent;
     public boolean toChildren;
-    public String parents;
+    public String parents; // identifies the parent filter... the full set of parent documents for any block join operation
+    public List<Object> filters; // list of symbolic filters (JSON query format)
   }
 
   public FacetRequest() {
@@ -359,33 +359,38 @@ abstract class FacetParser<FacetRequestT extends FacetRequest> {
 
       Map<String,Object> domainMap = (Map<String,Object>) m.get("domain");
       if (domainMap != null) {
+        FacetRequest.Domain domain = getDomain();
+
         excludeTags = getStringList(domainMap, "excludeTags");
         if (excludeTags != null) {
-          getDomain().excludeTags = excludeTags;
+          domain.excludeTags = excludeTags;
         }
 
         String blockParent = (String)domainMap.get("blockParent");
         String blockChildren = (String)domainMap.get("blockChildren");
 
         if (blockParent != null) {
-          getDomain().toParent = true;
-          getDomain().parents = blockParent;
+          domain.toParent = true;
+          domain.parents = blockParent;
         } else if (blockChildren != null) {
-          getDomain().toChildren = true;
-          getDomain().parents = blockChildren;
+          domain.toChildren = true;
+          domain.parents = blockChildren;
         }
 
-      }
-
-      Object filterOrList = m.get("filter");
-      if (filterOrList != null) {
-        if (filterOrList instanceof List) {
-          facet.filters = (List<Object>)filterOrList;
-        } else {
-          facet.filters = new ArrayList<>(1);
-          facet.filters.add(filterOrList);
+        Object filterOrList = domainMap.get("filter");
+        if (filterOrList != null) {
+          assert domain.filters == null;
+          if (filterOrList instanceof List) {
+            domain.filters = (List<Object>)filterOrList;
+          } else {
+            domain.filters = new ArrayList<>(1);
+            domain.filters.add(filterOrList);
+          }
         }
-      }
+
+
+      } // end "domain"
+
 
     }
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/359f981b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
index 57e3ed1..32c6ef1 100644
--- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
+++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
@@ -1167,12 +1167,12 @@ public class TestJsonFacets extends SolrTestCaseHS {
     // test filter
     client.testJQ(params(p, "q", "*:*", "myfilt","${cat_s}:A"
         , "json.facet", "{" +
-            "t:{${terms} type:terms, field:${cat_s}, filter:[]}" + // empty filter list
-            ",t_filt:{${terms} type:terms, field:${cat_s}, filter:'${cat_s}:B'}" +
-            ",t_filt2:{${terms} type:terms, field:${cat_s}, filter:'{!query v=$myfilt}'}" +  // test access to qparser and other query parameters
-            ",t_filt3:{${terms} type:terms, field:${cat_s}, filter:['-id:1','-id:2']}" +
-            ",q:{type:query, q:'${cat_s}:B', filter:['-id:5']}" + // also tests a top-level negative filter
-            ",r:{type:range, field:${num_d}, start:-5, end:10, gap:5, filter:'-id:4'}" +
+            "t:{${terms} type:terms, field:${cat_s}, domain:{filter:[]} }" + // empty filter list
+            ",t_filt:{${terms} type:terms, field:${cat_s}, domain:{filter:'${cat_s}:B'} }" +
+            ",t_filt2:{${terms} type:terms, field:${cat_s}, domain:{filter:'{!query v=$myfilt}'} }" +  // test access to qparser and other query parameters
+            ",t_filt3:{${terms} type:terms, field:${cat_s}, domain:{filter:['-id:1','-id:2']} }" +
+            ",q:{type:query, q:'${cat_s}:B', domain:{filter:['-id:5']} }" + // also tests a top-level negative filter
+            ",r:{type:range, field:${num_d}, start:-5, end:10, gap:5, domain:{filter:'-id:4'} }" +
             "}"
         )
         , "facets=={ count:6, " +
@@ -1419,10 +1419,10 @@ public class TestJsonFacets extends SolrTestCaseHS {
     // test filter after block join
     client.testJQ(params(p, "q", "*:*"
         , "json.facet", "{ " +
-            "pages1:{type:terms, field:v_t, domain:{blockChildren:'type_s:book'}, filter:'*:*' }" +
-            ",pages2:{type:terms, field:v_t, domain:{blockChildren:'type_s:book'}, filter:'-id:3.1' }" +
-            ",books:{type:terms, field:v_t, domain:{blockParent:'type_s:book'}, filter:'*:*' }" +
-            ",books2:{type:terms, field:v_t, domain:{blockParent:'type_s:book'}, filter:'id:1' }" +
+            "pages1:{type:terms, field:v_t, domain:{blockChildren:'type_s:book', filter:'*:*'} }" +
+            ",pages2:{type:terms, field:v_t, domain:{blockChildren:'type_s:book', filter:'-id:3.1'} }" +
+            ",books:{type:terms, field:v_t, domain:{blockParent:'type_s:book', filter:'*:*'} }" +
+            ",books2:{type:terms, field:v_t, domain:{blockParent:'type_s:book', filter:'id:1'} }" +
             "}"
         )
         , "facets=={ count:10" +


[26/50] [abbrv] lucene-solr:apiv2: Adding version 6.4.0

Posted by sa...@apache.org.
Adding version 6.4.0


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/72bdbd23
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/72bdbd23
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/72bdbd23

Branch: refs/heads/apiv2
Commit: 72bdbd234e28daba522d71aed37c11c79970678b
Parents: 66c90a9
Author: Shalin Shekhar Mangar <sh...@apache.org>
Authored: Mon Oct 31 16:47:00 2016 +0530
Committer: Shalin Shekhar Mangar <sh...@apache.org>
Committed: Mon Oct 31 16:47:00 2016 +0530

----------------------------------------------------------------------
 lucene/CHANGES.txt                                       | 3 +++
 lucene/core/src/java/org/apache/lucene/util/Version.java | 7 +++++++
 2 files changed, 10 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/72bdbd23/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 385a9ae..6697fbf 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -54,6 +54,9 @@ Other
 
 * LUCENE-7360: Remove Explanation.toHtml() (Alan Woodward)
 
+======================= Lucene 6.4.0 =======================
+(No Changes)
+
 ======================= Lucene 6.3.0 =======================
 
 API Changes

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/72bdbd23/lucene/core/src/java/org/apache/lucene/util/Version.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/util/Version.java b/lucene/core/src/java/org/apache/lucene/util/Version.java
index 58a9e0e..2355a9a 100644
--- a/lucene/core/src/java/org/apache/lucene/util/Version.java
+++ b/lucene/core/src/java/org/apache/lucene/util/Version.java
@@ -74,6 +74,13 @@ public final class Version {
   public static final Version LUCENE_6_3_0 = new Version(6, 3, 0);
 
   /**
+   * Match settings and bugs in Lucene's 6.4.0 release.
+   * @deprecated Use latest
+   */
+  @Deprecated
+  public static final Version LUCENE_6_4_0 = new Version(6, 4, 0);
+
+  /**
    * Match settings and bugs in Lucene's 7.0.0 release.
    *  <p>
    *  Use this to get the latest &amp; greatest settings, bug
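
As a quick sanity check of the new constant, a minimal sketch (assuming lucene-core 6.4+ on the classpath): Version.parse maps a dotted "major.minor.bugfix" string onto the matching constant, and LUCENE_6_4_0 is @Deprecated only to steer callers toward Version.LATEST.

    import java.text.ParseException;
    import org.apache.lucene.util.Version;

    public class VersionCheck {
      public static void main(String[] args) throws ParseException {
        Version v = Version.parse("6.4.0");                  // parse a dotted version string
        System.out.println(v.equals(Version.LUCENE_6_4_0));  // prints: true
      }
    }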


[12/50] [abbrv] lucene-solr:apiv2: SOLR-9481: Try to fix flaky test error by removing unnecessary initCore() in @Before method

Posted by sa...@apache.org.
SOLR-9481: Try to fix flaky test error by removing unnecessary initCore() in @Before method


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/1f064119
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/1f064119
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/1f064119

Branch: refs/heads/apiv2
Commit: 1f06411946237eff51f7d23bc52eb64e76a1c18b
Parents: f56d111
Author: Jan Høydahl <ja...@apache.org>
Authored: Fri Oct 28 15:04:49 2016 +0200
Committer: Jan Høydahl <ja...@apache.org>
Committed: Fri Oct 28 15:04:49 2016 +0200

----------------------------------------------------------------------
 .../src/test/org/apache/solr/security/BasicAuthStandaloneTest.java  | 1 -
 1 file changed, 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1f064119/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java b/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java
index 33c0ab3..55e78e7 100644
--- a/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java
+++ b/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java
@@ -71,7 +71,6 @@ public class BasicAuthStandaloneTest extends AbstractSolrTestCase {
     instance = new SolrInstance("inst", null);
     instance.setUp();
     jetty = createJetty(instance);
-    initCore("solrconfig.xml", "schema.xml", instance.getHomeDir().toString());
     securityConfHandler = new SecurityConfHandlerLocalForTesting(jetty.getCoreContainer());
   }
 


[03/50] [abbrv] lucene-solr:apiv2: SOLR-1085: Add support for MoreLikeThis queries and responses in SolrJ client

Posted by sa...@apache.org.
SOLR-1085: Add support for MoreLikeThis queries and responses in SolrJ client


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/2172f3e0
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/2172f3e0
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/2172f3e0

Branch: refs/heads/apiv2
Commit: 2172f3e0081e3e59ce8b02c2bb5654a592f79f74
Parents: 0feca1a
Author: Shalin Shekhar Mangar <sh...@apache.org>
Authored: Thu Oct 27 17:41:25 2016 +0530
Committer: Shalin Shekhar Mangar <sh...@apache.org>
Committed: Thu Oct 27 17:41:25 2016 +0530

----------------------------------------------------------------------
 solr/CHANGES.txt                                |   3 +
 .../component/MoreLikeThisComponent.java        |   2 +-
 .../org/apache/solr/client/solrj/SolrQuery.java | 248 +++++++++++++++++++
 .../client/solrj/response/QueryResponse.java    |   8 +
 .../solr/common/params/MoreLikeThisParams.java  |   3 +
 .../solr/client/solrj/SolrExampleTests.java     |  21 +-
 .../apache/solr/client/solrj/SolrQueryTest.java |  25 ++
 7 files changed, 299 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2172f3e0/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index ae1d709..4ef1061 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -170,6 +170,9 @@ New Features
 
 * SOLR-9559: Add ExecutorStream to execute stored Streaming Expressions (Joel Bernstein)
 
+* SOLR-1085: Add support for MoreLikeThis queries and responses in SolrJ client.
+  (Maurice Jumelet, Bill Mitchell, Cao Manh Dat via shalin)
+
 Bug Fixes
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2172f3e0/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java b/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java
index 6ccdd12..7cf6d39 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java
@@ -173,7 +173,7 @@ public class MoreLikeThisComponent extends SearchComponent {
         && rb.req.getParams().getBool(COMPONENT_NAME, false)) {
       Map<Object,SolrDocumentList> tempResults = new LinkedHashMap<>();
       
-      int mltcount = rb.req.getParams().getInt(MoreLikeThisParams.DOC_COUNT, 5);
+      int mltcount = rb.req.getParams().getInt(MoreLikeThisParams.DOC_COUNT, MoreLikeThisParams.DEFAULT_DOC_COUNT);
       String keyName = rb.req.getSchema().getUniqueKeyField().getName();
       
       for (ShardRequest sreq : rb.finished) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2172f3e0/solr/solrj/src/java/org/apache/solr/client/solrj/SolrQuery.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrQuery.java b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrQuery.java
index 7eee7be..e6d3d69 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrQuery.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrQuery.java
@@ -27,6 +27,7 @@ import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.FacetParams;
 import org.apache.solr.common.params.HighlightParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.params.MoreLikeThisParams;
 import org.apache.solr.common.params.StatsParams;
 import org.apache.solr.common.params.TermsParams;
 
@@ -801,6 +802,253 @@ public class SolrQuery extends ModifiableSolrParams
     return this;
   }
 
+
+  /**
+   * Add field for MoreLikeThis. Automatically
+   * enables MoreLikeThis.
+   *
+   * @param field the names of the field to be added
+   * @return this
+   */
+  public SolrQuery addMoreLikeThisField(String field) {
+    this.setMoreLikeThis(true);
+    return addValueToParam(MoreLikeThisParams.SIMILARITY_FIELDS, field);
+  }
+
+  public SolrQuery setMoreLikeThisFields(String... fields) {
+    if( fields == null || fields.length == 0 ) {
+      this.remove( MoreLikeThisParams.SIMILARITY_FIELDS );
+      this.setMoreLikeThis(false);
+      return this;
+    }
+
+    StringBuilder sb = new StringBuilder();
+    sb.append(fields[0]);
+    for (int i = 1; i < fields.length; i++) {
+      sb.append(',');
+      sb.append(fields[i]);
+    }
+    this.set(MoreLikeThisParams.SIMILARITY_FIELDS, sb.toString());
+    this.setMoreLikeThis(true);
+    return this;
+  }
+
+  /**
+   * @return an array with the fields used to compute similarity.
+   */
+  public String[] getMoreLikeThisFields() {
+    String fl = this.get(MoreLikeThisParams.SIMILARITY_FIELDS);
+    if(fl==null || fl.length()==0) {
+      return null;
+    }
+    return fl.split(",");
+  }
+
+  /**
+   * Sets the frequency below which terms will be ignored in the source doc
+   *
+   * @param mintf the minimum term frequency
+   * @return this
+   */
+  public SolrQuery setMoreLikeThisMinTermFreq(int mintf) {
+    this.set(MoreLikeThisParams.MIN_TERM_FREQ, mintf);
+    return this;
+  }
+
+  /**
+   * Gets the frequency below which terms will be ignored in the source doc
+   */
+  public int getMoreLikeThisMinTermFreq() {
+    return this.getInt(MoreLikeThisParams.MIN_TERM_FREQ, 2);
+  }
+
+  /**
+   * Sets the frequency at which words will be ignored which do not occur in
+   * at least this many docs.
+   *
+   * @param mindf the minimum document frequency
+   * @return this
+   */
+  public SolrQuery setMoreLikeThisMinDocFreq(int mindf) {
+    this.set(MoreLikeThisParams.MIN_DOC_FREQ, mindf);
+    return this;
+  }
+
+  /**
+   * Gets the frequency at which words will be ignored which do not occur in
+   * at least this many docs.
+   */
+  public int getMoreLikeThisMinDocFreq() {
+    return this.getInt(MoreLikeThisParams.MIN_DOC_FREQ, 5);
+  }
+
+  /**
+   * Sets the minimum word length below which words will be ignored.
+   *
+   * @param minwl the minimum word length
+   * @return this
+   */
+  public SolrQuery setMoreLikeThisMinWordLen(int minwl) {
+    this.set(MoreLikeThisParams.MIN_WORD_LEN, minwl);
+    return this;
+  }
+
+  /**
+   * Gets the minimum word length below which words will be ignored.
+   */
+  public int getMoreLikeThisMinWordLen() {
+    return this.getInt(MoreLikeThisParams.MIN_WORD_LEN, 0);
+  }
+
+  /**
+   * Sets the maximum word length above which words will be ignored.
+   *
+   * @param maxwl the maximum word length
+   * @return this
+   */
+  public SolrQuery setMoreLikeThisMaxWordLen(int maxwl) {
+    this.set(MoreLikeThisParams.MAX_WORD_LEN, maxwl);
+    return this;
+  }
+
+  /**
+   * Gets the maximum word length above which words will be ignored.
+   */
+  public int getMoreLikeThisMaxWordLen() {
+    return this.getInt(MoreLikeThisParams.MAX_WORD_LEN, 0);
+  }
+
+  /**
+   * Sets the maximum number of query terms that will be included in any
+   * generated query.
+   *
+   * @param maxqt the maximum number of query terms
+   * @return this
+   */
+  public SolrQuery setMoreLikeThisMaxQueryTerms(int maxqt) {
+    this.set(MoreLikeThisParams.MAX_QUERY_TERMS, maxqt);
+    return this;
+  }
+
+  /**
+   * Gets the maximum number of query terms that will be included in any
+   * generated query.
+   */
+  public int getMoreLikeThisMaxQueryTerms() {
+    return this.getInt(MoreLikeThisParams.MAX_QUERY_TERMS, 25);
+  }
+
+  /**
+   * Sets the maximum number of tokens to parse in each example doc field
+   * that is not stored with TermVector support.
+   *
+   * @param maxntp the maximum number of tokens to parse
+   * @return this
+   */
+  public SolrQuery setMoreLikeThisMaxTokensParsed(int maxntp) {
+    this.set(MoreLikeThisParams.MAX_NUM_TOKENS_PARSED, maxntp);
+    return this;
+  }
+
+  /**
+   * Gets the maximum number of tokens to parse in each example doc field
+   * that is not stored with TermVector support.
+   */
+  public int getMoreLikeThisMaxTokensParsed() {
+    return this.getInt(MoreLikeThisParams.MAX_NUM_TOKENS_PARSED, 5000);
+  }
+
+  /**
+   * Sets if the query will be boosted by the interesting term relevance.
+   *
+   * @param b set to true to boost the query with the interesting term relevance
+   * @return this
+   */
+  public SolrQuery setMoreLikeThisBoost(boolean b) {
+    this.set(MoreLikeThisParams.BOOST, b);
+    return this;
+  }
+
+  /**
+   * Gets if the query will be boosted by the interesting term relevance.
+   */
+  public boolean getMoreLikeThisBoost() {
+    return this.getBool(MoreLikeThisParams.BOOST, false);
+  }
+
+  /**
+   * Sets the query fields and their boosts using the same format as that
+   * used in DisMaxQParserPlugin. These fields must also be added
+   * using {@link #addMoreLikeThisField(String)}.
+   *
+   * @param qf the query fields
+   * @return this
+   */
+  public SolrQuery setMoreLikeThisQF(String qf) {
+    this.set(MoreLikeThisParams.QF, qf);
+    return this;
+  }
+
+  /**
+   * Gets the query fields and their boosts.
+   */
+  public String getMoreLikeThisQF() {
+    return this.get(MoreLikeThisParams.QF);
+  }
+
+  /**
+   * Sets the number of similar documents to return for each result.
+   *
+   * @param count the number of similar documents to return for each result
+   * @return this
+   */
+  public SolrQuery setMoreLikeThisCount(int count) {
+    this.set(MoreLikeThisParams.DOC_COUNT, count);
+    return this;
+  }
+
+  /**
+   * Gets the number of similar documents to return for each result.
+   */
+  public int getMoreLikeThisCount() {
+    return this.getInt(MoreLikeThisParams.DOC_COUNT, MoreLikeThisParams.DEFAULT_DOC_COUNT);
+  }
+
+  /**
+   * Enable/Disable MoreLikeThis. After enabling MoreLikeThis, the fields
+   * used for computing similarity must be specified calling
+   * {@link #addMoreLikeThisField(String)}.
+   *
+   * @param b flag to indicate if MoreLikeThis should be enabled. if b==false
+   * removes all mlt.* parameters
+   * @return this
+   */
+  public SolrQuery setMoreLikeThis(boolean b) {
+    if(b) {
+      this.set(MoreLikeThisParams.MLT, true);
+    } else {
+      this.remove(MoreLikeThisParams.MLT);
+      this.remove(MoreLikeThisParams.SIMILARITY_FIELDS);
+      this.remove(MoreLikeThisParams.MIN_TERM_FREQ);
+      this.remove(MoreLikeThisParams.MIN_DOC_FREQ);
+      this.remove(MoreLikeThisParams.MIN_WORD_LEN);
+      this.remove(MoreLikeThisParams.MAX_WORD_LEN);
+      this.remove(MoreLikeThisParams.MAX_QUERY_TERMS);
+      this.remove(MoreLikeThisParams.MAX_NUM_TOKENS_PARSED);
+      this.remove(MoreLikeThisParams.BOOST);
+      this.remove(MoreLikeThisParams.QF);
+      this.remove(MoreLikeThisParams.DOC_COUNT);
+    }
+    return this;
+  }
+
+  /**
+   * @return true if MoreLikeThis is enabled, false otherwise
+   */
+  public boolean getMoreLikeThis() {
+    return this.getBool(MoreLikeThisParams.MLT, false);
+  }
+
   public SolrQuery setFields(String ... fields) {
     if( fields == null || fields.length == 0 ) {
       this.remove( CommonParams.FL );

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2172f3e0/solr/solrj/src/java/org/apache/solr/client/solrj/response/QueryResponse.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/QueryResponse.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/QueryResponse.java
index debb079..eb595aa 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/QueryResponse.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/QueryResponse.java
@@ -51,6 +51,7 @@ public class QueryResponse extends SolrResponseBase
   private Map<String,NamedList<Object>> _suggestInfo = null;
   private NamedList<Object> _statsInfo = null;
   private NamedList<NamedList<Number>> _termsInfo = null;
+  private NamedList<SolrDocumentList> _moreLikeThisInfo = null;
   private String _cursorMarkNext = null;
 
   // Grouping response
@@ -168,6 +169,9 @@ public class QueryResponse extends SolrResponseBase
         _termsInfo = (NamedList<NamedList<Number>>) res.getVal( i );
         extractTermsInfo( _termsInfo );
       }
+      else if ( "moreLikeThis".equals( n ) ) {
+        _moreLikeThisInfo = (NamedList<SolrDocumentList>) res.getVal( i );
+      }
       else if ( CursorMarkParams.CURSOR_MARK_NEXT.equals( n ) ) {
         _cursorMarkNext = (String) res.getVal( i );
       }
@@ -547,6 +551,10 @@ public class QueryResponse extends SolrResponseBase
   public TermsResponse getTermsResponse() {
     return _termsResponse;
   }
+
+  public NamedList<SolrDocumentList> getMoreLikeThis() {
+    return _moreLikeThisInfo;
+  }
   
   /**
    * See also: {@link #getLimitingFacets()}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2172f3e0/solr/solrj/src/java/org/apache/solr/common/params/MoreLikeThisParams.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/MoreLikeThisParams.java b/solr/solrj/src/java/org/apache/solr/common/params/MoreLikeThisParams.java
index b41cbfd..c898fdb 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/MoreLikeThisParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/MoreLikeThisParams.java
@@ -50,6 +50,9 @@ public interface MoreLikeThisParams
 
   // Do you want to include the original document in the results or not
   public final static String INTERESTING_TERMS = PREFIX + "interestingTerms";  // false,details,(list or true)
+
+  // the default doc count
+  public final static int DEFAULT_DOC_COUNT = 5;
   
   public enum TermStyle {
     NONE,

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2172f3e0/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java
index 0f91adf..88227ba 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java
@@ -1996,37 +1996,38 @@ abstract public class SolrExampleTests extends SolrExampleTestsBase
     // test with mlt.fl having comma separated values
     SolrQuery q = new SolrQuery("*:*");
     q.setRows(20);
-    q.setParam("mlt", "true");
-    q.setParam("mlt.mintf", "0");
-    q.setParam("mlt.count", "2");
-    q.setParam("mlt.fl", "x_s,y_s,z_s");
+    q.setMoreLikeThisFields("x_s", "y_s", "z_s");
+    q.setMoreLikeThisMinTermFreq(0);
+    q.setMoreLikeThisCount(2);
     QueryResponse response = client.query(q);
     assertEquals(20, response.getResults().getNumFound());
-    NamedList<Object> moreLikeThis = (NamedList<Object>) response.getResponse().get("moreLikeThis");
+    NamedList<SolrDocumentList> moreLikeThis = response.getMoreLikeThis();
     assertNotNull("MoreLikeThis response should not have been null", moreLikeThis);
     for (int i=0; i<20; i++)  {
       String id = "testMoreLikeThis" + i;
-      SolrDocumentList mltResp = (SolrDocumentList) moreLikeThis.get(id);
+      SolrDocumentList mltResp = moreLikeThis.get(id);
       assertNotNull("MoreLikeThis response for id=" + id + " should not be null", mltResp);
       assertTrue("MoreLikeThis response for id=" + id + " had numFound=0", mltResp.getNumFound() > 0);
+      assertTrue("MoreLikeThis response for id=" + id + " had not returned exactly 2 documents", mltResp.size() == 2);
     }
 
     // now test with multiple mlt.fl parameters
     q = new SolrQuery("*:*");
     q.setRows(20);
     q.setParam("mlt", "true");
-    q.setParam("mlt.mintf", "0");
-    q.setParam("mlt.count", "2");
     q.setParam("mlt.fl", "x_s", "y_s", "z_s");
+    q.setMoreLikeThisMinTermFreq(0);
+    q.setMoreLikeThisCount(2);
     response = client.query(q);
     assertEquals(20, response.getResults().getNumFound());
-    moreLikeThis = (NamedList<Object>) response.getResponse().get("moreLikeThis");
+    moreLikeThis = response.getMoreLikeThis();
     assertNotNull("MoreLikeThis response should not have been null", moreLikeThis);
     for (int i=0; i<20; i++)  {
       String id = "testMoreLikeThis" + i;
-      SolrDocumentList mltResp = (SolrDocumentList) moreLikeThis.get(id);
+      SolrDocumentList mltResp = moreLikeThis.get(id);
       assertNotNull("MoreLikeThis response for id=" + id + " should not be null", mltResp);
       assertTrue("MoreLikeThis response for id=" + id + " had numFound=0", mltResp.getNumFound() > 0);
+      assertTrue("MoreLikeThis response for id=" + id + " had not returned exactly 2 documents", mltResp.size() == 2);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2172f3e0/solr/solrj/src/test/org/apache/solr/client/solrj/SolrQueryTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrQueryTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrQueryTest.java
index 816a2cc..d27847f 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrQueryTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrQueryTest.java
@@ -431,4 +431,29 @@ public class SolrQueryTest extends LuceneTestCase {
     assertNull(solrQuery.getParams("f.field3.facet.interval.set"));
     
   }
+
+  public void testMoreLikeThis() {
+    SolrQuery solrQuery = new SolrQuery();
+    solrQuery.addMoreLikeThisField("mlt1");
+    assertTrue(solrQuery.getMoreLikeThis());
+
+    solrQuery.addMoreLikeThisField("mlt2");
+    solrQuery.addMoreLikeThisField("mlt3");
+    solrQuery.addMoreLikeThisField("mlt4");
+    assertEquals(4, solrQuery.getMoreLikeThisFields().length);
+    solrQuery.setMoreLikeThisFields(null);
+    assertTrue(null == solrQuery.getMoreLikeThisFields());
+    assertFalse(solrQuery.getMoreLikeThis());
+
+    assertEquals(true, solrQuery.setMoreLikeThisBoost(true).getMoreLikeThisBoost());
+    assertEquals("qf", solrQuery.setMoreLikeThisQF("qf").getMoreLikeThisQF());
+    assertEquals(10, solrQuery.setMoreLikeThisMaxTokensParsed(10).getMoreLikeThisMaxTokensParsed());
+    assertEquals(11, solrQuery.setMoreLikeThisMinTermFreq(11).getMoreLikeThisMinTermFreq());
+    assertEquals(12, solrQuery.setMoreLikeThisMinDocFreq(12).getMoreLikeThisMinDocFreq());
+    assertEquals(13, solrQuery.setMoreLikeThisMaxWordLen(13).getMoreLikeThisMaxWordLen());
+    assertEquals(14, solrQuery.setMoreLikeThisMinWordLen(14).getMoreLikeThisMinWordLen());
+    assertEquals(15, solrQuery.setMoreLikeThisMaxQueryTerms(15).getMoreLikeThisMaxQueryTerms());
+    assertEquals(16, solrQuery.setMoreLikeThisCount(16).getMoreLikeThisCount());
+
+  }
 }
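
Putting the new SolrJ surface together, a hedged end-to-end sketch (the base URL, core name, and field names are illustrative; the MLT setters and QueryResponse#getMoreLikeThis are the methods added by this commit):

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;
    import org.apache.solr.client.solrj.response.QueryResponse;
    import org.apache.solr.common.SolrDocumentList;
    import org.apache.solr.common.util.NamedList;

    public class MltExample {
      public static void main(String[] args) throws Exception {
        // illustrative endpoint; point this at any core/collection with text fields
        try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/techproducts").build()) {
          SolrQuery q = new SolrQuery("*:*");
          q.setMoreLikeThisFields("name", "cat");   // enables MLT and sets mlt.fl
          q.setMoreLikeThisMinTermFreq(1);          // mlt.mintf
          q.setMoreLikeThisCount(3);                // mlt.count similar docs per result
          QueryResponse rsp = client.query(q);
          NamedList<SolrDocumentList> mlt = rsp.getMoreLikeThis();
          for (int i = 0; i < mlt.size(); i++) {    // keyed by each result's uniqueKey value
            System.out.println(mlt.getName(i) + " -> " + mlt.getVal(i).getNumFound() + " similar docs");
          }
        }
      }
    }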


[06/50] [abbrv] lucene-solr:apiv2: SOLR-9371: Fix bin/solr script calculations - start/stop wait time and RMI_PORT

Posted by sa...@apache.org.
SOLR-9371: Fix bin/solr script calculations - start/stop wait time and RMI_PORT


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/1344d895
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/1344d895
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/1344d895

Branch: refs/heads/apiv2
Commit: 1344d895f96644a4d541acd5a9fbe9fe4d1969a5
Parents: fa4e599
Author: Erick Erickson <er...@apache.org>
Authored: Thu Oct 27 17:54:34 2016 -0700
Committer: Erick Erickson <er...@apache.org>
Committed: Thu Oct 27 17:54:34 2016 -0700

----------------------------------------------------------------------
 solr/CHANGES.txt    |  4 ++++
 solr/bin/solr       | 39 +++++++++++++++++++++++++++++++--------
 solr/bin/solr.in.sh |  6 ++++++
 3 files changed, 41 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1344d895/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 4ef1061..b1daf1b 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -382,6 +382,10 @@ Other Changes
 
 * SOLR-9533: Reload core config when a core is reloaded (Gethin James, Joel Bernstein)
 
+* SOLR-9371: Fix bin/solr calculations for start/stop wait time and RMI_PORT.
+  (Shawn Heisey via Erick Erickson)
+
+
 ==================  6.2.1 ==================
 
 Bug Fixes

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1344d895/solr/bin/solr
----------------------------------------------------------------------
diff --git a/solr/bin/solr b/solr/bin/solr
index 9d55e0a..1d8edfa 100755
--- a/solr/bin/solr
+++ b/solr/bin/solr
@@ -119,6 +119,9 @@ else
   JAVA=java
 fi
 
+if [ -z "$SOLR_STOP_WAIT" ]; then
+  SOLR_STOP_WAIT=180
+fi
 # test that Java exists, is executable and correct version
 JAVA_VER=$("$JAVA" -version 2>&1)
 if [[ $? -ne 0 ]] ; then
@@ -231,7 +234,7 @@ function print_usage() {
     echo ""
     echo "  -p <port>     Specify the port to start the Solr HTTP listener on; default is 8983"
     echo "                  The specified port (SOLR_PORT) will also be used to determine the stop port"
-    echo "                  STOP_PORT=(\$SOLR_PORT-1000) and JMX RMI listen port RMI_PORT=(1\$SOLR_PORT). "
+    echo "                  STOP_PORT=(\$SOLR_PORT-1000) and JMX RMI listen port RMI_PORT=(\$SOLR_PORT+10000). "
     echo "                  For instance, if you set -p 8985, then the STOP_PORT=7985 and RMI_PORT=18985"
     echo ""
     echo "  -d <dir>      Specify the Solr server directory; defaults to server"
@@ -575,9 +578,24 @@ function stop_solr() {
   SOLR_PID="$4"
 
   if [ "$SOLR_PID" != "" ]; then
-    echo -e "Sending stop command to Solr running on port $SOLR_PORT ... waiting 5 seconds to allow Jetty process $SOLR_PID to stop gracefully."
+    echo -e "Sending stop command to Solr running on port $SOLR_PORT ... waiting up to $SOLR_STOP_WAIT seconds to allow Jetty process $SOLR_PID to stop gracefully."
     "$JAVA" $SOLR_SSL_OPTS $AUTHC_OPTS -jar "$DIR/start.jar" "STOP.PORT=$STOP_PORT" "STOP.KEY=$STOP_KEY" --stop || true
-    (sleep 5) &
+      (loops=0
+      while true
+      do
+        CHECK_PID=`ps auxww | awk '{print $2}' | grep -w $SOLR_PID | sort -r | tr -d ' '`
+        if [ "$CHECK_PID" != "" ]; then
+          slept=$((loops * 2))
+          if [ $slept -lt $SOLR_STOP_WAIT ]; then
+            sleep 2
+            loops=$[$loops+1]
+          else
+            exit # subshell!
+          fi
+        else
+          exit # subshell!
+        fi
+      done) &
     spinner $!
     rm -f "$SOLR_PID_DIR/solr-$SOLR_PORT.pid"
   else
@@ -1459,7 +1477,11 @@ fi
 if [ "$ENABLE_REMOTE_JMX_OPTS" == "true" ]; then
 
   if [ -z "$RMI_PORT" ]; then
-    RMI_PORT="1$SOLR_PORT"
+    RMI_PORT=`expr $SOLR_PORT + 10000`
+    if [ $RMI_PORT -gt 65535 ]; then
+      echo -e "\nRMI_PORT is $RMI_PORT, which is invalid!\n"
+      exit 1
+    fi
   fi
 
   REMOTE_JMX_OPTS=('-Dcom.sun.management.jmxremote' \
@@ -1620,18 +1642,19 @@ function launch_solr() {
 
     # no lsof on cygwin though
     if hash lsof 2>/dev/null ; then  # hash returns true if lsof is on the path
-      echo -n "Waiting up to 30 seconds to see Solr running on port $SOLR_PORT"
+      echo -n "Waiting up to $SOLR_STOP_WAIT seconds to see Solr running on port $SOLR_PORT"
       # Launch in a subshell to show the spinner
       (loops=0
       while true
       do
         running=`lsof -PniTCP:$SOLR_PORT -sTCP:LISTEN`
         if [ -z "$running" ]; then
-          if [ $loops -lt 6 ]; then
-            sleep 5
+	  slept=$((loops * 2))
+          if [ $slept -lt $SOLR_STOP_WAIT ]; then
+            sleep 2
             loops=$[$loops+1]
           else
-            echo -e "Still not seeing Solr listening on $SOLR_PORT after 30 seconds!"
+            echo -e "Still not seeing Solr listening on $SOLR_PORT after $SOLR_STOP_WAIT seconds!"
             tail -30 "$SOLR_LOGS_DIR/solr.log"
             exit # subshell!
           fi

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/1344d895/solr/bin/solr.in.sh
----------------------------------------------------------------------
diff --git a/solr/bin/solr.in.sh b/solr/bin/solr.in.sh
index 40c59a6..a84c474 100644
--- a/solr/bin/solr.in.sh
+++ b/solr/bin/solr.in.sh
@@ -21,6 +21,12 @@
 # affecting other Java applications on your server/workstation.
 #SOLR_JAVA_HOME=""
 
+# This controls the number of seconds that the solr script will wait for
+# Solr to stop gracefully or Solr to start.  If the graceful stop fails,
+# the script will forcibly stop Solr.  If the start fails, the script will
+# give up waiting and display the last few lines of the logfile.
+#SOLR_STOP_WAIT="180"
+
 # Increase Java Heap as needed to support your indexing / query needs
 #SOLR_HEAP="512m"
 


[10/50] [abbrv] lucene-solr:apiv2: SOLR-9132: Cut over some collections API and recovery tests

Posted by sa...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f56d111a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java
index 826a8e5..c1ad2bd 100644
--- a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java
@@ -19,9 +19,7 @@ package org.apache.solr.cloud;
 import javax.management.MBeanServer;
 import javax.management.MBeanServerFactory;
 import javax.management.ObjectName;
-import java.io.File;
 import java.io.IOException;
-import java.lang.invoke.MethodHandles;
 import java.lang.management.ManagementFactory;
 import java.nio.file.Files;
 import java.nio.file.Path;
@@ -31,16 +29,14 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.Objects;
-import java.util.Properties;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
+import com.google.common.collect.ImmutableList;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.client.solrj.SolrClient;
@@ -54,453 +50,224 @@ import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.client.solrj.request.CoreAdminRequest;
 import org.apache.solr.client.solrj.request.CoreAdminRequest.Create;
 import org.apache.solr.client.solrj.request.QueryRequest;
+import org.apache.solr.client.solrj.request.UpdateRequest;
 import org.apache.solr.client.solrj.response.CollectionAdminResponse;
 import org.apache.solr.client.solrj.response.CoreAdminResponse;
-import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrException;
-import org.apache.solr.common.SolrException.ErrorCode;
-import org.apache.solr.common.SolrInputDocument;
-import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.Slice;
-import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZkCoreNodeProps;
 import org.apache.solr.common.cloud.ZkNodeProps;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.CollectionParams.CollectionAction;
 import org.apache.solr.common.params.CoreAdminParams;
-import org.apache.solr.common.params.MapSolrParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrInfoMBean.Category;
 import org.apache.solr.util.TestInjection;
 import org.apache.solr.util.TimeOut;
+import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
-import static org.apache.solr.cloud.OverseerCollectionMessageHandler.NUM_SLICES;
 import static org.apache.solr.common.cloud.ZkStateReader.CORE_NAME_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.MAX_SHARDS_PER_NODE;
 import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR;
-import static org.apache.solr.common.util.Utils.makeMap;
 
 /**
  * Tests the Cloud Collections API.
  */
 @Slow
-public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBase {
+public class CollectionsAPIDistributedZkTest extends SolrCloudTestCase {
 
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
-  private static final String DEFAULT_COLLECTION = "collection1";
-
-  // we randomly use a second config set rather than just one
-  private boolean secondConfigSet = random().nextBoolean();
-  
   @BeforeClass
   public static void beforeCollectionsAPIDistributedZkTest() {
     TestInjection.randomDelayInCoreCreation = "true:20";
     System.setProperty("validateAfterInactivity", "200");
   }
-  
-  @Override
-  public void distribSetUp() throws Exception {
-    super.distribSetUp();
-    
-    if (secondConfigSet ) {
-      String zkHost = zkServer.getZkHost();
-      String zkAddress = zkServer.getZkAddress();
-      SolrZkClient zkClient = new SolrZkClient(zkHost, AbstractZkTestCase.TIMEOUT);
-      zkClient.makePath("/solr", false, true);
-      zkClient.close();
-
-      zkClient = new SolrZkClient(zkAddress, AbstractZkTestCase.TIMEOUT);
-
-      File solrhome = new File(TEST_HOME());
-      
-      // for now, always upload the config and schema to the canonical names
-      AbstractZkTestCase.putConfig("conf2", zkClient, solrhome, getCloudSolrConfig(), "solrconfig.xml");
-      AbstractZkTestCase.putConfig("conf2", zkClient, solrhome, "schema.xml", "schema.xml");
-      AbstractZkTestCase.putConfig("conf2", zkClient, solrhome, "enumsConfig.xml", "enumsConfig.xml");
-      
-      AbstractZkTestCase.putConfig("conf2", zkClient, solrhome, "solrconfig.snippet.randomindexconfig.xml");
-      AbstractZkTestCase.putConfig("conf2", zkClient, solrhome, "stopwords.txt");
-      AbstractZkTestCase.putConfig("conf2", zkClient, solrhome, "protwords.txt");
-      AbstractZkTestCase.putConfig("conf2", zkClient, solrhome, "currency.xml");
-      AbstractZkTestCase.putConfig("conf2", zkClient, solrhome, "open-exchange-rates.json");
-      AbstractZkTestCase.putConfig("conf2", zkClient, solrhome, "mapping-ISOLatin1Accent.txt");
-      AbstractZkTestCase.putConfig("conf2", zkClient, solrhome, "old_synonyms.txt");
-      AbstractZkTestCase.putConfig("conf2", zkClient, solrhome, "synonyms.txt");
-      AbstractZkTestCase.putConfig("conf2", zkClient, solrhome, "elevate.xml");
-      zkClient.close();
-    }
-  }
-  
-  protected String getSolrXml() {
-    return "solr.xml";
-  }
 
-  
-  public CollectionsAPIDistributedZkTest() {
-    sliceCount = 2;
+  @BeforeClass
+  public static void setupCluster() throws Exception {
+    configureCluster(4)
+        .addConfig("conf", configset("cloud-minimal"))
+        .addConfig("conf2", configset("cloud-minimal-jmx"))
+        .configure();
   }
-  
-  @Override
-  protected void setDistributedParams(ModifiableSolrParams params) {
 
-    if (r.nextBoolean()) {
-      // don't set shards, let that be figured out from the cloud state
-    } else {
-      // use shard ids rather than physical locations
-      StringBuilder sb = new StringBuilder();
-      for (int i = 0; i < getShardCount(); i++) {
-        if (i > 0)
-          sb.append(',');
-        sb.append("shard" + (i + 3));
-      }
-      params.set("shards", sb.toString());
-    }
+  @Before
+  public void clearCluster() throws Exception {
+    cluster.deleteAllCollections();
   }
 
   @Test
-  @ShardsFixed(num = 4)
-  public void test() throws Exception {
-    waitForRecoveriesToFinish(false); // we need to fix no core tests still
-    testNodesUsedByCreate();
-    testNoConfigSetExist();
-    testCollectionsAPI();
-    testCollectionsAPIAddRemoveStress();
-    testErrorHandling();
-    testNoCollectionSpecified();
-    deletePartiallyCreatedCollection();
-    deleteCollectionRemovesStaleZkCollectionsNode();
-    clusterPropTest();
-    // last
-    deleteCollectionWithDownNodes();
-    addReplicaTest();
+  public void testCreationAndDeletion() throws Exception {
+
+    String collectionName = "created_and_deleted";
+
+    CollectionAdminRequest.createCollection(collectionName, "conf", 1, 1).process(cluster.getSolrClient());
+    assertTrue(CollectionAdminRequest.listCollections(cluster.getSolrClient())
+                  .contains(collectionName));
+
+    CollectionAdminRequest.deleteCollection(collectionName).process(cluster.getSolrClient());
+    assertFalse(CollectionAdminRequest.listCollections(cluster.getSolrClient())
+        .contains(collectionName));
+
+    assertFalse(cluster.getZkClient().exists(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionName, true));
+
+
   }
 
-  private void deleteCollectionRemovesStaleZkCollectionsNode() throws Exception {
-    
-    // we can use this client because we just want base url
-    final String baseUrl = getBaseUrl((HttpSolrClient) clients.get(0));
+  @Test
+  public void deleteCollectionRemovesStaleZkCollectionsNode() throws Exception {
     
     String collectionName = "out_of_sync_collection";
+
+    // manually create a collections zknode
+    cluster.getZkClient().makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionName, true);
+
+    CollectionAdminRequest.deleteCollection(collectionName)
+        .process(cluster.getSolrClient());
+
+    assertFalse(CollectionAdminRequest.listCollections(cluster.getSolrClient())
+                  .contains(collectionName));
     
-    List<Integer> numShardsNumReplicaList = new ArrayList<>();
-    numShardsNumReplicaList.add(2);
-    numShardsNumReplicaList.add(1);
-    
-    
-    cloudClient.getZkStateReader().getZkClient().makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionName, true);
-    
-    ModifiableSolrParams params = new ModifiableSolrParams();
-    params.set("action", CollectionAction.DELETE.toString());
-    params.set("name", collectionName);
-    QueryRequest request = new QueryRequest(params);
-    request.setPath("/admin/collections");
-    
-    // there are remnants of the collection in zk, should work
-    makeRequest(baseUrl, request);
-    
-    assertCollectionNotExists(collectionName, 45);
-    
-    assertFalse(cloudClient.getZkStateReader().getZkClient().exists(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionName, true));
+    assertFalse(cluster.getZkClient().exists(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionName, true));
 
   }
 
-  private void deletePartiallyCreatedCollection() throws Exception {
-    final String baseUrl = getBaseUrl((HttpSolrClient) clients.get(0));
-    String collectionName = "halfdeletedcollection";
+  @Test
+  public void deletePartiallyCreatedCollection() throws Exception {
+
+    final String collectionName = "halfdeletedcollection";
+
+    // create a core that simulates something left over from a partially-deleted collection
     Create createCmd = new Create();
     createCmd.setCoreName("halfdeletedcollection_shard1_replica1");
     createCmd.setCollection(collectionName);
+    createCmd.setCollectionConfigName("conf");
     String dataDir = createTempDir().toFile().getAbsolutePath();
     createCmd.setDataDir(dataDir);
     createCmd.setNumShards(2);
-    if (secondConfigSet) {
-      createCmd.setCollectionConfigName("conf1");
-    }
 
-    makeRequest(baseUrl, createCmd);
+    createCmd.process(cluster.getSolrClient());
 
-    ModifiableSolrParams params = new ModifiableSolrParams();
-    params.set("action", CollectionAction.DELETE.toString());
-    params.set("name", collectionName);
-    QueryRequest request = new QueryRequest(params);
-    request.setPath("/admin/collections");
+    CollectionAdminRequest.deleteCollection(collectionName)
+        .process(cluster.getSolrClient());
 
-    makeRequest(baseUrl, request);
+    assertFalse(CollectionAdminRequest.listCollections(cluster.getSolrClient()).contains(collectionName));
+
+    CollectionAdminRequest.createCollection(collectionName, "conf", 2, 1)
+        .process(cluster.getSolrClient());
+
+    assertTrue(CollectionAdminRequest.listCollections(cluster.getSolrClient()).contains(collectionName));
 
-    assertCollectionNotExists(collectionName, 45);
-    
-    // now creating that collection should work
-    params = new ModifiableSolrParams();
-    params.set("action", CollectionAction.CREATE.toString());
-    params.set("name", collectionName);
-    params.set("numShards", 2);
-    request = new QueryRequest(params);
-    request.setPath("/admin/collections");
-    if (secondConfigSet) {
-      params.set("collection.configName", "conf1");
-    }
-    makeRequest(baseUrl, request);
   }
-  
-  private void deleteCollectionOnlyInZk() throws Exception {
-    final String baseUrl = getBaseUrl((HttpSolrClient) clients.get(0));
-    String collectionName = "onlyinzk";
 
-    cloudClient.getZkStateReader().getZkClient().makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionName, true);
+  @Test
+  public void deleteCollectionOnlyInZk() throws Exception {
 
-    ModifiableSolrParams params = new ModifiableSolrParams();
-    params.set("action", CollectionAction.DELETE.toString());
-    params.set("name", collectionName);
-    QueryRequest request = new QueryRequest(params);
-    request.setPath("/admin/collections");
+    final String collectionName = "onlyinzk";
 
-    makeRequest(baseUrl, request);
+    // create the collections node, but nothing else
+    cluster.getZkClient().makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionName, true);
 
-    assertCollectionNotExists(collectionName, 45);
+    // delete via API - should remove collections node
+    CollectionAdminRequest.deleteCollection(collectionName).process(cluster.getSolrClient());
+    assertFalse(CollectionAdminRequest.listCollections(cluster.getSolrClient()).contains(collectionName));
     
     // now creating that collection should work
-    params = new ModifiableSolrParams();
-    params.set("action", CollectionAction.CREATE.toString());
-    params.set("name", collectionName);
-    params.set("numShards", 2);
-    request = new QueryRequest(params);
-    request.setPath("/admin/collections");
-    if (secondConfigSet) {
-      params.set("collection.configName", "conf1");
-    }
-    makeRequest(baseUrl, request);
-    
-    waitForRecoveriesToFinish(collectionName, false);
-    
-    params = new ModifiableSolrParams();
-    params.set("action", CollectionAction.DELETE.toString());
-    params.set("name", collectionName);
-    request = new QueryRequest(params);
-    request.setPath("/admin/collections");
+    CollectionAdminRequest.createCollection(collectionName, "conf", 2, 1)
+        .process(cluster.getSolrClient());
+    assertTrue(CollectionAdminRequest.listCollections(cluster.getSolrClient()).contains(collectionName));
 
-    makeRequest(baseUrl, request);
   }
-  
-  private void deleteCollectionWithUnloadedCore() throws Exception {
-    final String baseUrl = getBaseUrl((HttpSolrClient) clients.get(0));
-    
-    String collectionName = "corealreadyunloaded";
-    try (SolrClient client = createNewSolrClient("", baseUrl)) {
-      createCollection(null, collectionName,  2, 1, 2, client, null, "conf1");
-    }
-    waitForRecoveriesToFinish(collectionName, false);
 
+  @Test
+  public void testBadActionNames() throws Exception {
+
+    // try a bad action
     ModifiableSolrParams params = new ModifiableSolrParams();
-    params.set("action", CollectionAction.DELETE.toString());
+    params.set("action", "BADACTION");
+    String collectionName = "badactioncollection";
     params.set("name", collectionName);
-    QueryRequest request = new QueryRequest(params);
+    params.set("numShards", 2);
+    final QueryRequest request = new QueryRequest(params);
     request.setPath("/admin/collections");
 
-    NamedList<Object> result = makeRequest(baseUrl, request);
-    System.out.println("result:" + result);
-    Object failure = result.get("failure");
-    assertNull("We expect no failures", failure);
+    expectThrows(Exception.class, () -> {
+      cluster.getSolrClient().request(request);
+    });
 
-    assertCollectionNotExists(collectionName, 45);
-    
-    // now creating that collection should work
-    params = new ModifiableSolrParams();
+  }
+
+  @Test
+  public void testMissingRequiredParameters() {
+
+    ModifiableSolrParams params = new ModifiableSolrParams();
     params.set("action", CollectionAction.CREATE.toString());
-    params.set("name", collectionName);
     params.set("numShards", 2);
-    request = new QueryRequest(params);
-    request.setPath("/admin/collections");
-    if (secondConfigSet) {
-      params.set("collection.configName", "conf1");
-    }
-    makeRequest(baseUrl, request);
-    
-    params = new ModifiableSolrParams();
-    params.set("action", CollectionAction.DELETE.toString());
-    params.set("name", collectionName);
-    request = new QueryRequest(params);
+    // missing required collection parameter
+    final SolrRequest request = new QueryRequest(params);
     request.setPath("/admin/collections");
 
-    makeRequest(baseUrl, request);
+    expectThrows(Exception.class, () -> {
+      cluster.getSolrClient().request(request);
+    });
   }
-  
-  
-  private void deleteCollectionWithDownNodes() throws Exception {
-    String baseUrl = getBaseUrl((HttpSolrClient) clients.get(0));
-    // now try to remove a collection when a couple of its nodes are down
-    if (secondConfigSet) {
-      try (SolrClient client = createNewSolrClient("", baseUrl)) {
-        createCollection(null, "halfdeletedcollection2", 3, 3, 6, client, null, "conf2");
-      }
-    } else {
-      try (SolrClient client = createNewSolrClient("", baseUrl)) {
-        createCollection(null, "halfdeletedcollection2", 3, 3, 6, client, null);
-      }
-    }
-    
-    waitForRecoveriesToFinish("halfdeletedcollection2", false);
-    
-    // stop a couple nodes
-    ChaosMonkey.stop(jettys.get(0));
-    ChaosMonkey.stop(jettys.get(1));
-    
-    // wait for leaders to settle out
-    for (int i = 1; i < 4; i++) {
-      cloudClient.getZkStateReader().getLeaderRetry("halfdeletedcollection2", "shard" + i, 30000);
-    }
-    
-    baseUrl = getBaseUrl((HttpSolrClient) clients.get(2));
-    
-    // remove a collection
-    ModifiableSolrParams params = new ModifiableSolrParams();
-    params.set("action", CollectionAction.DELETE.toString());
-    params.set("name", "halfdeletedcollection2");
-    QueryRequest request = new QueryRequest(params);
-    request.setPath("/admin/collections");
-    
-    makeRequest(baseUrl, request);
 
-    TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS);
-    while (cloudClient.getZkStateReader().getClusterState().hasCollection("halfdeletedcollection2")) {
-      if (timeout.hasTimedOut()) {
-        throw new AssertionError("Timeout waiting to see removed collection leave clusterstate");
-      }
-      
-      Thread.sleep(200);
-    }
+  @Test
+  public void testTooManyReplicas() {
 
-    assertFalse("Still found collection that should be gone", cloudClient.getZkStateReader().getClusterState().hasCollection("halfdeletedcollection2"));
+    CollectionAdminRequest req = CollectionAdminRequest.createCollection("collection", "conf", 2, 10);
 
-  }
+    expectThrows(Exception.class, () -> {
+      cluster.getSolrClient().request(req);
+    });
 
-  private NamedList<Object> makeRequest(String baseUrl, SolrRequest request, int socketTimeout)
-      throws SolrServerException, IOException {
-    try (SolrClient client = createNewSolrClient("", baseUrl)) {
-      ((HttpSolrClient) client).setSoTimeout(socketTimeout);
-      return client.request(request);
-    }
   }
 
-  private NamedList<Object> makeRequest(String baseUrl, SolrRequest request)
-      throws SolrServerException, IOException {
-    try (SolrClient client = createNewSolrClient("", baseUrl)) {
-      ((HttpSolrClient) client).setSoTimeout(30000);
-      return client.request(request);
-    }
-  }
+  @Test
+  public void testMissingNumShards() {
 
-  private void testErrorHandling() throws Exception {
-    final String baseUrl = getBaseUrl((HttpSolrClient) clients.get(0));
-    
-    // try a bad action
-    ModifiableSolrParams params = new ModifiableSolrParams();
-    params.set("action", "BADACTION");
-    String collectionName = "badactioncollection";
-    params.set("name", collectionName);
-    params.set("numShards", 2);
-    QueryRequest request = new QueryRequest(params);
-    request.setPath("/admin/collections");
-    boolean gotExp = false;
-    try {
-      makeRequest(baseUrl, request);
-    } catch (SolrException e) {
-      gotExp = true;
-    }
-    assertTrue(gotExp);
-    
-    
-    // leave out required param name
-    params = new ModifiableSolrParams();
-    params.set("action", CollectionAction.CREATE.toString());
-    params.set("numShards", 2);
-    collectionName = "collection";
-    // No Name
-    // params.set("name", collectionName);
-    if (secondConfigSet) {
-      params.set("collection.configName", "conf1");
-    }
-    request = new QueryRequest(params);
-    request.setPath("/admin/collections");
-    gotExp = false;
-    try {
-      makeRequest(baseUrl, request);
-    } catch (SolrException e) {
-      gotExp = true;
-    }
-    assertTrue(gotExp);
-    
-    // Too many replicas
-    params = new ModifiableSolrParams();
-    params.set("action", CollectionAction.CREATE.toString());
-    collectionName = "collection";
-    params.set("name", collectionName);
-    params.set("numShards", 2);
-    if (secondConfigSet) {
-      params.set("collection.configName", "conf1");
-    }
-    params.set(REPLICATION_FACTOR, 10);
-    request = new QueryRequest(params);
-    request.setPath("/admin/collections");
-    gotExp = false;
-    try {
-      makeRequest(baseUrl, request);
-    } catch (SolrException e) {
-      gotExp = true;
-    }
-    assertTrue(gotExp);
-    
     // No numShards should fail
-    params = new ModifiableSolrParams();
+    ModifiableSolrParams params = new ModifiableSolrParams();
     params.set("action", CollectionAction.CREATE.toString());
-    collectionName = "acollection";
-    params.set("name", collectionName);
+    params.set("name", "acollection");
     params.set(REPLICATION_FACTOR, 10);
-    if (secondConfigSet) {
-      params.set("collection.configName", "conf1");
-    }
-    request = new QueryRequest(params);
+    params.set("collection.configName", "conf");
+
+    final SolrRequest request = new QueryRequest(params);
     request.setPath("/admin/collections");
-    gotExp = false;
-    try {
-      makeRequest(baseUrl, request);
-    } catch (SolrException e) {
-      gotExp = true;
-    }
-    assertTrue(gotExp);
-    
-    // 0 numShards should fail
-    params = new ModifiableSolrParams();
+
+    expectThrows(Exception.class, () -> {
+      cluster.getSolrClient().request(request);
+    });
+
+  }
+
+  @Test
+  public void testZeroNumShards() {
+
+    ModifiableSolrParams params = new ModifiableSolrParams();
     params.set("action", CollectionAction.CREATE.toString());
-    collectionName = "acollection";
-    params.set("name", collectionName);
+    params.set("name", "acollection");
     params.set(REPLICATION_FACTOR, 10);
     params.set("numShards", 0);
-    if (secondConfigSet) {
-      params.set("collection.configName", "conf1");
-    }
-    request = new QueryRequest(params);
+    params.set("collection.configName", "conf");
+
+    final SolrRequest request = new QueryRequest(params);
     request.setPath("/admin/collections");
-    gotExp = false;
-    try {
-      makeRequest(baseUrl, request);
-    } catch (SolrException e) {
-      gotExp = true;
-    }
-    assertTrue(gotExp);
-    
-    // Fail on one node
+    expectThrows(Exception.class, () -> {
+      cluster.getSolrClient().request(request);
+    });
+
+  }
+
+  @Test
+  public void testCreateShouldFailOnExistingCore() throws Exception {
     
     // first we make a core with the core name the collections api
     // will try and use - this will cause our mock to fail
@@ -510,43 +277,33 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
     String dataDir = createTempDir().toFile().getAbsolutePath();
     createCmd.setDataDir(dataDir);
     createCmd.setNumShards(1);
-    if (secondConfigSet) {
-      createCmd.setCollectionConfigName("conf1");
+    createCmd.setCollectionConfigName("conf");
+
+    try (SolrClient client = cluster.getJettySolrRunner(0).newClient()) {
+      client.request(createCmd);
     }
-    makeRequest(baseUrl, createCmd);
-    
+
     createCmd = new Create();
     createCmd.setCoreName("halfcollection_shard1_replica1");
     createCmd.setCollection("halfcollectionblocker2");
     dataDir = createTempDir().toFile().getAbsolutePath();
     createCmd.setDataDir(dataDir);
     createCmd.setNumShards(1);
-    if (secondConfigSet) {
-      createCmd.setCollectionConfigName("conf1");
-    }
-    makeRequest(getBaseUrl((HttpSolrClient) clients.get(1)), createCmd);
-    
-    params = new ModifiableSolrParams();
-    params.set("action", CollectionAction.CREATE.toString());
-    collectionName = "halfcollection";
-    params.set("name", collectionName);
-    params.set("numShards", 2);
-    params.set("wt", "xml");
-    
-    if (secondConfigSet) {
-      params.set("collection.configName", "conf1");
+    createCmd.setCollectionConfigName("conf");
+
+    try (SolrClient client = cluster.getJettySolrRunner(1).newClient()) {
+      client.request(createCmd);
     }
+
+    String nn1 = cluster.getJettySolrRunner(0).getNodeName();
+    String nn2 = cluster.getJettySolrRunner(1).getNodeName();
+
+    CollectionAdminResponse resp = CollectionAdminRequest.createCollection("halfcollection", "conf", 2, 1)
+        .setCreateNodeSet(nn1 + "," + nn2)
+        .process(cluster.getSolrClient());
     
-    String nn1 = jettys.get(0).getCoreContainer().getZkController().getNodeName();
-    String nn2 =  jettys.get(1).getCoreContainer().getZkController().getNodeName();
-    
-    params.set(OverseerCollectionMessageHandler.CREATE_NODE_SET, nn1 + "," + nn2);
-    request = new QueryRequest(params);
-    request.setPath("/admin/collections");
-    NamedList<Object> resp = makeRequest(baseUrl, request, 60000);
-    
-    SimpleOrderedMap success = (SimpleOrderedMap) resp.get("success");
-    SimpleOrderedMap failure = (SimpleOrderedMap) resp.get("failure");
+    SimpleOrderedMap success = (SimpleOrderedMap) resp.getResponse().get("success");
+    SimpleOrderedMap failure = (SimpleOrderedMap) resp.getResponse().get("failure");
 
     assertNotNull(resp.toString(), success);
     assertNotNull(resp.toString(), failure);
@@ -555,10 +312,14 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
     String val2 = failure.getVal(0).toString();
     assertTrue(val1.contains("SolrException") || val2.contains("SolrException"));
   }
-  
-  private void testNoCollectionSpecified() throws Exception {
-    assertFalse(cloudClient.getZkStateReader().getClusterState().hasCollection("corewithnocollection"));
-    assertFalse(cloudClient.getZkStateReader().getClusterState().hasCollection("corewithnocollection2"));
+
+  @Test
+  public void testNoCollectionSpecified() throws Exception {
+
+    // TODO - should we remove this behaviour?
+
+    assertFalse(cluster.getSolrClient().getZkStateReader().getClusterState().hasCollection("corewithnocollection"));
+    assertFalse(cluster.getSolrClient().getZkStateReader().getClusterState().hasCollection("corewithnocollection2"));
     
     // try and create a SolrCore with no collection name
     Create createCmd = new Create();
@@ -567,26 +328,28 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
     String dataDir = createTempDir().toFile().getAbsolutePath();
     createCmd.setDataDir(dataDir);
     createCmd.setNumShards(1);
-    if (secondConfigSet) {
-      createCmd.setCollectionConfigName("conf1");
-    }
+    createCmd.setCollectionConfigName("conf");
 
-    makeRequest(getBaseUrl((HttpSolrClient) clients.get(1)), createCmd);
+    cluster.getSolrClient().request(createCmd);
     
     // try and create a SolrCore with no collection name
     createCmd.setCollection(null);
     createCmd.setCoreName("corewithnocollection2");
 
-    makeRequest(getBaseUrl((HttpSolrClient) clients.get(1)), createCmd);
+    cluster.getSolrClient().request(createCmd);
     
     // in both cases, the collection should have defaulted to the core name
-    cloudClient.getZkStateReader().forceUpdateCollection("corewithnocollection");
-    cloudClient.getZkStateReader().forceUpdateCollection("corewithnocollection2");
-    assertTrue(cloudClient.getZkStateReader().getClusterState().hasCollection("corewithnocollection"));
-    assertTrue(cloudClient.getZkStateReader().getClusterState().hasCollection("corewithnocollection2"));
+    cluster.getSolrClient().getZkStateReader().forceUpdateCollection("corewithnocollection");
+    cluster.getSolrClient().getZkStateReader().forceUpdateCollection("corewithnocollection2");
+    assertTrue(cluster.getSolrClient().getZkStateReader().getClusterState().hasCollection("corewithnocollection"));
+    assertTrue(cluster.getSolrClient().getZkStateReader().getClusterState().hasCollection("corewithnocollection2"));
   }
 
-  private void testNoConfigSetExist() throws Exception {
+  @Test
+  public void testNoConfigSetExist() throws Exception {
+
+    final CloudSolrClient cloudClient = cluster.getSolrClient();
+
     assertFalse(cloudClient.getZkStateReader().getClusterState().hasCollection("corewithnocollection3"));
 
     // try and create a SolrCore with no collection name
@@ -597,14 +360,11 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
     createCmd.setDataDir(dataDir);
     createCmd.setNumShards(1);
     createCmd.setCollectionConfigName("conf123");
-    boolean gotExp = false;
-    try {
-      makeRequest(getBaseUrl((HttpSolrClient) clients.get(1)), createCmd);
-    } catch (SolrException e) {
-      gotExp = true;
-    }
 
-    assertTrue(gotExp);
+    expectThrows(Exception.class, () -> {
+      cluster.getSolrClient().request(createCmd);
+    });
+
     TimeUnit.MILLISECONDS.sleep(200);
     // in both cases, the collection should have defaulted to the core name
     cloudClient.getZkStateReader().forceUpdateCollection("corewithnocollection3");
@@ -618,401 +378,162 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
     }
     assertEquals("replicaCount", 0, replicaCount);
 
-    CollectionAdminRequest.List list = new CollectionAdminRequest.List();
-    CollectionAdminResponse res = new CollectionAdminResponse();
-        res.setResponse(makeRequest(getBaseUrl((HttpSolrClient) clients.get(1)), list));
-    List<String> collections = (List<String>) res.getResponse().get("collections");
-    assertTrue(collections.contains("corewithnocollection3"));
+    // TODO - WTF? shouldn't this *not* contain the collection?
+    assertTrue(CollectionAdminRequest.listCollections(cloudClient).contains("corewithnocollection3"));
+
   }
 
-  private void testNodesUsedByCreate() throws Exception {
-    // we can use this client because we just want base url
-    final String baseUrl = getBaseUrl((HttpSolrClient) clients.get(0));
-    
-    ModifiableSolrParams params = new ModifiableSolrParams();
-    params.set("action", CollectionAction.CREATE.toString());
+  @Test
+  public void testCoresAreDistributedAcrossNodes() throws Exception {
 
-    params.set("numShards", 2);
-    params.set(REPLICATION_FACTOR, 2);
-    String collectionName = "nodes_used_collection";
+    CollectionAdminRequest.createCollection("nodes_used_collection", "conf", 2, 2)
+        .process(cluster.getSolrClient());
 
-    params.set("name", collectionName);
-    
-    if (secondConfigSet) {
-      params.set("collection.configName", "conf1");
-    }
-    
-    QueryRequest request = new QueryRequest(params);
-    request.setPath("/admin/collections");
-    makeRequest(baseUrl, request);
-    
-    List<Integer> numShardsNumReplicaList = new ArrayList<>();
-    numShardsNumReplicaList.add(2);
-    numShardsNumReplicaList.add(2);
-    checkForCollection("nodes_used_collection", numShardsNumReplicaList , null);
+    Set<String> liveNodes = cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes();
 
     List<String> createNodeList = new ArrayList<>();
+    createNodeList.addAll(liveNodes);
 
-    Set<String> liveNodes = cloudClient.getZkStateReader().getClusterState()
-        .getLiveNodes();
-    
-    for (String node : liveNodes) {
-      createNodeList.add(node);
-    }
-
-    DocCollection col = cloudClient.getZkStateReader().getClusterState().getCollection("nodes_used_collection");
-    Collection<Slice> slices = col.getSlices();
-    for (Slice slice : slices) {
-      Collection<Replica> replicas = slice.getReplicas();
-      for (Replica replica : replicas) {
+    DocCollection collection = getCollectionState("nodes_used_collection");
+    for (Slice slice : collection.getSlices()) {
+      for (Replica replica : slice.getReplicas()) {
         createNodeList.remove(replica.getNodeName());
       }
     }
-    assertEquals(createNodeList.toString(), 1, createNodeList.size());
+
+    assertEquals(createNodeList.toString(), 0, createNodeList.size());
+
+  }
+
+  @Test
+  public void testDeleteNonExistentCollection() throws Exception {
+
+    SolrException e = expectThrows(SolrException.class, () -> {
+      CollectionAdminRequest.deleteCollection("unknown_collection").process(cluster.getSolrClient());
+    });
+
+    // create another collection should still work
+    CollectionAdminRequest.createCollection("acollectionafterbaddelete", "conf", 1, 2)
+        .process(cluster.getSolrClient());
+    waitForState("Collection creation after a bad delete failed", "acollectionafterbaddelete",
+        (n, c) -> DocCollection.isFullyActive(n, c, 1, 2));
+  }
+
+  @Test
+  public void testSpecificConfigsets() throws Exception {
+    CollectionAdminRequest.createCollection("withconfigset2", "conf2", 1, 1).process(cluster.getSolrClient());
+    byte[] data = zkClient().getData(ZkStateReader.COLLECTIONS_ZKNODE + "/" + "withconfigset2", null, null, true);
+    assertNotNull(data);
+    ZkNodeProps props = ZkNodeProps.load(data);
+    String configName = props.getStr(ZkController.CONFIGNAME_PROP);
+    assertEquals("conf2", configName);
+  }
+
+  @Test
+  public void testMaxNodesPerShard() throws Exception {
+
+    // test maxShardsPerNode
+    int numLiveNodes = cluster.getJettySolrRunners().size();
+    int numShards = (numLiveNodes/2) + 1;
+    int replicationFactor = 2;
+    int maxShardsPerNode = 1;
+
+    SolrException e = expectThrows(SolrException.class, () -> {
+      CollectionAdminRequest.createCollection("oversharded", "conf", numShards, replicationFactor)
+          .process(cluster.getSolrClient());
+    });
 
   }
 
-  private void testCollectionsAPI() throws Exception {
+  @Test
+  public void testCreateNodeSet() throws Exception {
+
+    JettySolrRunner jetty1 = cluster.getRandomJetty(random());
+    JettySolrRunner jetty2 = cluster.getRandomJetty(random());
 
-    boolean disableLegacy = random().nextBoolean();
-    CloudSolrClient client1 = null;
+    List<String> baseUrls = ImmutableList.of(jetty1.getBaseUrl().toString(), jetty2.getBaseUrl().toString());
 
-    if (disableLegacy) {
-      log.info("legacyCloud=false");
-      client1 = createCloudClient(null);
-      setClusterProp(client1, ZkStateReader.LEGACY_CLOUD, "false");
+    CollectionAdminRequest.createCollection("nodeset_collection", "conf", 2, 1)
+        .setCreateNodeSet(baseUrls.get(0) + "," + baseUrls.get(1))
+        .process(cluster.getSolrClient());
+
+    DocCollection collectionState = getCollectionState("nodeset_collection");
+    for (Replica replica : collectionState.getReplicas()) {
+      String replicaUrl = replica.getCoreUrl();
+      boolean matchingJetty = false;
+      for (String jettyUrl : baseUrls) {
+        if (replicaUrl.startsWith(jettyUrl))
+          matchingJetty = true;
+      }
+      if (matchingJetty == false)
+        fail("Expected replica to be on " + baseUrls + " but was on " + replicaUrl);
     }
 
-    // TODO: fragile - because we dont pass collection.confName, it will only
-    // find a default if a conf set with a name matching the collection name is found, or 
-    // if there is only one conf set. That and the fact that other tests run first in this
-    // env make this pretty fragile
-    
+  }
+
+  @Test
+  public void testCollectionsAPI() throws Exception {
+
     // create new collections rapid fire
-    Map<String,List<Integer>> collectionInfos = new HashMap<>();
     int cnt = random().nextInt(TEST_NIGHTLY ? 6 : 1) + 1;
-    
+    CollectionAdminRequest.Create[] createRequests = new CollectionAdminRequest.Create[cnt];
+
     for (int i = 0; i < cnt; i++) {
-      int numShards = TestUtil.nextInt(random(), 0, getShardCount()) + 1;
-      int replicationFactor = TestUtil.nextInt(random(), 0, 3) + 1;
-      int maxShardsPerNode = (((numShards * replicationFactor) / getCommonCloudSolrClient()
-          .getZkStateReader().getClusterState().getLiveNodes().size())) + 1;
 
-      
-      CloudSolrClient client = null;
-      try {
-        if (i == 0) {
-          // Test if we can create a collection through CloudSolrServer where
-          // you havnt set default-collection
-          // This is nice because you want to be able to create you first
-          // collection using CloudSolrServer, and in such case there is
-          // nothing reasonable to set as default-collection
-          client = createCloudClient(null);
-        } else if (i == 1) {
-          // Test if we can create a collection through CloudSolrServer where
-          // you have set default-collection to a non-existing collection
-          // This is nice because you want to be able to create you first
-          // collection using CloudSolrServer, and in such case there is
-          // nothing reasonable to set as default-collection, but you might want
-          // to use the same CloudSolrServer throughout the entire
-          // lifetime of your client-application, so it is nice to be able to
-          // set a default-collection on this CloudSolrServer once and for all
-          // and use this CloudSolrServer to create the collection
-          client = createCloudClient("awholynewcollection_" + i);
-        }
-        if (secondConfigSet) {
-          createCollection(collectionInfos, "awholynewcollection_" + i,
-              numShards, replicationFactor, maxShardsPerNode, client, null, "conf2");
-        } else {
-          createCollection(collectionInfos, "awholynewcollection_" + i,
-              numShards, replicationFactor, maxShardsPerNode, client, null);
-        }
-      } finally {
-        if (client != null) client.close();
-      }
-    }
-    
-    Set<Entry<String,List<Integer>>> collectionInfosEntrySet = collectionInfos.entrySet();
-    for (Entry<String,List<Integer>> entry : collectionInfosEntrySet) {
-      String collection = entry.getKey();
-      List<Integer> list = entry.getValue();
-      checkForCollection(collection, list, null);
-      
-      String url = getUrlFromZk(collection);
+      int numShards = TestUtil.nextInt(random(), 0, cluster.getJettySolrRunners().size()) + 1;
+      int replicationFactor = TestUtil.nextInt(random(), 0, 3) + 1;
+      int maxShardsPerNode = (((numShards * replicationFactor) / cluster.getJettySolrRunners().size())) + 1;
 
-      try (HttpSolrClient collectionClient = getHttpSolrClient(url)) {
-        // poll for a second - it can take a moment before we are ready to serve
-        waitForNon403or404or503(collectionClient);
-      }
-    }
-    
-    // sometimes we restart one of the jetty nodes
-    if (random().nextBoolean()) {
-      JettySolrRunner jetty = jettys.get(random().nextInt(jettys.size()));
-      ChaosMonkey.stop(jetty);
-      log.info("============ Restarting jetty");
-      ChaosMonkey.start(jetty);
-      
-      for (Entry<String,List<Integer>> entry : collectionInfosEntrySet) {
-        String collection = entry.getKey();
-        List<Integer> list = entry.getValue();
-        checkForCollection(collection, list, null);
-        
-        String url = getUrlFromZk(collection);
-        
-        try (HttpSolrClient collectionClient = getHttpSolrClient(url)) {
-          // poll for a second - it can take a moment before we are ready to serve
-          waitForNon403or404or503(collectionClient);
-        }
-      }
+      createRequests[i]
+          = CollectionAdminRequest.createCollection("awhollynewcollection_" + i, "conf2", numShards, replicationFactor)
+          .setMaxShardsPerNode(maxShardsPerNode);
+      createRequests[i].processAsync(cluster.getSolrClient());
     }
 
-    // sometimes we restart zookeeper
-    if (random().nextBoolean()) {
-      zkServer.shutdown();
-      log.info("============ Restarting zookeeper");
-      zkServer = new ZkTestServer(zkServer.getZkDir(), zkServer.getPort());
-      zkServer.run();
-    }
-    
-    // sometimes we cause a connection loss - sometimes it will hit the overseer
-    if (random().nextBoolean()) {
-      JettySolrRunner jetty = jettys.get(random().nextInt(jettys.size()));
-      ChaosMonkey.causeConnectionLoss(jetty);
-    }
-    
-    ZkStateReader zkStateReader = getCommonCloudSolrClient().getZkStateReader();
-    for (int j = 0; j < cnt; j++) {
-      waitForRecoveriesToFinish("awholynewcollection_" + j, zkStateReader, false);
-      
-      if (secondConfigSet) {
-        // let's see if they are using the second config set
-        byte[] data = zkStateReader.getZkClient()
-            .getData(
-                ZkStateReader.COLLECTIONS_ZKNODE + "/" + "awholynewcollection_"
-                    + j, null, null, true);
-        assertNotNull(data);
-        ZkNodeProps props = ZkNodeProps.load(data);
-        String configName = props.getStr(ZkController.CONFIGNAME_PROP);
-        assertEquals("conf2", configName);
-        
-      }
+    for (int i = 0; i < cnt; i++) {
+      String collectionName = "awhollynewcollection_" + i;
+      final int j = i;
+      waitForState("Expected to see collection " + collectionName, collectionName,
+          (n, c) -> {
+            CollectionAdminRequest.Create req = createRequests[j];
+            return DocCollection.isFullyActive(n, c, req.getNumShards(), req.getReplicationFactor());
+          });
     }
-    
-    checkInstanceDirs(jettys.get(0)); 
-    
-    List<String> collectionNameList = new ArrayList<>();
-    collectionNameList.addAll(collectionInfos.keySet());
-    String collectionName = collectionNameList.get(random().nextInt(collectionNameList.size()));
-    
-    String url = getUrlFromZk(collectionName);
 
-    try (HttpSolrClient collectionClient = getHttpSolrClient(url)) {
+    cluster.injectChaos(random());
 
-      // lets try and use the solrj client to index a couple documents
-      SolrInputDocument doc1 = getDoc(id, 6, i1, -600, tlong, 600, t1,
-          "humpty dumpy sat on a wall");
-      SolrInputDocument doc2 = getDoc(id, 7, i1, -600, tlong, 600, t1,
-          "humpty dumpy3 sat on a walls");
-      SolrInputDocument doc3 = getDoc(id, 8, i1, -600, tlong, 600, t1,
-          "humpty dumpy2 sat on a walled");
+    for (int i = 0; i < cluster.getJettySolrRunners().size(); i++) {
+      checkInstanceDirs(cluster.getJettySolrRunner(i));
+    }
 
-      collectionClient.add(doc1);
+    String collectionName = createRequests[random().nextInt(createRequests.length)].getCollectionName();
 
-      collectionClient.add(doc2);
+    new UpdateRequest()
+        .add("id", "6")
+        .add("id", "7")
+        .add("id", "8")
+        .commit(cluster.getSolrClient(), collectionName);
+    assertEquals(3, cluster.getSolrClient().query(collectionName, new SolrQuery("*:*")).getResults().getNumFound());
 
-      collectionClient.add(doc3);
+    checkNoTwoShardsUseTheSameIndexDir();
+  }
 
-      collectionClient.commit();
+  @Test
+  public void testCollectionReload() throws Exception {
 
-      assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
-    }
+    final String collectionName = "reloaded_collection";
+    CollectionAdminRequest.createCollection(collectionName, "conf", 2, 2).process(cluster.getSolrClient());
 
-    // lets try a collection reload
-    
     // get core open times
-    Map<String,Long> urlToTimeBefore = new HashMap<>();
+    Map<String, Long> urlToTimeBefore = new HashMap<>();
     collectStartTimes(collectionName, urlToTimeBefore);
     assertTrue(urlToTimeBefore.size() > 0);
-    ModifiableSolrParams params = new ModifiableSolrParams();
-    params.set("action", CollectionAction.RELOAD.toString());
-    params.set("name", collectionName);
-    QueryRequest request = new QueryRequest(params);
-    request.setPath("/admin/collections");
-    
-    // we can use this client because we just want base url
-    final String baseUrl = getBaseUrl((HttpSolrClient) clients.get(0));
-    
-    makeRequest(baseUrl, request);
+
+    CollectionAdminRequest.reloadCollection(collectionName).processAsync(cluster.getSolrClient());
 
     // reloads may take a short while
     boolean allTimesAreCorrect = waitForReloads(collectionName, urlToTimeBefore);
     assertTrue("some core start times did not change on reload", allTimesAreCorrect);
-    
-    
-    waitForRecoveriesToFinish("awholynewcollection_" + (cnt - 1), zkStateReader, false);
-    
-    // remove a collection
-    params = new ModifiableSolrParams();
-    params.set("action", CollectionAction.DELETE.toString());
-    params.set("name", collectionName);
-    request = new QueryRequest(params);
-    request.setPath("/admin/collections");
- 
-    makeRequest(baseUrl, request);
-    
-    // ensure its out of the state
-    assertCollectionNotExists(collectionName, 45);
-    
-    //collectionNameList.remove(collectionName);
-
-    // remove an unknown collection
-    params = new ModifiableSolrParams();
-    params.set("action", CollectionAction.DELETE.toString());
-    params.set("name", "unknown_collection");
-    request = new QueryRequest(params);
-    request.setPath("/admin/collections");
- 
-    boolean exp = false;
-    try {
-      makeRequest(baseUrl, request);
-    } catch (SolrException e) {
-      exp = true;
-    }
-    assertTrue("Expected exception", exp);
-    
-    // create another collection should still work
-    params = new ModifiableSolrParams();
-    params.set("action", CollectionAction.CREATE.toString());
-
-    params.set("numShards", 1);
-    params.set(REPLICATION_FACTOR, 2);
-    collectionName = "acollectionafterbaddelete";
-
-    params.set("name", collectionName);
-    if (secondConfigSet) {
-      params.set("collection.configName", "conf1");
-    }
-    request = new QueryRequest(params);
-    request.setPath("/admin/collections");
-    makeRequest(baseUrl, request);
-    
-    List<Integer> list = new ArrayList<>(2);
-    list.add(1);
-    list.add(2);
-    checkForCollection(collectionName, list, null);
-    
-    url = getUrlFromZk(collectionName);
-    
-    try (HttpSolrClient collectionClient = getHttpSolrClient(url)) {
-      // poll for a second - it can take a moment before we are ready to serve
-      waitForNon403or404or503(collectionClient);
-    }
-
-    for (int j = 0; j < cnt; j++) {
-      waitForRecoveriesToFinish(collectionName, zkStateReader, false);
-    }
-
-    // test maxShardsPerNode
-    int numLiveNodes = getCommonCloudSolrClient().getZkStateReader().getClusterState().getLiveNodes().size();
-    int numShards = (numLiveNodes/2) + 1;
-    int replicationFactor = 2;
-    int maxShardsPerNode = 1;
-    collectionInfos = new HashMap<>();
-    try (CloudSolrClient client = createCloudClient("awholynewcollection_" + cnt)) {
-      exp = false;
-      try {
-        createCollection(collectionInfos, "awholynewcollection_" + cnt,
-            numShards, replicationFactor, maxShardsPerNode, client, null, "conf1");
-      } catch (SolrException e) {
-        exp = true;
-      }
-      assertTrue("expected exception", exp);
-    }
-
-    
-    // Test createNodeSet
-    numLiveNodes = getCommonCloudSolrClient().getZkStateReader().getClusterState().getLiveNodes().size();
-    List<String> createNodeList = new ArrayList<>();
-    int numOfCreateNodes = numLiveNodes/2;
-    assertFalse("createNodeSet test is pointless with only " + numLiveNodes + " nodes running", numOfCreateNodes == 0);
-    int i = 0;
-    for (String liveNode : getCommonCloudSolrClient().getZkStateReader().getClusterState().getLiveNodes()) {
-      if (i < numOfCreateNodes) {
-        createNodeList.add(liveNode);
-        i++;
-      } else {
-        break;
-      }
-    }
-    maxShardsPerNode = 2;
-    numShards = createNodeList.size() * maxShardsPerNode;
-    replicationFactor = 1;
-    collectionInfos = new HashMap<>();
-
-    try (SolrClient client = createCloudClient("awholynewcollection_" + (cnt+1))) {
-      CollectionAdminResponse res = createCollection(collectionInfos, "awholynewcollection_" + (cnt+1), numShards, replicationFactor, maxShardsPerNode, client, StrUtils.join(createNodeList, ','), "conf1");
-      assertTrue(res.isSuccess());
-    }
-    checkForCollection(collectionInfos.keySet().iterator().next(), collectionInfos.entrySet().iterator().next().getValue(), createNodeList);
-    
-    checkNoTwoShardsUseTheSameIndexDir();
-    if(disableLegacy) {
-      setClusterProp(client1, ZkStateReader.LEGACY_CLOUD, null);
-      client1.close();
-    }
-  }
-  
-  private void testCollectionsAPIAddRemoveStress() throws Exception {
-    
-    class CollectionThread extends Thread {
-      
-      public CollectionThread(String name) {
-        super(name);
-      }
-      
-      public void run() {
-        // create new collections rapid fire
-        Map<String,List<Integer>> collectionInfos = new HashMap<>();
-        int cnt = random().nextInt(TEST_NIGHTLY ? 13 : 1) + 1;
-        
-        for (int i = 0; i < cnt; i++) {
-          String collectionName = "awholynewstresscollection_" + getName() + "_" + i;
-          int numShards = TestUtil.nextInt(random(), 0, getShardCount() * 2) + 1;
-          int replicationFactor = TestUtil.nextInt(random(), 0, 3) + 1;
-          int maxShardsPerNode = (((numShards * 2 * replicationFactor) / getCommonCloudSolrClient()
-              .getZkStateReader().getClusterState().getLiveNodes().size())) + 1;
-
-          try (CloudSolrClient client = createCloudClient(i == 1 ? collectionName : null)) {
-
-            createCollection(collectionInfos, collectionName,
-                numShards, replicationFactor, maxShardsPerNode, client, null,
-                "conf1");
-
-            // remove collection
-            CollectionAdminRequest.Delete delete = new CollectionAdminRequest.Delete()
-                    .setCollectionName(collectionName);
-            client.request(delete);
-          } catch (SolrServerException | IOException e) {
-            e.printStackTrace();
-            throw new RuntimeException(e);
-          }
-        }
-      }
-    }
-    List<Thread> threads = new ArrayList<>();
-    int numThreads = TEST_NIGHTLY ? 6 : 2;
-    for (int i = 0; i < numThreads; i++) {
-      CollectionThread thread = new CollectionThread("collection" + i);
-      threads.add(thread);
-    }
-    
-    for (Thread thread : threads) {
-      thread.start();
-    }
-    for (Thread thread : threads) {
-      thread.join();
-    }
   }
 
   private void checkInstanceDirs(JettySolrRunner jetty) throws IOException {
@@ -1024,7 +545,7 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
       Path instancedir = (Path) core.getStatistics().get("instanceDir");
       assertTrue("Could not find expected core.properties file", Files.exists(instancedir.resolve("core.properties")));
 
-      Path expected = Paths.get(jetty.getSolrHome()).toAbsolutePath().resolve("cores").resolve(core.getName());
+      Path expected = Paths.get(jetty.getSolrHome()).toAbsolutePath().resolve(core.getName());
 
       assertTrue("Expected: " + expected + "\nFrom core stats: " + instancedir, Files.isSameFile(expected, instancedir));
 
@@ -1061,23 +582,14 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
     return allTimesAreCorrect;
   }
 
-  private void collectStartTimes(String collectionName,
-      Map<String,Long> urlToTime) throws SolrServerException, IOException {
-    ClusterState clusterState = getCommonCloudSolrClient().getZkStateReader()
-        .getClusterState();
-//    Map<String,DocCollection> collections = clusterState.getCollectionStates();
-    if (clusterState.hasCollection(collectionName)) {
-      Map<String,Slice> slices = clusterState.getSlicesMap(collectionName);
-
-      Iterator<Entry<String,Slice>> it = slices.entrySet().iterator();
-      while (it.hasNext()) {
-        Entry<String,Slice> sliceEntry = it.next();
-        Map<String,Replica> sliceShards = sliceEntry.getValue().getReplicasMap();
-        Iterator<Entry<String,Replica>> shardIt = sliceShards.entrySet()
-            .iterator();
-        while (shardIt.hasNext()) {
-          Entry<String,Replica> shardEntry = shardIt.next();
-          ZkCoreNodeProps coreProps = new ZkCoreNodeProps(shardEntry.getValue());
+  private void collectStartTimes(String collectionName, Map<String,Long> urlToTime)
+      throws SolrServerException, IOException {
+
+    DocCollection collectionState = getCollectionState(collectionName);
+    if (collectionState != null) {
+      for (Slice shard : collectionState) {
+        for (Replica replica : shard) {
+          ZkCoreNodeProps coreProps = new ZkCoreNodeProps(replica);
           CoreAdminResponse mcr;
           try (HttpSolrClient server = getHttpSolrClient(coreProps.getBaseUrl())) {
             mcr = CoreAdminRequest.getStatus(coreProps.getCoreName(), server);
@@ -1087,59 +599,9 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
         }
       }
     } else {
-      throw new IllegalArgumentException("Could not find collection in :"
-          + clusterState.getCollectionsMap());
-    }
-  }
-
-  private String getUrlFromZk(String collection) {
-    ClusterState clusterState = getCommonCloudSolrClient().getZkStateReader().getClusterState();
-    Map<String,Slice> slices = clusterState.getSlicesMap(collection);
-    
-    if (slices == null) {
-      throw new SolrException(ErrorCode.BAD_REQUEST, "Could not find collection:" + collection);
-    }
-    
-    for (Map.Entry<String,Slice> entry : slices.entrySet()) {
-      Slice slice = entry.getValue();
-      Map<String,Replica> shards = slice.getReplicasMap();
-      Set<Map.Entry<String,Replica>> shardEntries = shards.entrySet();
-      for (Map.Entry<String,Replica> shardEntry : shardEntries) {
-        final ZkNodeProps node = shardEntry.getValue();
-        if (clusterState.liveNodesContain(node.getStr(ZkStateReader.NODE_NAME_PROP))) {
-          return ZkCoreNodeProps.getCoreUrl(node.getStr(ZkStateReader.BASE_URL_PROP), collection); //new ZkCoreNodeProps(node).getCoreUrl();
-        }
-      }
+      throw new IllegalArgumentException("Could not find collection " + collectionName);
     }
-    
-    throw new RuntimeException("Could not find a live node for collection:" + collection);
   }
-
-/*  private void waitForNon403or404or503(HttpSolrServer collectionClient)
-      throws Exception {
-    SolrException exp = null;
-    long timeoutAt = System.currentTimeMillis() + 30000;
-    
-    while (System.currentTimeMillis() < timeoutAt) {
-      boolean missing = false;
-
-      try {
-        collectionClient.query(new SolrQuery("*:*"));
-      } catch (SolrException e) {
-        if (!(e.code() == 403 || e.code() == 503 || e.code() == 404)) {
-          throw e;
-        }
-        exp = e;
-        missing = true;
-      }
-      if (!missing) {
-        return;
-      }
-      Thread.sleep(50);
-    }
-
-    fail("Could not find the new collection - " + exp.code() + " : " + collectionClient.getBaseURL());
-  }*/
   
   private void checkNoTwoShardsUseTheSameIndexDir() throws Exception {
     Map<String, Set<String>> indexDirToShardNamesMap = new HashMap<>();
@@ -1189,142 +651,70 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
 
   }
 
-  private void addReplicaTest() throws Exception {
+  @Test
+  public void addReplicaTest() throws Exception {
     String collectionName = "addReplicaColl";
-    try (CloudSolrClient client = createCloudClient(null)) {
-      createCollection(collectionName, client, 2, 2);
-      String newReplicaName = Assign.assignNode(client.getZkStateReader().getClusterState().getCollection(collectionName));
-      ArrayList<String> nodeList = new ArrayList<>(client.getZkStateReader().getClusterState().getLiveNodes());
-      Collections.shuffle(nodeList, random());
-
-      Replica newReplica = doAddReplica(collectionName, "shard1",
-          Assign.assignNode(client.getZkStateReader().getClusterState().getCollection(collectionName)),
-          nodeList.get(0), client, null);
-
-      log.info("newReplica {},\n{} ", newReplica, client.getZkStateReader().getBaseUrlForNodeName(nodeList.get(0)));
-
-      assertEquals("Replica should be created on the right node",
-          client.getZkStateReader().getBaseUrlForNodeName(nodeList.get(0)), newReplica.getStr(ZkStateReader.BASE_URL_PROP));
-
-      Properties props = new Properties();
-      String instancePathStr = createTempDir().toString();
-      props.put(CoreAdminParams.INSTANCE_DIR, instancePathStr); //Use name via the property.instanceDir method
-      newReplica = doAddReplica(collectionName, "shard2",
-          Assign.assignNode(client.getZkStateReader().getClusterState().getCollection(collectionName)),
-          null, client, props);
-      assertNotNull(newReplica);
-
-      try (HttpSolrClient coreclient = getHttpSolrClient(newReplica.getStr(ZkStateReader.BASE_URL_PROP))) {
-        CoreAdminResponse status = CoreAdminRequest.getStatus(newReplica.getStr("core"), coreclient);
-        NamedList<Object> coreStatus = status.getCoreStatus(newReplica.getStr("core"));
-        String instanceDirStr = (String) coreStatus.get("instanceDir");
-        assertEquals(Paths.get(instanceDirStr).toString(), instancePathStr);
-      }
 
-      //Test to make sure we can't create another replica with an existing core_name of that collection
-      String coreName = newReplica.getStr(CORE_NAME_PROP);
-      ModifiableSolrParams params = new ModifiableSolrParams();
-      params.set("action", "addreplica");
-      params.set("collection", collectionName);
-      params.set("shard", "shard1");
-      params.set("name", coreName);
-      QueryRequest request = new QueryRequest(params);
-      request.setPath("/admin/collections");
-      try {
-        client.request(request);
-        fail("AddReplica call should not have been successful");
-      } catch (SolrException e) {
-        assertTrue(e.getMessage().contains("Another replica with the same core name already exists for this collection"));
-      }
+    CollectionAdminRequest.createCollection(collectionName, "conf", 2, 2)
+        .setMaxShardsPerNode(4)
+        .process(cluster.getSolrClient());
 
+    ArrayList<String> nodeList
+        = new ArrayList<>(cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes());
+    Collections.shuffle(nodeList, random());
 
-      // Check that specifying property.name works. DO NOT remove this when the "name" property is deprecated
-      // for ADDREPLICA, this is "property.name". See SOLR-7132
-      props = new Properties();
-      props.put(CoreAdminParams.NAME, "propertyDotName");
+    String newReplicaName = Assign.assignNode(getCollectionState(collectionName));
+    CollectionAdminRequest.addReplicaToShard(collectionName, "shard1")
+        .setNode(nodeList.get(0))
+        .process(cluster.getSolrClient());
 
-      newReplica = doAddReplica(collectionName, "shard1",
-          Assign.assignNode(client.getZkStateReader().getClusterState().getCollection(collectionName)),
-          nodeList.get(0), client, props);
-      assertEquals("'core' should be 'propertyDotName' ", "propertyDotName", newReplica.getStr("core"));
-    }
-  }
-
-  private Replica doAddReplica(String collectionName, String shard, String newReplicaName, String node,
-                               CloudSolrClient client, Properties props) throws IOException, SolrServerException {
-    CollectionAdminRequest.AddReplica addReplica = new CollectionAdminRequest.AddReplica();
+    Replica newReplica = getCollectionState(collectionName).getReplica(newReplicaName);
 
-    addReplica.setCollectionName(collectionName);
-    addReplica.setShardName(shard);
-    if (node != null) {
-      addReplica.setNode(node);
-    }
-    if (props != null) {
-      addReplica.setProperties(props);
-    }
-    client.request(addReplica);
-    TimeOut timeout = new TimeOut(3, TimeUnit.SECONDS);
-    Replica newReplica = null;
+    assertEquals("Replica should be created on the right node",
+        cluster.getSolrClient().getZkStateReader().getBaseUrlForNodeName(nodeList.get(0)),
+        newReplica.getStr(ZkStateReader.BASE_URL_PROP));
 
-    for (; ! timeout.hasTimedOut(); ) {
-      Slice slice = client.getZkStateReader().getClusterState().getSlice(collectionName, shard);
-      newReplica = slice.getReplica(newReplicaName);
-    }
+    newReplicaName = Assign.assignNode(getCollectionState(collectionName));
+    Path instancePath = createTempDir();
+    CollectionAdminRequest.addReplicaToShard(collectionName, "shard1")
+        .withProperty(CoreAdminParams.INSTANCE_DIR, instancePath.toString())
+        .process(cluster.getSolrClient());
 
+    newReplica = getCollectionState(collectionName).getReplica(newReplicaName);
     assertNotNull(newReplica);
-    return newReplica;
-  }
-  @Override
-  protected QueryResponse queryServer(ModifiableSolrParams params) throws SolrServerException, IOException {
 
-    if (r.nextBoolean())
-      return super.queryServer(params);
+    try (HttpSolrClient coreclient = getHttpSolrClient(newReplica.getStr(ZkStateReader.BASE_URL_PROP))) {
+      CoreAdminResponse status = CoreAdminRequest.getStatus(newReplica.getStr("core"), coreclient);
+      NamedList<Object> coreStatus = status.getCoreStatus(newReplica.getStr("core"));
+      String instanceDirStr = (String) coreStatus.get("instanceDir");
+      assertEquals(instanceDirStr, instancePath.toString());
+    }
 
-    if (r.nextBoolean())
-      params.set("collection",DEFAULT_COLLECTION);
+    //Test to make sure we can't create another replica with an existing core_name of that collection
+    String coreName = newReplica.getStr(CORE_NAME_PROP);
+    SolrException e = expectThrows(SolrException.class, () -> {
+      ModifiableSolrParams params = new ModifiableSolrParams();
+      params.set("action", "addreplica");
+      params.set("collection", collectionName);
+      params.set("shard", "shard1");
+      params.set("name", coreName);
+      QueryRequest request = new QueryRequest(params);
+      request.setPath("/admin/collections");
+      cluster.getSolrClient().request(request);
+    });
 
-    QueryResponse rsp = getCommonCloudSolrClient().query(params);
-    return rsp;
-  }
+    assertTrue(e.getMessage().contains("Another replica with the same core name already exists for this collection"));
 
-  protected void createCollection(String COLL_NAME, CloudSolrClient client,int replicationFactor , int numShards ) throws Exception {
-    int maxShardsPerNode = ((((numShards+1) * replicationFactor) / getCommonCloudSolrClient()
-        .getZkStateReader().getClusterState().getLiveNodes().size())) + 1;
-
-    Map<String, Object> props = makeMap(
-        REPLICATION_FACTOR, replicationFactor,
-        MAX_SHARDS_PER_NODE, maxShardsPerNode,
-        NUM_SLICES, numShards);
-    Map<String,List<Integer>> collectionInfos = new HashMap<>();
-    createCollection(collectionInfos, COLL_NAME, props, client, "conf1");
-    assertAllActive(COLL_NAME, getCommonCloudSolrClient().getZkStateReader());
-    
-  }
-  
-  private void clusterPropTest() throws Exception {
-    try (CloudSolrClient client = createCloudClient(null)) {
-      assertTrue("cluster property not set", setClusterProp(client, ZkStateReader.LEGACY_CLOUD, "false"));
-      assertTrue("cluster property not unset ", setClusterProp(client, ZkStateReader.LEGACY_CLOUD, null));
-    }
-  }
+    // Check that specifying property.name works. DO NOT remove this when the "name" property is deprecated
+    // for ADDREPLICA, this is "property.name". See SOLR-7132
+    newReplicaName = Assign.assignNode(getCollectionState(collectionName));
+    CollectionAdminRequest.addReplicaToShard(collectionName, "shard1")
+        .withProperty(CoreAdminParams.NAME, "propertyDotName")
+        .process(cluster.getSolrClient());
 
-  public static boolean setClusterProp(CloudSolrClient client, String name , String val) throws SolrServerException, IOException, InterruptedException {
-    Map m = makeMap(
-        "action", CollectionAction.CLUSTERPROP.toLower(),
-        "name",name);
+    newReplica = getCollectionState(collectionName).getReplica(newReplicaName);
+    assertEquals("'core' should be 'propertyDotName' ", "propertyDotName", newReplica.getStr("core"));
 
-    if(val != null) m.put("val", val);
-    SolrRequest request = new QueryRequest(new MapSolrParams(m));
-    request.setPath("/admin/collections");
-    client.request(request);
-
-    TimeOut timeout = new TimeOut(3, TimeUnit.SECONDS);
-    boolean changed = false;
-    while(! timeout.hasTimedOut()){
-      Thread.sleep(10);
-      changed = Objects.equals(val,client.getZkStateReader().getClusterProperty(name, (String) null));
-      if(changed) break;
-    }
-    return changed;
   }
+
 }
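
Note: the recurring conversion in the diff above replaces the old
try/catch-plus-boolean idiom with the test framework's expectThrows
helper, which fails the test itself when no exception (or the wrong
type) is thrown and hands the exception back for further assertions.
A minimal before/after sketch, assuming a SolrClient named "client"
and a prepared SolrRequest named "request":

    // Old idiom: track the failure by hand.
    boolean gotExp = false;
    try {
      client.request(request);
    } catch (SolrException e) {
      gotExp = true;
    }
    assertTrue(gotExp);

    // New idiom: the type check and the "did it throw at all" check
    // are built in, and the returned exception can be inspected.
    SolrException e = expectThrows(SolrException.class, () -> client.request(request));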

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f56d111a/solr/core/src/test/org/apache/solr/cloud/CreateCollectionCleanupTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/CreateCollectionCleanupTest.java b/solr/core/src/test/org/apache/solr/cloud/CreateCollectionCleanupTest.java
index 989e1af..df7a2e2 100644
--- a/solr/core/src/test/org/apache/solr/cloud/CreateCollectionCleanupTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/CreateCollectionCleanupTest.java
@@ -77,8 +77,7 @@ public class CreateCollectionCleanupTest extends SolrCloudTestCase {
     assertFalse(rsp.isSuccess());
 
     // Confirm using LIST that the collection does not exist
-    CollectionAdminRequest.List list = CollectionAdminRequest.listCollections();
-    rsp = list.process(cloudClient);
-    assertFalse(((ArrayList) rsp.getResponse().get("collections")).contains("foo"));
+    assertFalse(CollectionAdminRequest.listCollections(cloudClient).contains("foo"));
+
   }
 }
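
The listCollections helper used above issues the LIST action and unpacks
the response into the collection names, so callers no longer dig through
the NamedList themselves. Equivalent usage, assuming a live
CloudSolrClient named "client":

    List<String> collections = CollectionAdminRequest.listCollections(client);
    assertFalse(collections.contains("foo"));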


[04/50] [abbrv] lucene-solr:apiv2: LUCENE-7429: AnalyzerWrapper can now wrap the normalization analysis chain too.

Posted by sa...@apache.org.
LUCENE-7429: AnalyzerWrapper can now wrap the normalization analysis chain too.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/af600480
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/af600480
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/af600480

Branch: refs/heads/apiv2
Commit: af60048097a83220aae135b09d209a0f2d4ba3c6
Parents: 2172f3e
Author: Adrien Grand <jp...@gmail.com>
Authored: Thu Oct 27 16:27:45 2016 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Thu Oct 27 16:27:45 2016 +0200

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |   3 +
 .../lucene/analysis/custom/CustomAnalyzer.java  |   2 +-
 .../lucene/collation/CollationKeyAnalyzer.java  |   2 +-
 .../org/apache/lucene/analysis/Analyzer.java    |   9 +-
 .../apache/lucene/analysis/AnalyzerWrapper.java |  50 ++++++++-
 .../analysis/DelegatingAnalyzerWrapper.java     |  14 ++-
 .../analysis/TestDelegatingAnalyzerWrapper.java | 107 +++++++++++++++++++
 .../lucene/analysis/MockBytesAnalyzer.java      |   2 +-
 .../apache/solr/analysis/TokenizerChain.java    |   2 +-
 9 files changed, 180 insertions(+), 11 deletions(-)
----------------------------------------------------------------------
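
In practical terms: Analyzer.normalize(String, String) performs
query-time normalization of terms that skip tokenization (lowercasing,
folding, etc.). Before this change a wrapper ignored its delegate's
normalization chain; now the call is delegated, as the new test below
demonstrates. A condensed sketch (MockAnalyzer with lowercasing enabled,
so normalization lowercases too):

    Analyzer delegate = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, true);
    Analyzer wrapper = new DelegatingAnalyzerWrapper(Analyzer.GLOBAL_REUSE_STRATEGY) {
      @Override
      protected Analyzer getWrappedAnalyzer(String fieldName) {
        return delegate;
      }
    };
    // Previously the wrapper would leave "Ab C" untouched; with this
    // patch it applies the delegate's normalization chain.
    assertEquals(new BytesRef("ab c"), wrapper.normalize("field", "Ab C"));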


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/af600480/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index d574a8a..5a6601b 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -107,6 +107,9 @@ Bug Fixes
   allTermsRequired is false and context filters are specified (Mike
   McCandless)
 
+* LUCENE-7429: AnalyzerWrapper can now modify the normalization chain too and
+  DelegatingAnalyzerWrapper does the right thing automatically. (Adrien Grand)
+
 Improvements
 
 * LUCENE-7439: FuzzyQuery now matches all terms within the specified

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/af600480/lucene/analysis/common/src/java/org/apache/lucene/analysis/custom/CustomAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/custom/CustomAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/custom/CustomAnalyzer.java
index b2de5e8..466642c 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/custom/CustomAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/custom/CustomAnalyzer.java
@@ -131,7 +131,7 @@ public final class CustomAnalyzer extends Analyzer {
 
   @Override
   protected TokenStreamComponents createComponents(String fieldName) {
-    final Tokenizer tk = tokenizer.create(attributeFactory());
+    final Tokenizer tk = tokenizer.create(attributeFactory(fieldName));
     TokenStream ts = tk;
     for (final TokenFilterFactory filter : tokenFilters) {
       ts = filter.create(ts);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/af600480/lucene/analysis/common/src/java/org/apache/lucene/collation/CollationKeyAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/collation/CollationKeyAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/collation/CollationKeyAnalyzer.java
index ea98731..4d0f039 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/collation/CollationKeyAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/collation/CollationKeyAnalyzer.java
@@ -85,7 +85,7 @@ public final class CollationKeyAnalyzer extends Analyzer {
   }
 
   @Override
-  protected AttributeFactory attributeFactory() {
+  protected AttributeFactory attributeFactory(String fieldName) {
     return factory;
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/af600480/lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java b/lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java
index aa4b42d..3a5d41c 100644
--- a/lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java
+++ b/lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java
@@ -238,7 +238,7 @@ public abstract class Analyzer implements Closeable {
         throw new IllegalStateException("Normalization threw an unexpected exception", e);
       }
 
-      final AttributeFactory attributeFactory = attributeFactory();
+      final AttributeFactory attributeFactory = attributeFactory(fieldName);
       try (TokenStream ts = normalize(fieldName,
           new StringTokenStream(attributeFactory, filteredText, text.length()))) {
         final TermToBytesRefAttribute termAtt = ts.addAttribute(TermToBytesRefAttribute.class);
@@ -286,9 +286,10 @@ public abstract class Analyzer implements Closeable {
 
   /** Return the {@link AttributeFactory} to be used for
    *  {@link #tokenStream analysis} and
-   *  {@link #normalize(String, String) normalization}. The default
-   *  implementation returns {@link TokenStream#DEFAULT_TOKEN_ATTRIBUTE_FACTORY}. */
-  protected AttributeFactory attributeFactory() {
+   *  {@link #normalize(String, String) normalization} on the given
+   *  {@code fieldName}. The default implementation returns
+   *  {@link TokenStream#DEFAULT_TOKEN_ATTRIBUTE_FACTORY}. */
+  protected AttributeFactory attributeFactory(String fieldName) {
     return TokenStream.DEFAULT_TOKEN_ATTRIBUTE_FACTORY;
   }
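
Since attributeFactory(...) now receives the field name, an Analyzer can
choose an AttributeFactory per field instead of one for the whole
instance (CollationKeyAnalyzer above keeps its single factory by
ignoring the argument). A hypothetical per-field override, assuming a
custom factory held in a field named "byteTermFactory":

    @Override
    protected AttributeFactory attributeFactory(String fieldName) {
      // Hypothetical: only the "sort_key" field needs the custom factory.
      if ("sort_key".equals(fieldName)) {
        return byteTermFactory;
      }
      return TokenStream.DEFAULT_TOKEN_ATTRIBUTE_FACTORY;
    }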
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/af600480/lucene/core/src/java/org/apache/lucene/analysis/AnalyzerWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/analysis/AnalyzerWrapper.java b/lucene/core/src/java/org/apache/lucene/analysis/AnalyzerWrapper.java
index 1e5640f..d23d004 100644
--- a/lucene/core/src/java/org/apache/lucene/analysis/AnalyzerWrapper.java
+++ b/lucene/core/src/java/org/apache/lucene/analysis/AnalyzerWrapper.java
@@ -19,6 +19,8 @@ package org.apache.lucene.analysis;
 
 import java.io.Reader;
 
+import org.apache.lucene.util.AttributeFactory;
+
 /**
  * Extension to {@link Analyzer} suitable for Analyzers which wrap
  * other Analyzers.
@@ -82,6 +84,22 @@ public abstract class AnalyzerWrapper extends Analyzer {
   }
 
   /**
+   * Wraps / alters the given TokenStream for normalization purposes, taken
+   * from the wrapped Analyzer. It is through this method that new
+   * TokenFilters can be added by AnalyzerWrappers. By default, the given
+   * token stream is returned.
+   * 
+   * @param fieldName
+   *          Name of the field which is to be analyzed
+   * @param in
+   *          TokenStream taken from the wrapped Analyzer
+   * @return Wrapped / altered TokenStream.
+   */
+  protected TokenStream wrapTokenStreamForNormalization(String fieldName, TokenStream in) {
+    return in;
+  }
+
+  /**
    * Wraps / alters the given Reader. Through this method AnalyzerWrappers can
    * implement {@link #initReader(String, Reader)}. By default, the given reader
    * is returned.
@@ -95,13 +113,33 @@ public abstract class AnalyzerWrapper extends Analyzer {
   protected Reader wrapReader(String fieldName, Reader reader) {
     return reader;
   }
-  
+
+  /**
+   * Wraps / alters the given Reader. Through this method AnalyzerWrappers can
+   * implement {@link #initReaderForNormalization(String, Reader)}. By default,
+   * the given reader is returned.
+   * 
+   * @param fieldName
+   *          name of the field which is to be analyzed
+   * @param reader
+   *          the reader to wrap
+   * @return the wrapped reader
+   */
+  protected Reader wrapReaderForNormalization(String fieldName, Reader reader) {
+    return reader;
+  }
+
   @Override
   protected final TokenStreamComponents createComponents(String fieldName) {
     return wrapComponents(fieldName, getWrappedAnalyzer(fieldName).createComponents(fieldName));
   }
 
   @Override
+  protected final TokenStream normalize(String fieldName, TokenStream in) {
+    return wrapTokenStreamForNormalization(fieldName, getWrappedAnalyzer(fieldName).normalize(fieldName, in));
+  }
+
+  @Override
   public int getPositionIncrementGap(String fieldName) {
     return getWrappedAnalyzer(fieldName).getPositionIncrementGap(fieldName);
   }
@@ -115,4 +153,14 @@ public abstract class AnalyzerWrapper extends Analyzer {
   public final Reader initReader(String fieldName, Reader reader) {
     return getWrappedAnalyzer(fieldName).initReader(fieldName, wrapReader(fieldName, reader));
   }
+
+  @Override
+  protected final Reader initReaderForNormalization(String fieldName, Reader reader) {
+    return getWrappedAnalyzer(fieldName).initReaderForNormalization(fieldName, wrapReaderForNormalization(fieldName, reader));
+  }
+
+  @Override
+  protected final AttributeFactory attributeFactory(String fieldName) {
+    return getWrappedAnalyzer(fieldName).attributeFactory(fieldName);
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/af600480/lucene/core/src/java/org/apache/lucene/analysis/DelegatingAnalyzerWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/analysis/DelegatingAnalyzerWrapper.java b/lucene/core/src/java/org/apache/lucene/analysis/DelegatingAnalyzerWrapper.java
index 6f05d4d..edf5b2b 100644
--- a/lucene/core/src/java/org/apache/lucene/analysis/DelegatingAnalyzerWrapper.java
+++ b/lucene/core/src/java/org/apache/lucene/analysis/DelegatingAnalyzerWrapper.java
@@ -54,12 +54,22 @@ public abstract class DelegatingAnalyzerWrapper extends AnalyzerWrapper {
   protected final TokenStreamComponents wrapComponents(String fieldName, TokenStreamComponents components) {
     return super.wrapComponents(fieldName, components);
   }
-  
+
+  @Override
+  protected final TokenStream wrapTokenStreamForNormalization(String fieldName, TokenStream in) {
+    return super.wrapTokenStreamForNormalization(fieldName, in);
+  }
+
   @Override
   protected final Reader wrapReader(String fieldName, Reader reader) {
     return super.wrapReader(fieldName, reader);
   }
-  
+
+  @Override
+  protected final Reader wrapReaderForNormalization(String fieldName, Reader reader) {
+    return super.wrapReaderForNormalization(fieldName, reader);
+  }
+
   private static final class DelegatingReuseStrategy extends ReuseStrategy {
     DelegatingAnalyzerWrapper wrapper;
     private final ReuseStrategy fallbackStrategy;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/af600480/lucene/core/src/test/org/apache/lucene/analysis/TestDelegatingAnalyzerWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/analysis/TestDelegatingAnalyzerWrapper.java b/lucene/core/src/test/org/apache/lucene/analysis/TestDelegatingAnalyzerWrapper.java
new file mode 100644
index 0000000..1d6cf15
--- /dev/null
+++ b/lucene/core/src/test/org/apache/lucene/analysis/TestDelegatingAnalyzerWrapper.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.analysis;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.nio.charset.StandardCharsets;
+
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.LuceneTestCase;
+
+public class TestDelegatingAnalyzerWrapper extends LuceneTestCase {
+
+  public void testDelegatesNormalization() {
+    Analyzer analyzer1 = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false);
+    DelegatingAnalyzerWrapper w1 = new DelegatingAnalyzerWrapper(Analyzer.GLOBAL_REUSE_STRATEGY) {
+      @Override
+      protected Analyzer getWrappedAnalyzer(String fieldName) {
+        return analyzer1;
+      }
+    };
+    assertEquals(new BytesRef("Ab C"), w1.normalize("foo", "Ab C"));
+
+    Analyzer analyzer2 = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, true);
+    DelegatingAnalyzerWrapper w2 = new DelegatingAnalyzerWrapper(Analyzer.GLOBAL_REUSE_STRATEGY) {
+      @Override
+      protected Analyzer getWrappedAnalyzer(String fieldName) {
+        return analyzer2;
+      }
+    };
+    assertEquals(new BytesRef("ab c"), w2.normalize("foo", "Ab C"));
+  }
+
+  public void testDelegatesAttributeFactory() throws Exception {
+    Analyzer analyzer1 = new MockBytesAnalyzer();
+    DelegatingAnalyzerWrapper w1 = new DelegatingAnalyzerWrapper(Analyzer.GLOBAL_REUSE_STRATEGY) {
+      @Override
+      protected Analyzer getWrappedAnalyzer(String fieldName) {
+        return analyzer1;
+      }
+    };
+    assertEquals(new BytesRef("Ab C".getBytes(StandardCharsets.UTF_16LE)), w1.normalize("foo", "Ab C"));
+  }
+
+  public void testDelegatesCharFilter() throws Exception {
+    Analyzer analyzer1 = new Analyzer() {
+      @Override
+      protected Reader initReaderForNormalization(String fieldName, Reader reader) {
+        return new DummyCharFilter(reader, 'b', 'z');
+      }
+      @Override
+      protected TokenStreamComponents createComponents(String fieldName) {
+        Tokenizer tokenizer = new MockTokenizer(attributeFactory(fieldName));
+        return new TokenStreamComponents(tokenizer);
+      }
+    };
+    DelegatingAnalyzerWrapper w1 = new DelegatingAnalyzerWrapper(Analyzer.GLOBAL_REUSE_STRATEGY) {
+      @Override
+      protected Analyzer getWrappedAnalyzer(String fieldName) {
+        return analyzer1;
+      }
+    };
+    assertEquals(new BytesRef("az c"), w1.normalize("foo", "ab c"));
+  }
+
+  private static class DummyCharFilter extends CharFilter {
+
+    private final char match, repl;
+
+    public DummyCharFilter(Reader input, char match, char repl) {
+      super(input);
+      this.match = match;
+      this.repl = repl;
+    }
+
+    @Override
+    protected int correct(int currentOff) {
+      return currentOff;
+    }
+
+    @Override
+    public int read(char[] cbuf, int off, int len) throws IOException {
+      final int read = input.read(cbuf, off, len);
+      for (int i = 0; i < read; ++i) {
+        if (cbuf[off+i] == match) {
+          cbuf[off+i] = repl;
+        }
+      }
+      return read;
+    }
+    
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/af600480/lucene/test-framework/src/java/org/apache/lucene/analysis/MockBytesAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/analysis/MockBytesAnalyzer.java b/lucene/test-framework/src/java/org/apache/lucene/analysis/MockBytesAnalyzer.java
index b8cfc5b..4d51717 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/analysis/MockBytesAnalyzer.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/analysis/MockBytesAnalyzer.java
@@ -30,7 +30,7 @@ public final class MockBytesAnalyzer extends Analyzer {
   }
 
   @Override
-  protected AttributeFactory attributeFactory() {
+  protected AttributeFactory attributeFactory(String fieldName) {
     return MockUTF16TermAttributeImpl.UTF16_TERM_ATTRIBUTE_FACTORY;
   }
 }
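
The per-field signature above lets an Analyzer select its AttributeFactory based on the field being analyzed. Below is a minimal sketch of what that enables; the class and field names are illustrative assumptions:

```java
// Hypothetical analyzer that picks a different AttributeFactory per field.
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.MockUTF16TermAttributeImpl;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.util.AttributeFactory;

public final class PerFieldFactoryAnalyzer extends Analyzer {
  @Override
  protected TokenStreamComponents createComponents(String fieldName) {
    // The factory is now resolved per field instead of once per analyzer.
    Tokenizer tokenizer = new MockTokenizer(attributeFactory(fieldName));
    return new TokenStreamComponents(tokenizer);
  }

  @Override
  protected AttributeFactory attributeFactory(String fieldName) {
    return "utf16".equals(fieldName)
        ? MockUTF16TermAttributeImpl.UTF16_TERM_ATTRIBUTE_FACTORY
        : TokenStream.DEFAULT_TOKEN_ATTRIBUTE_FACTORY;
  }
}
```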

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/af600480/solr/core/src/java/org/apache/solr/analysis/TokenizerChain.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/analysis/TokenizerChain.java b/solr/core/src/java/org/apache/solr/analysis/TokenizerChain.java
index a5afbec..ab5458c 100644
--- a/solr/core/src/java/org/apache/solr/analysis/TokenizerChain.java
+++ b/solr/core/src/java/org/apache/solr/analysis/TokenizerChain.java
@@ -99,7 +99,7 @@ public final class TokenizerChain extends SolrAnalyzer {
 
   @Override
   protected TokenStreamComponents createComponents(String fieldName) {
-    Tokenizer tk = tokenizer.create(attributeFactory());
+    Tokenizer tk = tokenizer.create(attributeFactory(fieldName));
     TokenStream ts = tk;
     for (TokenFilterFactory filter : filters) {
       ts = filter.create(ts);


[30/50] [abbrv] lucene-solr:apiv2: LUCENE-7135: add issue number in CHANGES.txt

Posted by sa...@apache.org.
LUCENE-7135: add issue number in CHANGES.txt


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/2baad4c2
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/2baad4c2
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/2baad4c2

Branch: refs/heads/apiv2
Commit: 2baad4c22d05a1fcc4a09044eae868b6a5bfe1cf
Parents: 417e29a
Author: Mike McCandless <mi...@apache.org>
Authored: Mon Oct 31 11:23:36 2016 -0400
Committer: Mike McCandless <mi...@apache.org>
Committed: Mon Oct 31 11:23:36 2016 -0400

----------------------------------------------------------------------
 lucene/CHANGES.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2baad4c2/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 6697fbf..f90bc12 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -113,7 +113,7 @@ Bug Fixes
 * LUCENE-7429: AnalyzerWrapper can now modify the normalization chain too and
   DelegatingAnalyzerWrapper does the right thing automatically. (Adrien Grand)
 
-* Lucene's check for 32 or 64 bit JVM now works around security
+* LUCENE-7135: Lucene's check for 32 or 64 bit JVM now works around security
   manager blocking access to some properties (Aaron Madlon-Kay via
   Mike McCandless)
 


[31/50] [abbrv] lucene-solr:apiv2: SOLR-9433: SolrCore clean-up logic uses incorrect path to delete dataDir on failure to create a core

Posted by sa...@apache.org.
SOLR-9433: SolrCore clean-up logic uses incorrect path to delete dataDir on failure to create a core


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/51208163
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/51208163
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/51208163

Branch: refs/heads/apiv2
Commit: 51208163772a3c7e22912502e8d299e52d832b22
Parents: 2baad4c
Author: Shalin Shekhar Mangar <sh...@apache.org>
Authored: Tue Nov 1 14:32:46 2016 +0530
Committer: Shalin Shekhar Mangar <sh...@apache.org>
Committed: Tue Nov 1 14:32:46 2016 +0530

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  3 ++
 .../src/java/org/apache/solr/core/SolrCore.java |  2 +-
 .../handler/admin/CoreAdminHandlerTest.java     | 55 ++++++++++++++++++++
 3 files changed, 59 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51208163/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 09bf007..7f067e9 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -99,6 +99,9 @@ Bug Fixes
 * SOLR-9701: NPE in export handler when "fl" parameter is omitted.
   (Erick Erickson)
 
+* SOLR-9433: SolrCore clean-up logic uses incorrect path to delete dataDir on failure to create a core.
+  (Evan Sayer, shalin)
+
 Other Changes
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51208163/solr/core/src/java/org/apache/solr/core/SolrCore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index 5019cd0..16b4e03 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -2631,7 +2631,7 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
 
   public static void deleteUnloadedCore(CoreDescriptor cd, boolean deleteDataDir, boolean deleteInstanceDir) {
     if (deleteDataDir) {
-      File dataDir = new File(cd.getDataDir());
+      File dataDir = new File(cd.getInstanceDir().resolve(cd.getDataDir()).toAbsolutePath().toString());
       try {
         FileUtils.deleteDirectory(dataDir);
       } catch (IOException e) {

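The one-line change above matters because a core's dataDir is typically a path relative to the core's instance directory; the old `new File(cd.getDataDir())` resolved it against the JVM working directory instead, deleting the wrong location (or nothing). A minimal sketch of the resolution logic, with illustrative paths:

```java
import java.nio.file.Path;
import java.nio.file.Paths;

class DataDirResolution {
  // Path.resolve() is a no-op for an absolute dataDir but anchors a relative
  // one (the common case, e.g. "data") under the core's instance directory.
  static Path effectiveDataDir(Path instanceDir, String dataDir) {
    return instanceDir.resolve(dataDir).toAbsolutePath();
  }

  public static void main(String[] args) {
    Path instanceDir = Paths.get("/var/solr/corex");                 // illustrative
    System.out.println(effectiveDataDir(instanceDir, "data"));       // /var/solr/corex/data
    System.out.println(effectiveDataDir(instanceDir, "/mnt/data"));  // /mnt/data, unchanged
  }
}
```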
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/51208163/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminHandlerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminHandlerTest.java
index 04bc3bd..8d2156d 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminHandlerTest.java
@@ -20,6 +20,7 @@ import java.io.File;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.util.Map;
 
 import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule;
@@ -28,6 +29,7 @@ import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.request.CoreAdminRequest;
+import org.apache.solr.client.solrj.request.CoreStatus;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.params.CoreAdminParams;
@@ -280,6 +282,59 @@ public class CoreAdminHandlerTest extends SolrTestCaseJ4 {
   }
 
   @Test
+  public void testDeleteInstanceDirAfterCreateFailure() throws Exception  {
+    File solrHomeDirectory = new File(initCoreDataDir, getClass().getName() + "-corex-"
+        + System.nanoTime());
+    solrHomeDirectory.mkdirs();
+    copySolrHomeToTemp(solrHomeDirectory, "corex");
+    File corex = new File(solrHomeDirectory, "corex");
+    FileUtils.write(new File(corex, "core.properties"), "", StandardCharsets.UTF_8);
+    JettySolrRunner runner = new JettySolrRunner(solrHomeDirectory.getAbsolutePath(), buildJettyConfig("/solr"));
+    runner.start();
+
+    try (HttpSolrClient client = getHttpSolrClient(runner.getBaseUrl() + "/corex")) {
+      client.setConnectionTimeout(SolrTestCaseJ4.DEFAULT_CONNECTION_TIMEOUT);
+      client.setSoTimeout(SolrTestCaseJ4.DEFAULT_CONNECTION_TIMEOUT);
+      SolrInputDocument doc = new SolrInputDocument();
+      doc.addField("id", "123");
+      client.add(doc);
+      client.commit();
+    }
+
+    Path dataDir = null;
+    try (HttpSolrClient client = getHttpSolrClient(runner.getBaseUrl().toString())) {
+      CoreStatus status = CoreAdminRequest.getCoreStatus("corex", true, client);
+      String dataDirectory = status.getDataDirectory();
+      dataDir = Paths.get(dataDirectory);
+      assertTrue(Files.exists(dataDir));
+    }
+
+    File subHome = new File(solrHomeDirectory, "corex" + File.separator + "conf");
+    String top = SolrTestCaseJ4.TEST_HOME() + "/collection1/conf";
+    FileUtils.copyFile(new File(top, "bad-error-solrconfig.xml"), new File(subHome, "solrconfig.xml"));
+
+    try (HttpSolrClient client = getHttpSolrClient(runner.getBaseUrl().toString())) {
+      client.setConnectionTimeout(SolrTestCaseJ4.DEFAULT_CONNECTION_TIMEOUT);
+      client.setSoTimeout(SolrTestCaseJ4.DEFAULT_CONNECTION_TIMEOUT);
+      try {
+        CoreAdminRequest.reloadCore("corex", client);
+      } catch (Exception e) {
+        // this is expected because we put a bad solrconfig -- ignore
+      }
+
+      CoreAdminRequest.Unload req = new CoreAdminRequest.Unload(false);
+      req.setDeleteDataDir(true);
+      req.setDeleteInstanceDir(false); // important because the data directory is inside the instance directory
+      req.setCoreName("corex");
+      req.process(client);
+    }
+
+    runner.stop();
+
+    assertTrue("The data directory was not cleaned up on unload after a failed core reload", Files.notExists(dataDir));
+  }
+
+  @Test
   public void testNonexistentCoreReload() throws Exception {
     final CoreAdminHandler admin = new CoreAdminHandler(h.getCoreContainer());
     SolrQueryResponse resp = new SolrQueryResponse();


[25/50] [abbrv] lucene-solr:apiv2: LUCENE-7529: Fix argument checks of MultiDocValues' advanceExact impls.

Posted by sa...@apache.org.
LUCENE-7529: Fix argument checks of MultiDocValues' advanceExact impls.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/325b74e0
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/325b74e0
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/325b74e0

Branch: refs/heads/apiv2
Commit: 325b74e0e3e9c9ce265d8d7844f01209394b920a
Parents: 2ad2fca
Author: Adrien Grand <jp...@gmail.com>
Authored: Mon Oct 31 10:34:48 2016 +0100
Committer: Adrien Grand <jp...@gmail.com>
Committed: Mon Oct 31 10:54:54 2016 +0100

----------------------------------------------------------------------
 .../org/apache/lucene/index/MultiDocValues.java  | 10 +++++-----
 .../apache/lucene/index/TestMultiDocValues.java  | 19 +++++++++++++++++--
 2 files changed, 22 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/325b74e0/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java b/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java
index 51d684d..3970e0a 100644
--- a/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java
@@ -140,7 +140,7 @@ public class MultiDocValues {
 
       @Override
       public boolean advanceExact(int targetDocID) throws IOException {
-        if (targetDocID <= docID) {
+        if (targetDocID < docID) {
           throw new IllegalArgumentException("can only advance beyond current document: on docID=" + docID + " but targetDocID=" + targetDocID);
         }
         int readerIndex = ReaderUtil.subIndex(targetDocID, leaves);
@@ -266,7 +266,7 @@ public class MultiDocValues {
 
       @Override
       public boolean advanceExact(int targetDocID) throws IOException {
-        if (targetDocID <= docID) {
+        if (targetDocID < docID) {
           throw new IllegalArgumentException("can only advance beyond current document: on docID=" + docID + " but targetDocID=" + targetDocID);
         }
         int readerIndex = ReaderUtil.subIndex(targetDocID, leaves);
@@ -390,7 +390,7 @@ public class MultiDocValues {
 
       @Override
       public boolean advanceExact(int targetDocID) throws IOException {
-        if (targetDocID <= docID) {
+        if (targetDocID < docID) {
           throw new IllegalArgumentException("can only advance beyond current document: on docID=" + docID + " but targetDocID=" + targetDocID);
         }
         int readerIndex = ReaderUtil.subIndex(targetDocID, leaves);
@@ -525,7 +525,7 @@ public class MultiDocValues {
 
       @Override
       public boolean advanceExact(int targetDocID) throws IOException {
-        if (targetDocID <= docID) {
+        if (targetDocID < docID) {
           throw new IllegalArgumentException("can only advance beyond current document: on docID=" + docID + " but targetDocID=" + targetDocID);
         }
         int readerIndex = ReaderUtil.subIndex(targetDocID, leaves);
@@ -1007,7 +1007,7 @@ public class MultiDocValues {
     
     @Override
     public boolean advanceExact(int targetDocID) throws IOException {
-      if (targetDocID <= docID) {
+      if (targetDocID < docID) {
         throw new IllegalArgumentException("can only advance beyond current document: on docID=" + docID + " but targetDocID=" + targetDocID);
       }
       int readerIndex = ReaderUtil.subIndex(targetDocID, docStarts);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/325b74e0/lucene/core/src/test/org/apache/lucene/index/TestMultiDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestMultiDocValues.java b/lucene/core/src/test/org/apache/lucene/index/TestMultiDocValues.java
index f6669d5..7d4d74f 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestMultiDocValues.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestMultiDocValues.java
@@ -71,7 +71,8 @@ public class TestMultiDocValues extends LuceneTestCase {
       assertEquals(single.longValue(), multi.longValue());
     }
     testRandomAdvance(merged.getNumericDocValues("numbers"), MultiDocValues.getNumericValues(ir, "numbers"));
-    
+    testRandomAdvanceExact(merged.getNumericDocValues("numbers"), MultiDocValues.getNumericValues(ir, "numbers"), merged.maxDoc());
+
     ir.close();
     ir2.close();
     dir.close();
@@ -113,6 +114,7 @@ public class TestMultiDocValues extends LuceneTestCase {
       assertEquals(expected, actual);
     }
     testRandomAdvance(merged.getBinaryDocValues("bytes"), MultiDocValues.getBinaryValues(ir, "bytes"));
+    testRandomAdvanceExact(merged.getBinaryDocValues("bytes"), MultiDocValues.getBinaryValues(ir, "bytes"), merged.maxDoc());
 
     ir.close();
     ir2.close();
@@ -164,6 +166,7 @@ public class TestMultiDocValues extends LuceneTestCase {
       assertEquals(single.ordValue(), multi.ordValue());
     }
     testRandomAdvance(merged.getSortedDocValues("bytes"), MultiDocValues.getSortedValues(ir, "bytes"));
+    testRandomAdvanceExact(merged.getSortedDocValues("bytes"), MultiDocValues.getSortedValues(ir, "bytes"), merged.maxDoc());
     ir.close();
     ir2.close();
     dir.close();
@@ -209,6 +212,7 @@ public class TestMultiDocValues extends LuceneTestCase {
       assertEquals(expected, actual);
     }
     testRandomAdvance(merged.getSortedDocValues("bytes"), MultiDocValues.getSortedValues(ir, "bytes"));
+    testRandomAdvanceExact(merged.getSortedDocValues("bytes"), MultiDocValues.getSortedValues(ir, "bytes"), merged.maxDoc());
     
     ir.close();
     ir2.close();
@@ -275,6 +279,7 @@ public class TestMultiDocValues extends LuceneTestCase {
       }
     }
     testRandomAdvance(merged.getSortedSetDocValues("bytes"), MultiDocValues.getSortedSetValues(ir, "bytes"));
+    testRandomAdvanceExact(merged.getSortedSetDocValues("bytes"), MultiDocValues.getSortedSetValues(ir, "bytes"), merged.maxDoc());
     
     ir.close();
     ir2.close();
@@ -341,7 +346,8 @@ public class TestMultiDocValues extends LuceneTestCase {
       }
     }
     testRandomAdvance(merged.getSortedSetDocValues("bytes"), MultiDocValues.getSortedSetValues(ir, "bytes"));
-    
+    testRandomAdvanceExact(merged.getSortedSetDocValues("bytes"), MultiDocValues.getSortedSetValues(ir, "bytes"), merged.maxDoc());
+
     ir.close();
     ir2.close();
     dir.close();
@@ -391,6 +397,7 @@ public class TestMultiDocValues extends LuceneTestCase {
       }
     }
     testRandomAdvance(merged.getSortedNumericDocValues("nums"), MultiDocValues.getSortedNumericValues(ir, "nums"));
+    testRandomAdvanceExact(merged.getSortedNumericDocValues("nums"), MultiDocValues.getSortedNumericValues(ir, "nums"), merged.maxDoc());
     
     ir.close();
     ir2.close();
@@ -410,4 +417,12 @@ public class TestMultiDocValues extends LuceneTestCase {
       }
     }
   }
+
+  private void testRandomAdvanceExact(DocValuesIterator iter1, DocValuesIterator iter2, int maxDoc) throws IOException {
+    for (int target = random().nextInt(Math.min(maxDoc, 10)); target < maxDoc; target += random().nextInt(10)) {
+      final boolean exists1 = iter1.advanceExact(target);
+      final boolean exists2 = iter2.advanceExact(target);
+      assertEquals(exists1, exists2);
+    }
+  }
 }
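
The relaxed check reflects the advanceExact contract: unlike advance, it may be called again with the current target, and only targets before the current docID are invalid. A minimal sketch of the call pattern the old `<=` check incorrectly rejected (the reader and field are assumed):

```java
import java.io.IOException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiDocValues;
import org.apache.lucene.index.NumericDocValues;

class AdvanceExactContract {
  // Calling advanceExact twice with the same target is legal and must return
  // the same answer both times.
  static void check(IndexReader reader, String field, int target) throws IOException {
    NumericDocValues values = MultiDocValues.getNumericValues(reader, field);
    boolean first = values.advanceExact(target);   // positions on `target`
    boolean second = values.advanceExact(target);  // same target: must not throw
    assert first == second && values.docID() == target;
  }
}
```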


[24/50] [abbrv] lucene-solr:apiv2: LUCENE-7528: Fix Lucene54's advanceExact impl in the sparse case.

Posted by sa...@apache.org.
LUCENE-7528: Fix Lucene54's advanceExact impl in the sparse case.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/66c90a96
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/66c90a96
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/66c90a96

Branch: refs/heads/apiv2
Commit: 66c90a9683407eee52e85120cb1e80fd9dd905f5
Parents: 325b74e
Author: Adrien Grand <jp...@gmail.com>
Authored: Mon Oct 31 10:36:26 2016 +0100
Committer: Adrien Grand <jp...@gmail.com>
Committed: Mon Oct 31 10:54:54 2016 +0100

----------------------------------------------------------------------
 .../lucene54/Lucene54DocValuesProducer.java     |  2 +-
 .../lucene54/TestLucene54DocValuesFormat.java   | 27 ++++++++++++++++++++
 2 files changed, 28 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/66c90a96/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesProducer.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesProducer.java b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesProducer.java
index f1c169c..a35f503 100644
--- a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesProducer.java
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesProducer.java
@@ -715,7 +715,7 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
       }
       --index;
       doc = target;
-      return false;
+      return index >= 0 && docIds.get(index) == target;
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/66c90a96/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene54/TestLucene54DocValuesFormat.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene54/TestLucene54DocValuesFormat.java b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene54/TestLucene54DocValuesFormat.java
index b231716..a761dfc 100644
--- a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene54/TestLucene54DocValuesFormat.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene54/TestLucene54DocValuesFormat.java
@@ -488,6 +488,33 @@ public class TestLucene54DocValuesFormat extends BaseCompressingDocValuesFormatT
         }
       }
 
+      // advanceExact
+      for (int i = 0; i < 2000; ++i) {
+        sparseValues.reset();
+        if (random().nextBoolean() && docIds.length > 0) {
+          sparseValues.advance(docIds[TestUtil.nextInt(random(), 0, docIds.length - 1)]);
+        }
+
+        final int target = TestUtil.nextInt(random(), Math.max(0, sparseValues.docID()), maxDoc - 1);
+        final boolean exists = sparseValues.advanceExact(target);
+        
+        final int index = Arrays.binarySearch(docIds, target);
+        assertEquals(index >= 0, exists);
+        assertEquals(target, sparseValues.docID());
+
+        final boolean exists2 = sparseValues.advanceExact(target);
+        assertEquals(index >= 0, exists2);
+        assertEquals(target, sparseValues.docID());
+
+        final int nextIndex = index >= 0 ? index + 1 : -1 - index;
+        if (nextIndex >= docIds.length) {
+          assertEquals(DocIdSetIterator.NO_MORE_DOCS, sparseValues.nextDoc());
+        } else {
+          assertEquals(docIds[nextIndex], sparseValues.nextDoc());
+        }
+      }
+      
+
       final SparseNumericDocValuesRandomAccessWrapper raWrapper = new SparseNumericDocValuesRandomAccessWrapper(sparseValues, missingValue);
 
       // random-access

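In effect, advanceExact on a sparse field must report whether the target doc actually carries a value rather than unconditionally returning false. A simplified sketch of the intended semantics; the real producer walks an on-disk structure, not an in-memory array:

```java
import java.util.Arrays;

class SparseAdvanceExactSemantics {
  // docIdsWithValues is the sorted list of docs that have a value; advanceExact
  // on `target` should return true exactly when `target` appears in that list.
  static boolean advanceExact(int[] docIdsWithValues, int target) {
    return Arrays.binarySearch(docIdsWithValues, target) >= 0;
  }
}
```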

[43/50] [abbrv] lucene-solr:apiv2: SOLR-8542: Adds Solr Learning to Rank (LTR) plugin for reranking results with machine learning models. (Michael Nilsson, Diego Ceccarelli, Joshua Pantony, Jon Dorando, Naveen Santhapuri, Alessandro Benedetti, David Grohmann, Christine Poerschke)

Posted by sa...@apache.org.
SOLR-8542: Adds Solr Learning to Rank (LTR) plugin for reranking results with machine learning models. (Michael Nilsson, Diego Ceccarelli, Joshua Pantony, Jon Dorando, Naveen Santhapuri, Alessandro Benedetti, David Grohmann, Christine Poerschke)


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/5a66b3bc
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/5a66b3bc
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/5a66b3bc

Branch: refs/heads/apiv2
Commit: 5a66b3bc089e4b3e73b1c41c4cdcd89b183b85e7
Parents: b6ff3fd
Author: Christine Poerschke <cp...@apache.org>
Authored: Tue Nov 1 17:50:14 2016 +0000
Committer: Christine Poerschke <cp...@apache.org>
Committed: Tue Nov 1 17:50:14 2016 +0000

----------------------------------------------------------------------
 dev-tools/idea/.idea/modules.xml                |    1 +
 dev-tools/idea/solr/contrib/ltr/ltr.iml         |   37 +
 solr/CHANGES.txt                                |    3 +
 solr/contrib/ltr/README.md                      |  406 +++++
 solr/contrib/ltr/README.txt                     |    1 +
 solr/contrib/ltr/build.xml                      |   30 +
 solr/contrib/ltr/example/config.json            |   14 +
 solr/contrib/ltr/example/libsvm_formatter.py    |  124 ++
 solr/contrib/ltr/example/solrconfig.xml         | 1722 ++++++++++++++++++
 .../ltr/example/techproducts-features.json      |   26 +
 .../contrib/ltr/example/techproducts-model.json |   18 +
 .../ltr/example/train_and_upload_demo_model.py  |  163 ++
 solr/contrib/ltr/example/user_queries.txt       |    8 +
 solr/contrib/ltr/ivy.xml                        |   32 +
 .../src/java/org/apache/solr/ltr/DocInfo.java   |   42 +
 .../java/org/apache/solr/ltr/FeatureLogger.java |  193 ++
 .../java/org/apache/solr/ltr/LTRRescorer.java   |  249 +++
 .../org/apache/solr/ltr/LTRScoringQuery.java    |  738 ++++++++
 .../org/apache/solr/ltr/LTRThreadModule.java    |  163 ++
 .../solr/ltr/SolrQueryRequestContextUtils.java  |   83 +
 .../org/apache/solr/ltr/feature/Feature.java    |  335 ++++
 .../solr/ltr/feature/FeatureException.java      |   31 +
 .../solr/ltr/feature/FieldLengthFeature.java    |  152 ++
 .../solr/ltr/feature/FieldValueFeature.java     |  141 ++
 .../solr/ltr/feature/OriginalScoreFeature.java  |  118 ++
 .../apache/solr/ltr/feature/SolrFeature.java    |  320 ++++
 .../apache/solr/ltr/feature/ValueFeature.java   |  148 ++
 .../apache/solr/ltr/feature/package-info.java   |   21 +
 .../apache/solr/ltr/model/LTRScoringModel.java  |  298 +++
 .../org/apache/solr/ltr/model/LinearModel.java  |  147 ++
 .../apache/solr/ltr/model/ModelException.java   |   31 +
 .../ltr/model/MultipleAdditiveTreesModel.java   |  377 ++++
 .../org/apache/solr/ltr/model/package-info.java |   21 +
 .../solr/ltr/norm/IdentityNormalizer.java       |   53 +
 .../apache/solr/ltr/norm/MinMaxNormalizer.java  |  107 ++
 .../org/apache/solr/ltr/norm/Normalizer.java    |   64 +
 .../solr/ltr/norm/NormalizerException.java      |   31 +
 .../solr/ltr/norm/StandardNormalizer.java       |   99 +
 .../org/apache/solr/ltr/norm/package-info.java  |   23 +
 .../java/org/apache/solr/ltr/package-info.java  |   45 +
 .../org/apache/solr/ltr/store/FeatureStore.java |   67 +
 .../org/apache/solr/ltr/store/ModelStore.java   |   74 +
 .../org/apache/solr/ltr/store/package-info.java |   21 +
 .../ltr/store/rest/ManagedFeatureStore.java     |  215 +++
 .../solr/ltr/store/rest/ManagedModelStore.java  |  319 ++++
 .../solr/ltr/store/rest/package-info.java       |   22 +
 .../LTRFeatureLoggerTransformerFactory.java     |  254 +++
 .../solr/response/transform/package-info.java   |   23 +
 .../apache/solr/search/LTRQParserPlugin.java    |  233 +++
 .../org/apache/solr/search/package-info.java    |   23 +
 solr/contrib/ltr/src/java/overview.html         |   91 +
 .../featureExamples/comp_features.json          |   37 +
 .../featureExamples/external_features.json      |   51 +
 ...external_features_for_sparse_processing.json |   18 +
 .../featureExamples/features-linear-efi.json    |   17 +
 .../featureExamples/features-linear.json        |   51 +
 .../features-store-test-model.json              |   51 +
 .../test-files/featureExamples/fq_features.json |   16 +
 .../multipleadditivetreesmodel_features.json    |   16 +
 .../contrib/ltr/src/test-files/log4j.properties |   32 +
 .../modelExamples/external_model.json           |   12 +
 .../modelExamples/external_model_store.json     |   13 +
 .../src/test-files/modelExamples/fq-model.json  |   20 +
 .../modelExamples/linear-model-efi.json         |   14 +
 .../test-files/modelExamples/linear-model.json  |   30 +
 .../multipleadditivetreesmodel.json             |   38 +
 ...tivetreesmodel_external_binary_features.json |   38 +
 .../multipleadditivetreesmodel_no_feature.json  |   24 +
 .../multipleadditivetreesmodel_no_features.json |   14 +
 .../multipleadditivetreesmodel_no_left.json     |   22 +
 .../multipleadditivetreesmodel_no_params.json   |    8 +
 .../multipleadditivetreesmodel_no_right.json    |   22 +
 ...multipleadditivetreesmodel_no_threshold.json |   24 +
 .../multipleadditivetreesmodel_no_tree.json     |   15 +
 .../multipleadditivetreesmodel_no_trees.json    |   10 +
 .../multipleadditivetreesmodel_no_weight.json   |   24 +
 .../test-files/solr/collection1/conf/schema.xml |   88 +
 .../solr/collection1/conf/solrconfig-ltr.xml    |   65 +
 .../collection1/conf/solrconfig-ltr_Th10_10.xml |   69 +
 .../collection1/conf/solrconfig-multiseg.xml    |   62 +
 .../solr/collection1/conf/stopwords.txt         |   16 +
 .../solr/collection1/conf/synonyms.txt          |   28 +
 solr/contrib/ltr/src/test-files/solr/solr.xml   |   42 +
 .../org/apache/solr/ltr/TestLTROnSolrCloud.java |  211 +++
 .../apache/solr/ltr/TestLTRQParserExplain.java  |  152 ++
 .../apache/solr/ltr/TestLTRQParserPlugin.java   |  114 ++
 .../solr/ltr/TestLTRReRankingPipeline.java      |  300 +++
 .../apache/solr/ltr/TestLTRScoringQuery.java    |  319 ++++
 .../org/apache/solr/ltr/TestLTRWithFacet.java   |  103 ++
 .../org/apache/solr/ltr/TestLTRWithSort.java    |  102 ++
 .../solr/ltr/TestParallelWeightCreation.java    |   77 +
 .../org/apache/solr/ltr/TestRerankBase.java     |  429 +++++
 .../solr/ltr/TestSelectiveWeightCreation.java   |  251 +++
 .../ltr/feature/TestEdisMaxSolrFeature.java     |   76 +
 .../solr/ltr/feature/TestExternalFeatures.java  |  157 ++
 .../ltr/feature/TestExternalValueFeatures.java  |   86 +
 ...stFeatureExtractionFromMultipleSegments.java |  105 ++
 .../solr/ltr/feature/TestFeatureLogging.java    |  254 +++
 .../ltr/feature/TestFeatureLtrScoringModel.java |   71 +
 .../solr/ltr/feature/TestFeatureStore.java      |  106 ++
 .../ltr/feature/TestFieldLengthFeature.java     |  156 ++
 .../solr/ltr/feature/TestFieldValueFeature.java |  173 ++
 .../solr/ltr/feature/TestFilterSolrFeature.java |  105 ++
 .../ltr/feature/TestNoMatchSolrFeature.java     |  192 ++
 .../ltr/feature/TestOriginalScoreFeature.java   |  148 ++
 .../solr/ltr/feature/TestRankingFeature.java    |  123 ++
 .../ltr/feature/TestUserTermScoreWithQ.java     |   74 +
 .../ltr/feature/TestUserTermScorerQuery.java    |   74 +
 .../ltr/feature/TestUserTermScorereQDF.java     |   75 +
 .../solr/ltr/feature/TestValueFeature.java      |  165 ++
 .../apache/solr/ltr/model/TestLinearModel.java  |  207 +++
 .../model/TestMultipleAdditiveTreesModel.java   |  246 +++
 .../solr/ltr/norm/TestMinMaxNormalizer.java     |  120 ++
 .../solr/ltr/norm/TestStandardNormalizer.java   |  132 ++
 .../ltr/store/rest/TestManagedFeatureStore.java |   36 +
 .../solr/ltr/store/rest/TestModelManager.java   |  163 ++
 .../store/rest/TestModelManagerPersistence.java |  121 ++
 117 files changed, 14167 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/dev-tools/idea/.idea/modules.xml
----------------------------------------------------------------------
diff --git a/dev-tools/idea/.idea/modules.xml b/dev-tools/idea/.idea/modules.xml
index 6fbe496..5d2d106 100644
--- a/dev-tools/idea/.idea/modules.xml
+++ b/dev-tools/idea/.idea/modules.xml
@@ -60,6 +60,7 @@
       <module group="Solr/Contrib" filepath="$PROJECT_DIR$/solr/contrib/uima/uima.iml" />
       <module group="Solr/Contrib" filepath="$PROJECT_DIR$/solr/contrib/velocity/velocity.iml" />
       <module group="Solr/Contrib" filepath="$PROJECT_DIR$/solr/contrib/analytics/analytics.iml" />
+      <module group="Solr/Contrib" filepath="$PROJECT_DIR$/solr/contrib/ltr/ltr.iml" />
     </modules>
   </component>
 </project>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/dev-tools/idea/solr/contrib/ltr/ltr.iml
----------------------------------------------------------------------
diff --git a/dev-tools/idea/solr/contrib/ltr/ltr.iml b/dev-tools/idea/solr/contrib/ltr/ltr.iml
new file mode 100644
index 0000000..efc505d
--- /dev/null
+++ b/dev-tools/idea/solr/contrib/ltr/ltr.iml
@@ -0,0 +1,37 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+  <component name="NewModuleRootManager" inherit-compiler-output="false">
+    <output url="file://$MODULE_DIR$/../../../idea-build/solr/contrib/ltr/classes/java" />
+    <output-test url="file://$MODULE_DIR$/../../../idea-build/solr/contrib/ltr/classes/test" />
+    <exclude-output />
+    <content url="file://$MODULE_DIR$">
+      <sourceFolder url="file://$MODULE_DIR$/src/test" isTestSource="true" />
+      <sourceFolder url="file://$MODULE_DIR$/src/test-files" type="java-test-resource" />
+      <sourceFolder url="file://$MODULE_DIR$/src/java" isTestSource="false" />
+      <sourceFolder url="file://$MODULE_DIR$/src/resources" type="java-resource" />
+    </content>
+    <orderEntry type="inheritedJdk" />
+    <orderEntry type="sourceFolder" forTests="false" />
+    <orderEntry type="library" scope="TEST" name="JUnit" level="project" />
+    <orderEntry type="library" name="Solr core library" level="project" />
+    <orderEntry type="library" name="Solrj library" level="project" />
+    <orderEntry type="module-library">
+      <library>
+        <CLASSES>
+          <root url="file://$MODULE_DIR$/lib" />
+        </CLASSES>
+        <JAVADOC />
+        <SOURCES />
+        <jarDirectory url="file://$MODULE_DIR$/lib" recursive="false" />
+      </library>
+    </orderEntry>
+    <orderEntry type="library" scope="TEST" name="Solr example library" level="project" />
+    <orderEntry type="library" scope="TEST" name="Solr core test library" level="project" />
+    <orderEntry type="module" scope="TEST" module-name="lucene-test-framework" />
+    <orderEntry type="module" scope="TEST" module-name="solr-test-framework" />
+    <orderEntry type="module" module-name="solr-core" />
+    <orderEntry type="module" module-name="solrj" />
+    <orderEntry type="module" module-name="lucene-core" />
+    <orderEntry type="module" module-name="analysis-common" />
+  </component>
+</module>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index fd4d2af..16cae8c 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -93,6 +93,9 @@ New Features
   SOLR_HOME on every node. Editing config through API is supported but affects only that one node. 
   (janhoy)
 
+* SOLR-8542: Adds Solr Learning to Rank (LTR) plugin for reranking results with machine learning models.
+  (Michael Nilsson, Diego Ceccarelli, Joshua Pantony, Jon Dorando, Naveen Santhapuri, Alessandro Benedetti, David Grohmann, Christine Poerschke)
+
 Optimizations
 ----------------------
 * SOLR-9704: Facet Module / JSON Facet API: Optimize blockChildren facets that have

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/README.md
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/README.md b/solr/contrib/ltr/README.md
new file mode 100644
index 0000000..5fe0087
--- /dev/null
+++ b/solr/contrib/ltr/README.md
@@ -0,0 +1,406 @@
+Apache Solr Learning to Rank
+========
+
+This is the main [learning to rank integrated into solr](http://www.slideshare.net/lucidworks/learning-to-rank-in-solr-presented-by-michael-nilsson-diego-ceccarelli-bloomberg-lp)
+repository.
+[Read up on learning to rank](https://en.wikipedia.org/wiki/Learning_to_rank)
+
+Apache Solr Learning to Rank (LTR) provides a way for you to extract features
+directly inside Solr for use in training a machine learned model.  You can then
+deploy that model to Solr and use it to rerank your top X search results.
+
+# Test the plugin with solr/example/techproducts in a few easy steps!
+
+Solr provides some simple example indices. In order to test the plugin with
+the techproducts example please follow these steps.
+
+1. Compile solr and the examples
+
+    `cd solr`
+    `ant dist`
+    `ant server`
+
+2. Run the example to setup the index
+
+   `./bin/solr -e techproducts`
+
+3. Stop solr and install the plugin:
+     1. Stop solr
+
+        `./bin/solr stop`
+     2. Create the lib folder
+
+        `mkdir example/techproducts/solr/techproducts/lib`
+     3. Install the plugin in the lib folder
+
+        `cp build/contrib/ltr/solr-ltr-7.0.0-SNAPSHOT.jar example/techproducts/solr/techproducts/lib/`
+     4. Replace the original solrconfig with one importing all the ltr components
+
+        `cp contrib/ltr/example/solrconfig.xml example/techproducts/solr/techproducts/conf/`
+
+4. Run the example again
+
+   `./bin/solr -e techproducts`
+
+   Note you could also have just restarted your collection using the admin page.
+   You can find more detailed instructions [here](https://wiki.apache.org/solr/SolrPlugins).
+
+5. Deploy features and a model
+
+      `curl -XPUT 'http://localhost:8983/solr/techproducts/schema/feature-store'  --data-binary "@./contrib/ltr/example/techproducts-features.json"  -H 'Content-type:application/json'`
+
+      `curl -XPUT 'http://localhost:8983/solr/techproducts/schema/model-store'  --data-binary "@./contrib/ltr/example/techproducts-model.json"  -H 'Content-type:application/json'`
+
+6. Have fun!
+
+     * Access to the default feature store
+
+       http://localhost:8983/solr/techproducts/schema/feature-store/\_DEFAULT\_
+     * Access to the model store
+
+       http://localhost:8983/solr/techproducts/schema/model-store
+     * Perform a reranking query using the model, and retrieve the features
+
+       http://localhost:8983/solr/techproducts/query?indent=on&q=test&wt=json&rq={!ltr%20model=linear%20reRankDocs=25%20efi.user_query=%27test%27}&fl=[features],price,score,name
+
+
+BONUS: Train an actual machine learning model
+
+1. Download and install [liblinear](https://www.csie.ntu.edu.tw/~cjlin/liblinear/)
+
+2. Change `contrib/ltr/example/config.json` "trainingLibraryLocation" to point to the train directory where you installed liblinear.
+
+3. Extract features, train a reranking model, and deploy it to Solr.
+
+  `cd  contrib/ltr/example`
+
+  `python  train_and_upload_demo_model.py -c config.json`
+
+   This script deploys your features from `config.json` "featuresFile" to Solr.  Then it takes the relevance-judged query
+   document pairs of "userQueriesFile" and merges them with the features extracted from Solr into a training
+   file.  That file is used to train a linear model, which is then deployed to Solr for you to rerank results.
+
+4. Search and rerank the results using the trained model
+
+   http://localhost:8983/solr/techproducts/query?indent=on&q=test&wt=json&rq={!ltr%20model=ExampleModel%20reRankDocs=25%20efi.user_query=%27test%27}&fl=price,score,name
+
+# Changes to solrconfig.xml
+```xml
+<config>
+  ...
+
+  <!-- Query parser used to rerank top docs with a provided model -->
+  <queryParser name="ltr" class="org.apache.solr.search.LTRQParserPlugin" />
+
+  <!--  Transformer that will encode the document features in the response.
+  For each document the transformer will add the features as an extra field
+  in the response. The name of the field will be the name of the
+  transformer enclosed in brackets (in this case [features]).
+  In order to get the feature vector you will have to
+  specify that you want the field (e.g., fl="*,[features]")  -->
+
+  <transformer name="features" class="org.apache.solr.response.transform.LTRFeatureLoggerTransformerFactory" />
+
+  <query>
+    ...
+
+    <!-- Cache for storing and fetching feature vectors -->
+    <cache name="QUERY_DOC_FV"
+      class="solr.search.LRUCache"
+      size="4096"
+      initialSize="2048"
+      autowarmCount="4096"
+      regenerator="solr.search.NoOpRegenerator" />
+  </query>
+
+</config>
+
+```
+
+# Defining Features
+In the learning to rank plugin, you can define features in a feature space
+using standard Solr queries. As an example:
+
+###### features.json
+```json
+[
+{ "name": "isBook",
+  "class": "org.apache.solr.ltr.feature.SolrFeature",
+  "params":{ "fq": ["{!terms f=category}book"] }
+},
+{
+  "name":  "documentRecency",
+  "class": "org.apache.solr.ltr.feature.SolrFeature",
+  "params": {
+      "q": "{!func}recip( ms(NOW,publish_date), 3.16e-11, 1, 1)"
+  }
+},
+{
+  "name":"originalScore",
+  "class":"org.apache.solr.ltr.feature.OriginalScoreFeature",
+  "params":{}
+},
+{
+  "name" : "userTextTitleMatch",
+  "class" : "org.apache.solr.ltr.feature.SolrFeature",
+  "params" : { "q" : "{!field f=title}${user_text}" }
+},
+ {
+   "name" : "userFromMobile",
+   "class" : "org.apache.solr.ltr.feature.ValueFeature",
+   "params" : { "value" : "${userFromMobile}", "required":true }
+ }
+]
+```
+
+This defines five features. Anything that is a valid Solr query can be used to define
+a feature.
+
+### Filter Query Features
+The first feature, isBook, fires if the term 'book' matches the category field
+of the document being examined. Since this feature specifies an fq but no q,
+it returns a score of 1 in case of a match and a score of 0 in case of no
+match.
+
+### Query Features
+In the second feature (documentRecency) q was specified using a function query.
+In this case the score for the feature on a given document is whatever the query
+returns (1 for docs dated now, 1/2 for docs dated 1 year ago, 1/3 for docs dated
+2 years ago, etc.). If both an fq and a q are used, documents that don't match
+the fq will receive a score of 0 for the documentRecency feature; all other
+documents will receive the score specified by the query for this feature.
+
+### Original Score Feature
+The third feature (originalScore) has no parameters, and uses the
+OriginalScoreFeature class instead of the SolrFeature class.  Its purpose is
+to simply return the score for the original search request against the current
+matching document.
+
+### External Features
+Users can specify external information that can be passed in as
+part of the query to the ltr ranking framework. In this case, the
+fourth feature (userTextTitleMatch) will be looking for an external field
+called 'user_text' passed in through the request, and will fire if there is
+a term match for the document field 'title' from the value of the external
+field 'user_text'.  You can provide default values for external features as
+well by specifying ${myField:myDefault}, similar to how you would in a Solr config.
+In this case, the fifth feature (userFromMobile) will be looking for an external parameter
+called 'userFromMobile' passed in through the request. If the ValueFeature is declared with
+required=true, it will throw an exception if the external feature is not passed;
+with required=false, it will silently ignore the feature and skip its scoring (at document
+scoring time, the model will use 0 as the feature value).
+The advantage of defining a feature as not required, where possible, is to avoid wasting
+cache space and the time spent calculating the feature score.
+See the [Run a Rerank Query](#run-a-rerank-query) section for how to pass in external information.
+
+### Custom Features
+Custom features can be created by extending from
+org.apache.solr.ltr.feature.Feature, however this is generally not recommended.
+The majority of features should be possible to create using the methods described
+above.
+
+# Defining Models
+Currently the Learning to Rank plugin supports 2 generalized forms of
+models: 1. Linear Model i.e. [RankSVM](http://www.cs.cornell.edu/people/tj/publications/joachims_02c.pdf), [Pranking](https://papers.nips.cc/paper/2023-pranking-with-ranking.pdf)
+and 2. Multiple Additive Trees i.e. [LambdaMART](http://research.microsoft.com/pubs/132652/MSR-TR-2010-82.pdf), [Gradient Boosted Regression Trees (GBRT)](https://papers.nips.cc/paper/3305-a-general-boosting-method-and-its-application-to-learning-ranking-functions-for-web-search.pdf)
+
+### Linear
+If you'd like to introduce a bias, set a constant feature
+to the bias value you'd like and give that feature a weight of 1.0.
+
+###### model.json
+```json
+{
+    "class":"org.apache.solr.ltr.model.LinearModel",
+    "name":"myModelName",
+    "features":[
+        { "name": "userTextTitleMatch"},
+        { "name": "originalScore"},
+        { "name": "isBook"}
+    ],
+    "params":{
+        "weights": {
+            "userTextTitleMatch": 1.0,
+            "originalScore": 0.5,
+            "isBook": 0.1
+        }
+
+    }
+}
+```
+
+This is an example of a toy Linear model. Class specifies the class to be
+used to interpret the model. Name is the model identifier you will use
+when making a request to the ltr framework. Features specifies the feature
+space that you want extracted when using this model. All features that
+appear in the model params will be used for scoring and must appear in
+the features list.  You can add extra features to the features list that
+will be computed but not used in the model for scoring, which can be useful
+for logging. Params are the Linear parameters.
+
+A good library for training an SVM, an example of a Linear model, is liblinear or libSVM
+(https://www.csie.ntu.edu.tw/~cjlin/liblinear/, https://www.csie.ntu.edu.tw/~cjlin/libsvm/).
+You will need to convert the libSVM model format to the format specified above.
+
+### Multiple Additive Trees
+
+###### model2.json
+```json
+{
+    "class":"org.apache.solr.ltr.model.MultipleAdditiveTreesModel",
+    "name":"multipleadditivetreesmodel",
+    "features":[
+        { "name": "userTextTitleMatch"},
+        { "name": "originalScore"}
+    ],
+    "params":{
+        "trees": [
+            {
+                "weight" : 1,
+                "root": {
+                    "feature": "userTextTitleMatch",
+                    "threshold": 0.5,
+                    "left" : {
+                        "value" : -100
+                    },
+                    "right": {
+                        "feature" : "originalScore",
+                        "threshold": 10.0,
+                        "left" : {
+                            "value" : 50
+                        },
+                        "right" : {
+                            "value" : 75
+                        }
+                    }
+                }
+            },
+            {
+                "weight" : 2,
+                "root": {
+                    "value" : -10
+                }
+            }
+        ]
+    }
+}
+```
+This is an example of a toy Multiple Additive Trees model. Class specifies the
+class to be used to interpret the model. Name is the
+model identifier you will use when making a request to the ltr framework.
+Features specifies the feature space that you want extracted when using this
+model. All features that appear in the model params will be used for scoring and
+must appear in the features list.  You can add extra features to the features
+list that will be computed but not used in the model for scoring, which can
+be useful for logging. Params are the Multiple Additive Trees specific parameters. In this
+case we have 2 trees, one with 3 leaf nodes and one with 1 leaf node.
+
+A good library for training LambdaMART, an example of Multiple Additive Trees, is RankLib (http://sourceforge.net/p/lemur/wiki/RankLib/).
+You will need to convert the RankLib model format to the format specified above.
+
+# Deploy Models and Features
+To send features run
+
+`curl -XPUT 'http://localhost:8983/solr/collection1/schema/feature-store' --data-binary @/path/features.json -H 'Content-type:application/json'`
+
+To send models run
+
+`curl -XPUT 'http://localhost:8983/solr/collection1/schema/model-store' --data-binary @/path/model.json -H 'Content-type:application/json'`
+
+
+# View Models and Features
+`curl -XGET 'http://localhost:8983/solr/collection1/schema/feature-store'`
+
+`curl -XGET 'http://localhost:8983/solr/collection1/schema/model-store'`
+
+# Run a Rerank Query
+Add to your original solr query
+`rq={!ltr model=myModelName reRankDocs=25}`
+
+The model name is the name of the model you sent to solr earlier.
+reRankDocs is the number of documents you want reranked, which can be
+larger than the number you display.
+
+### Pass in external information for external features
+Add to your original solr query
+`rq={!ltr reRankDocs=3 model=externalmodel efi.field1='text1' efi.field2='text2'}`
+
+Where "field1" specifies the name of the customized field to be used by one
+or more of your features, and text1 is the information to be pass in. As an
+example that matches the earlier shown userTextTitleMatch feature one could do:
+
+`rq={!ltr reRankDocs=3 model=externalmodel efi.user_text='Casablanca' efi.user_intent='movie'}`
+
+# Extract features
+To extract features you need to use the feature vector transformer `features`
+
+`fl=*,score,[features]&rq={!ltr model=yourModel reRankDocs=25}`
+
+If you use `[features]` together with your reranking model, it will return
+the array of features used by your model. Otherwise you can just ask solr to
+produce the features without doing the reranking:
+
+`fl=*,score,[features store=yourFeatureStore format=[dense|sparse] ]`
+
+This will return the values of the features in the given store. The format of the 
+extracted features will be based on the format parameter. The default is sparse.
+
+# Assemble training data
+In order to train a learning to rank model you need training data. Training data is
+what "teaches" the model what the appropriate weight for each feature is. In general
+training data is a collection of queries with associated documents and what their ranking/score
+should be. As an example:
+```
+secretary of state|John Kerry|0.66|CROWDSOURCE
+secretary of state|Cesar A. Perales|0.33|CROWDSOURCE
+secretary of state|New York State|0.0|CROWDSOURCE
+secretary of state|Colorado State University Secretary|0.0|CROWDSOURCE
+
+microsoft ceo|Satya Nadella|1.0|CLICK_LOG
+microsoft ceo|Microsoft|0.0|CLICK_LOG
+microsoft ceo|State|0.0|CLICK_LOG
+microsoft ceo|Secretary|0.0|CLICK_LOG
+```
+In this example the first column indicates the query, the second column indicates a unique id for that doc,
+the third column indicates the relative importance or relevance of that doc, and the fourth column indicates the source.
+There are 2 primary ways you might collect data for use with your machine learning algorithm. The first
+is to collect the clicks of your users given a specific query. There are many ways of preparing this data
+to train a model (http://www.cs.cornell.edu/people/tj/publications/joachims_etal_05a.pdf). The general idea
+is that if a user sees multiple documents and clicks the one lower down, that document should be scored higher
+than the one above it. The second way is explicitly through a crowdsourcing platform like Mechanical Turk or
+CrowdFlower. These platforms allow you to show human workers documents associated with a query and have them
+tell you what the correct ranking should be.
+
+At this point you'll need to collect feature vectors for each query document pair. You can use the information
+from the Extract features section above to do this. An example script has been included in example/train_and_upload_demo_model.py.
+
+# Explanation of the core reranking logic
+An LTR model is plugged into the ranking through the [LTRQParserPlugin](/solr/contrib/ltr/src/java/org/apache/solr/search/LTRQParserPlugin.java). The plugin will
+read from the request the model, an instance of [LTRScoringModel](/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/LTRScoringModel.java),
+plus other parameters. The plugin will generate an LTRQuery, a particular [ReRankQuery](/solr/core/src/java/org/apache/solr/search/AbstractReRankQuery.java).
+It wraps the original solr query for the first pass ranking, and uses the provided model in an
+[LTRScoringQuery](/solr/contrib/ltr/src/java/org/apache/solr/ltr/LTRScoringQuery.java) to
+rescore and rerank the top documents.  The LTRScoringQuery will take care of computing the values of all the
+[features](/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/Feature.java) and then will delegate the final score
+generation to the LTRScoringModel.
+
+# Speeding up the weight creation with threads
+About half the time for ranking is spent in the creation of weights for each feature used in ranking. If the number of features is significantly high (say, 500 or more), this increases the ranking overhead proportionally. To alleviate this problem, parallel weight creation is provided as a configurable option. In order to use this feature, the following lines need to be added to the solrconfig.xml
+```xml
+
+<config>
+  <!-- Query parser used to rerank top docs with a provided model -->
+  <queryParser name="ltr" class="org.apache.solr.search.LTRQParserPlugin">
+     <int name="threadModule.totalPoolThreads">10</int> <!-- Maximum threads to share for all requests -->
+     <int name="threadModule.numThreadsPerRequest">5</int> <!-- Maximum threads to use for a single requests-->
+  </queryParser>
+  
+  <!-- Transformer for extracting features -->
+  <transformer name="features" class="org.apache.solr.response.transform.LTRFeatureLoggerTransformerFactory">
+     <int name="threadModule.totalPoolThreads">10</int> <!-- Maximum threads to share for all requests -->
+     <int name="threadModule.numThreadsPerRequest">5</int> <!-- Maximum threads to use for a single requests-->
+  </transformer>
+</config>
+
+```
+  
+The threadModule.totalPoolThreads option limits the total number of threads to be used across all query instances at any given time. threadModule.numThreadsPerRequest limits the number of threads used to process a single query. In the above example, at most 10 threads will be used to service all queries and at most 5 threads to service a single query. If the Solr instance is expected to receive no more than one query at a time, it is best to set both these numbers to the same value. If multiple queries need to be serviced simultaneously, the numbers can be adjusted based on the expected response times. Increasing threadModule.numThreadsPerRequest improves the response time for a single query up to a point. If multiple queries are serviced simultaneously, threadModule.totalPoolThreads creates contention between the queries whenever (threadModule.numThreadsPerRequest * total parallel queries) > threadModule.totalPoolThreads. For example, with the settings above, three simultaneous queries each wanting 5 threads would compete for the shared pool of 10.
+

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/README.txt
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/README.txt b/solr/contrib/ltr/README.txt
new file mode 120000
index 0000000..42061c0
--- /dev/null
+++ b/solr/contrib/ltr/README.txt
@@ -0,0 +1 @@
+README.md
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/build.xml
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/build.xml b/solr/contrib/ltr/build.xml
new file mode 100644
index 0000000..bbd5cf3
--- /dev/null
+++ b/solr/contrib/ltr/build.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0"?>
+
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one or more
+    contributor license agreements.  See the NOTICE file distributed with
+    this work for additional information regarding copyright ownership.
+    The ASF licenses this file to You under the Apache License, Version 2.0
+    the "License"); you may not use this file except in compliance with
+    the License.  You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+ -->
+
+<project name="solr-ltr" default="default">
+
+  <description>
+    Learning to Rank Package
+  </description>
+
+  <import file="../contrib-build.xml"/>
+
+  <target name="compile-core" depends="solr-contrib-build.compile-core"/>
+
+</project>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/example/config.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/example/config.json b/solr/contrib/ltr/example/config.json
new file mode 100644
index 0000000..483fe69
--- /dev/null
+++ b/solr/contrib/ltr/example/config.json
@@ -0,0 +1,14 @@
+{
+  "host":                     "localhost",
+  "port":                     8983,
+  "collection":               "techproducts",
+  "requestHandler":           "query",
+  "q":                        "*:*",
+  "otherParams":              "fl=id,score,[features efi.user_query='$USERQUERY']",
+  "userQueriesFile":          "user_queries.txt",
+  "trainingFile":             "ClickData",
+  "featuresFile":             "techproducts-features.json",
+  "trainingLibraryLocation":  "liblinear/train",
+  "solrModelFile":            "solrModel.json",
+  "solrModelName":            "ExampleModel"
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/example/libsvm_formatter.py
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/example/libsvm_formatter.py b/solr/contrib/ltr/example/libsvm_formatter.py
new file mode 100644
index 0000000..25cf10b
--- /dev/null
+++ b/solr/contrib/ltr/example/libsvm_formatter.py
@@ -0,0 +1,124 @@
+from subprocess import call
+import os
+
+PAIRWISE_THRESHOLD = 1.e-1
+FEATURE_DIFF_THRESHOLD = 1.e-6
+
+class LibSvmFormatter:
+    def processQueryDocFeatureVector(self,docClickInfo,trainingFile):
+        '''Expects as input a list or generator, sorted by query, that provides the context
+        for each query in a tuple composed of: (query, docId, relevance, source, featureVector).
+        The documents that are part of the same query will generate comparisons
+        against each other for training.'''
+        curQueryAndSource = ""
+        with open(trainingFile,"w") as output:
+            self.featureNameToId  = {}
+            self.featureIdToName = {}
+            self.curFeatIndex = 1
+            curListOfFv = []
+            for query,docId,relevance,source,featureVector in docClickInfo:
+                if curQueryAndSource != query + source:
+                    # Time to flush out all the pairs
+                    _writeRankSVMPairs(curListOfFv,output)
+                    curListOfFv = []
+                    curQueryAndSource = query + source
+                curListOfFv.append((relevance,self._makeFeaturesMap(featureVector)))
+            _writeRankSVMPairs(curListOfFv,output) # This catches the last list of comparisons
+
+    def _makeFeaturesMap(self,featureVector):
+        '''Expects a list of strings with "feature name":"feature value" pairs. Outputs a map of
+        map[key] = value, where key is now an integer. libSVM requires the key to be an integer
+        but not all libraries have this requirement.'''
+        features = {}
+        for keyValuePairStr in featureVector:
+            featName,featValue = keyValuePairStr.split(":")
+            features[self._getFeatureId(featName)] = float(featValue)
+        return features
+
+    def _getFeatureId(self,key):
+        if key not in self.featureNameToId:
+            self.featureNameToId[key] = self.curFeatIndex
+            self.featureIdToName[self.curFeatIndex] = key
+            self.curFeatIndex += 1
+        return self.featureNameToId[key]
+
+    def convertLibSvmModelToLtrModel(self,libSvmModelLocation, outputFile, modelName):
+        with open(libSvmModelLocation, 'r') as inFile:
+            with open(outputFile,'w') as convertedOutFile:
+                convertedOutFile.write('{\n\t"class":"org.apache.solr.ltr.model.LinearModel",\n')
+                convertedOutFile.write('\t"name": "' + str(modelName) + '",\n')
+                convertedOutFile.write('\t"features": [\n')
+                isFirst = True
+                for featKey in self.featureNameToId.keys():
+                    convertedOutFile.write('\t\t{ "name":"' + featKey + '"}' if isFirst else ',\n\t\t{ "name":"' + featKey + '"}')
+                    isFirst = False
+                convertedOutFile.write("\n\t],\n")
+                convertedOutFile.write('\t"params": {\n\t\t"weights": {\n')
+
+                startReading = False
+                isFirst = True
+                counter = 1
+                for line in inFile:
+                    if startReading:
+                        newParamVal = float(line.strip())
+                        if not isFirst:
+                            convertedOutFile.write(',\n\t\t\t"' + self.featureIdToName[counter] + '":' + str(newParamVal))
+                        else:
+                            convertedOutFile.write('\t\t\t"' + self.featureIdToName[counter] + '":' + str(newParamVal))
+                            isFirst = False
+                        counter += 1
+                    elif line.strip() == 'w':
+                        startReading = True
+                convertedOutFile.write('\n\t\t}\n\t}\n}')
+
+def _writeRankSVMPairs(listOfFeatures,output):
+    '''Given a list of (relevance, {features map}) tuples, where the list represents
+    a set of documents to be compared, this calculates all pairs and
+    writes the feature vectors in a format compatible with libSVM.
+    Ex: listOfFeatures = [
+      #(relevance, {feature1:value, featureN:value})
+      (4, {1:0.9, 2:0.9, 3:0.1}),
+      (3, {1:0.7, 2:0.9, 3:0.2}),
+      (1, {1:0.1, 2:0.9, 6:0.1})
+    ]
+    '''
+    for d1 in range(0,len(listOfFeatures)):
+        for d2 in range(d1+1,len(listOfFeatures)):
+            doc1,doc2 = listOfFeatures[d1], listOfFeatures[d2]
+            fv1,fv2 = doc1[1],doc2[1]
+            d1Relevance, d2Relevance = float(doc1[0]),float(doc2[0])
+            if d1Relevance - d2Relevance > PAIRWISE_THRESHOLD: # d1Relevance > d2Relevance
+                outputLibSvmLine("+1",subtractFvMap(fv1,fv2),output)
+                outputLibSvmLine("-1",subtractFvMap(fv2,fv1),output)
+            elif d1Relevance - d2Relevance < -PAIRWISE_THRESHOLD: # d1Relevance < d2Relevance
+                outputLibSvmLine("+1",subtractFvMap(fv2,fv1),output)
+                outputLibSvmLine("-1",subtractFvMap(fv1,fv2),output)
+            else: # Approximately equal relevance: a useless signal, so skip this pair
+                continue
+
+def subtractFvMap(fv1,fv2):
+    '''Returns the feature vector from fv1 - fv2'''
+    retFv = fv1.copy()
+    for featInd in fv2.keys():
+        subVal = 0.0
+        if featInd in fv1:
+            subVal = fv1[featInd] - fv2[featInd]
+        else:
+            subVal = -fv2[featInd]
+        if abs(subVal) > FEATURE_DIFF_THRESHOLD: # This keeps everything in sparse format and removes useless signals
+            retFv[featInd] = subVal
+        else:
+            retFv.pop(featInd, None)
+    return retFv
+
+def outputLibSvmLine(sign,fvMap,outputFile):
+    outputFile.write(sign)
+    for feat in fvMap.keys():
+        outputFile.write(" " + str(feat) + ":" + str(fvMap[feat]))
+    outputFile.write("\n")
+
+def trainLibSvm(libraryLocation,trainingFileName):
+    if os.path.isfile(libraryLocation):
+        call([libraryLocation, trainingFileName])
+    else:
+        raise Exception("NO LIBRARY FOUND: " + libraryLocation)


[40/50] [abbrv] lucene-solr:apiv2: SOLR-8542: Adds Solr Learning to Rank (LTR) plugin for reranking results with machine learning models. (Michael Nilsson, Diego Ceccarelli, Joshua Pantony, Jon Dorando, Naveen Santhapuri, Alessandro Benedetti, David Groh

Posted by sa...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/SolrFeature.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/SolrFeature.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/SolrFeature.java
new file mode 100644
index 0000000..cb7c1a0
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/SolrFeature.java
@@ -0,0 +1,320 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.feature;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.Weight;
+import org.apache.lucene.util.Bits;
+import org.apache.solr.common.params.CommonParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.core.SolrCore;
+import org.apache.solr.request.LocalSolrQueryRequest;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.search.QParser;
+import org.apache.solr.search.SolrIndexSearcher;
+import org.apache.solr.search.SyntaxError;
+/**
+ * This feature allows you to reuse any Solr query as a feature. The value
+ * of the feature will be the score of the given query for the current document.
+ * See the <a href="https://cwiki.apache.org/confluence/display/solr/Other+Parsers">Solr documentation of other parsers</a> for the kinds of queries you can use as a feature.
+ * Example configurations:
+ * <pre>[{ "name": "isBook",
+  "class": "org.apache.solr.ltr.feature.SolrFeature",
+  "params":{ "fq": ["{!terms f=category}book"] }
+},
+{
+  "name":  "documentRecency",
+  "class": "org.apache.solr.ltr.feature.SolrFeature",
+  "params": {
+      "q": "{!func}recip( ms(NOW,publish_date), 3.16e-11, 1, 1)"
+  }
+}]</pre>
+ **/
+public class SolrFeature extends Feature {
+
+  private String df;
+  private String q;
+  private List<String> fq;
+
+  public String getDf() {
+    return df;
+  }
+
+  public void setDf(String df) {
+    this.df = df;
+  }
+
+  public String getQ() {
+    return q;
+  }
+
+  public void setQ(String q) {
+    this.q = q;
+  }
+
+  public List<String> getFq() {
+    return fq;
+  }
+
+  public void setFq(List<String> fq) {
+    this.fq = fq;
+  }
+
+  public SolrFeature(String name, Map<String,Object> params) {
+    super(name, params);
+  }
+
+  @Override
+  public LinkedHashMap<String,Object> paramsToMap() {
+    final LinkedHashMap<String,Object> params = new LinkedHashMap<>(3, 1.0f);
+    if (df != null) {
+      params.put("df", df);
+    }
+    if (q != null) {
+      params.put("q", q);
+    }
+    if (fq != null) {
+      params.put("fq", fq);
+    }
+    return params;
+  }
+
+  @Override
+  public FeatureWeight createWeight(IndexSearcher searcher, boolean needsScores,
+      SolrQueryRequest request, Query originalQuery, Map<String,String[]> efi)
+          throws IOException {
+    return new SolrFeatureWeight(searcher, request, originalQuery, efi);
+  }
+
+  @Override
+  protected void validate() throws FeatureException {
+    if ((q == null || q.isEmpty()) &&
+        ((fq == null) || fq.isEmpty())) {
+      throw new FeatureException(getClass().getSimpleName()+
+          ": Q or FQ must be provided");
+    }
+  }
+  /**
+   * Weight for a SolrFeature
+   **/
+  public class SolrFeatureWeight extends FeatureWeight {
+    Weight solrQueryWeight;
+    Query query;
+    List<Query> queryAndFilters;
+
+    public SolrFeatureWeight(IndexSearcher searcher,
+        SolrQueryRequest request, Query originalQuery, Map<String,String[]> efi) throws IOException {
+      super(SolrFeature.this, searcher, request, originalQuery, efi);
+      try {
+        String solrQuery = q;
+        final List<String> fqs = fq;
+
+        if ((solrQuery == null) || solrQuery.isEmpty()) {
+          solrQuery = "*:*";
+        }
+
+        solrQuery = macroExpander.expand(solrQuery);
+        if (solrQuery == null) {
+          throw new FeatureException(this.getClass().getSimpleName()+" requires efi parameter that was not passed in request.");
+        }
+
+        final SolrQueryRequest req = makeRequest(request.getCore(), solrQuery,
+            fqs, df);
+        if (req == null) {
+          throw new IOException("ERROR: No parameters provided");
+        }
+
+        // Build the filter queries
+        queryAndFilters = new ArrayList<Query>(); // If there are no fqs we just want an empty list
+        if (fqs != null) {
+          for (String fq : fqs) {
+            if ((fq != null) && (fq.trim().length() != 0)) {
+              fq = macroExpander.expand(fq);
+              final QParser fqp = QParser.getParser(fq, req);
+              final Query filterQuery = fqp.getQuery();
+              if (filterQuery != null) {
+                queryAndFilters.add(filterQuery);
+              }
+            }
+          }
+        }
+
+        final QParser parser = QParser.getParser(solrQuery, req);
+        query = parser.parse();
+
+        // Query can be null if there was no input to parse. For instance,
+        // if you make a phrase query with "to be", the analyzer may
+        // remove all the words, leaving nothing for the phrase query
+        // to parse.
+        if (query != null) {
+          queryAndFilters.add(query);
+          solrQueryWeight = searcher.createNormalizedWeight(query, true);
+        }
+      } catch (final SyntaxError e) {
+        throw new FeatureException("Failed to parse feature query.", e);
+      }
+    }
+
+    private LocalSolrQueryRequest makeRequest(SolrCore core, String solrQuery,
+        List<String> fqs, String df) {
+      final NamedList<String> returnList = new NamedList<String>();
+      if ((solrQuery != null) && !solrQuery.isEmpty()) {
+        returnList.add(CommonParams.Q, solrQuery);
+      }
+      if (fqs != null) {
+        for (final String fq : fqs) {
+          returnList.add(CommonParams.FQ, fq);
+        }
+      }
+      if ((df != null) && !df.isEmpty()) {
+        returnList.add(CommonParams.DF, df);
+      }
+      if (returnList.size() > 0) {
+        return new LocalSolrQueryRequest(core, returnList);
+      } else {
+        return null;
+      }
+    }
+
+    @Override
+    public FeatureScorer scorer(LeafReaderContext context) throws IOException {
+      Scorer solrScorer = null;
+      if (solrQueryWeight != null) {
+        solrScorer = solrQueryWeight.scorer(context);
+      }
+
+      final DocIdSetIterator idItr = getDocIdSetIteratorFromQueries(
+          queryAndFilters, context);
+      if (idItr != null) {
+        return solrScorer == null ? new ValueFeatureScorer(this, 1f, idItr)
+            : new SolrFeatureScorer(this, solrScorer,
+                new SolrFeatureScorerIterator(idItr, solrScorer.iterator()));
+      } else {
+        return null;
+      }
+    }
+
+    /**
+     * Given a list of Solr filters/queries, return a doc iterator that
+     * traverses over the documents that matched all the criteria of the
+     * queries.
+     *
+     * @param queries
+     *          Filtering criteria to match documents against
+     * @param context
+     *          Index reader
+     * @return DocIdSetIterator to traverse documents that matched all filter
+     *         criteria
+     */
+    private DocIdSetIterator getDocIdSetIteratorFromQueries(List<Query> queries,
+        LeafReaderContext context) throws IOException {
+      final SolrIndexSearcher.ProcessedFilter pf = ((SolrIndexSearcher) searcher)
+          .getProcessedFilter(null, queries);
+      final Bits liveDocs = context.reader().getLiveDocs();
+
+      DocIdSetIterator idIter = null;
+      if (pf.filter != null) {
+        final DocIdSet idSet = pf.filter.getDocIdSet(context, liveDocs);
+        if (idSet != null) {
+          idIter = idSet.iterator();
+        }
+      }
+
+      return idIter;
+    }
+
+    /**
+     * Scorer for a SolrFeature
+     **/
+    public class SolrFeatureScorer extends FeatureScorer {
+      final private Scorer solrScorer;
+
+      public SolrFeatureScorer(FeatureWeight weight, Scorer solrScorer,
+          SolrFeatureScorerIterator itr) {
+        super(weight, itr);
+        this.solrScorer = solrScorer;
+      }
+
+      @Override
+      public float score() throws IOException {
+        try {
+          return solrScorer.score();
+        } catch (UnsupportedOperationException e) {
+          throw new FeatureException(
+              e.toString() + ": " +
+                  "Unable to extract feature for "
+                  + name, e);
+        }
+      }
+    }
+
+    /**
+     * An iterator that iterates only over the documents for which a feature has
+     * a value.
+     **/
+    public class SolrFeatureScorerIterator extends DocIdSetIterator {
+
+      final private DocIdSetIterator filterIterator;
+      final private DocIdSetIterator scorerFilter;
+
+      SolrFeatureScorerIterator(DocIdSetIterator filterIterator,
+          DocIdSetIterator scorerFilter) {
+        this.filterIterator = filterIterator;
+        this.scorerFilter = scorerFilter;
+      }
+
+      @Override
+      public int docID() {
+        return filterIterator.docID();
+      }
+
+      @Override
+      public int nextDoc() throws IOException {
+        int docID = filterIterator.nextDoc();
+        scorerFilter.advance(docID);
+        return docID;
+      }
+
+      @Override
+      public int advance(int target) throws IOException {
+        // We use iterator to catch the scorer up since
+        // that checks if the target id is in the query + all the filters
+        int docID = filterIterator.advance(target);
+        scorerFilter.advance(docID);
+        return docID;
+      }
+
+      @Override
+      public long cost() {
+        return filterIterator.cost() + scorerFilter.cost();
+      }
+
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/ValueFeature.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/ValueFeature.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/ValueFeature.java
new file mode 100644
index 0000000..61aa9e5
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/ValueFeature.java
@@ -0,0 +1,148 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.feature;
+
+import java.io.IOException;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.solr.request.SolrQueryRequest;
+/**
+ * This feature allows one to return a given constant value for the current document.
+ *
+ * Example configuration:
+ * <pre>{
+   "name" : "userFromMobile",
+   "class" : "org.apache.solr.ltr.feature.ValueFeature",
+   "params" : { "value" : "${userFromMobile}", "required":true }
+ }</pre>
+ *
+ * You can place a constant value like "1.3f" in the value params, but often you
+ * will want to pass in external information to use per request. For instance, maybe
+ * you want to rank things differently if the search came from a mobile device, or maybe
+ * you want to use your external query intent system as a feature.
+ * In the rerank request you can pass in rq={... efi.userFromMobile=1}, and the above
+ * feature will return 1 for all the docs for that request. If required is set to true,
+ * the request will return an error if you fail to pass in the efi; otherwise it will
+ * just skip the feature and use a default value of 0 instead.
+ **/
+public class ValueFeature extends Feature {
+  private float configValue = -1f;
+  private String configValueStr = null;
+
+  private Object value = null;
+  private Boolean required = null;
+
+  public Object getValue() {
+    return value;
+  }
+
+  public void setValue(Object value) {
+    this.value = value;
+    if (value instanceof String) {
+      configValueStr = (String) value;
+    } else if (value instanceof Double) {
+      configValue = ((Double) value).floatValue();
+    } else if (value instanceof Float) {
+      configValue = ((Float) value).floatValue();
+    } else if (value instanceof Integer) {
+      configValue = ((Integer) value).floatValue();
+    } else if (value instanceof Long) {
+      configValue = ((Long) value).floatValue();
+    } else {
+      throw new FeatureException("Invalid type for 'value' in params for " + this);
+    }
+  }
+
+  public boolean isRequired() {
+    return Boolean.TRUE.equals(required);
+  }
+
+  public void setRequired(boolean required) {
+    this.required = required;
+  }
+
+  @Override
+  public LinkedHashMap<String,Object> paramsToMap() {
+    final LinkedHashMap<String,Object> params = new LinkedHashMap<>(2, 1.0f);
+    params.put("value", value);
+    if (required != null) {
+      params.put("required", required);
+    }
+    return params;
+  }
+
+  @Override
+  protected void validate() throws FeatureException {
+    if (configValueStr != null && configValueStr.trim().isEmpty()) {
+      throw new FeatureException("Empty field 'value' in params for " + this);
+    }
+  }
+
+  public ValueFeature(String name, Map<String,Object> params) {
+    super(name, params);
+  }
+
+  @Override
+  public FeatureWeight createWeight(IndexSearcher searcher, boolean needsScores,
+      SolrQueryRequest request, Query originalQuery, Map<String,String[]> efi)
+          throws IOException {
+    return new ValueFeatureWeight(searcher, request, originalQuery, efi);
+  }
+
+  public class ValueFeatureWeight extends FeatureWeight {
+
+    final protected Float featureValue;
+
+    public ValueFeatureWeight(IndexSearcher searcher,
+        SolrQueryRequest request, Query originalQuery, Map<String,String[]> efi) {
+      super(ValueFeature.this, searcher, request, originalQuery, efi);
+      if (configValueStr != null) {
+        final String expandedValue = macroExpander.expand(configValueStr);
+        if (expandedValue != null) {
+          featureValue = Float.parseFloat(expandedValue);
+        } else if (isRequired()) {
+          throw new FeatureException(this.getClass().getSimpleName() + " requires efi parameter that was not passed in request.");
+        } else {
+          featureValue=null;
+        }
+      } else {
+        featureValue = configValue;
+      }
+    }
+
+    @Override
+    public FeatureScorer scorer(LeafReaderContext context) throws IOException {
+      if(featureValue!=null) {
+        return new ValueFeatureScorer(this, featureValue,
+            DocIdSetIterator.all(DocIdSetIterator.NO_MORE_DOCS));
+      } else {
+        return null;
+      }
+    }
+
+
+
+
+
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/package-info.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/package-info.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/package-info.java
new file mode 100644
index 0000000..456fffc
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/package-info.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ *  Contains Feature related classes
+ */
+package org.apache.solr.ltr.feature;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/LTRScoringModel.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/LTRScoringModel.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/LTRScoringModel.java
new file mode 100644
index 0000000..9edcfe5
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/LTRScoringModel.java
@@ -0,0 +1,298 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.model;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.Explanation;
+import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.ltr.feature.Feature;
+import org.apache.solr.ltr.feature.FeatureException;
+import org.apache.solr.ltr.norm.IdentityNormalizer;
+import org.apache.solr.ltr.norm.Normalizer;
+import org.apache.solr.util.SolrPluginUtils;
+
+/**
+ * A scoring model computes scores that can be used to rerank documents.
+ * <p>
+ * A scoring model consists of
+ * <ul>
+ * <li> a list of features ({@link Feature}) and
+ * <li> a list of normalizers ({@link Normalizer}) plus
+ * <li> parameters or configuration to represent the scoring algorithm.
+ * </ul>
+ * <p>
+ * Example configuration (snippet):
+ * <pre>{
+   "class" : "...",
+   "name" : "myModelName",
+   "features" : [
+       {
+         "name" : "isBook"
+       },
+       {
+         "name" : "originalScore",
+         "norm": {
+             "class" : "org.apache.solr.ltr.norm.StandardNormalizer",
+             "params" : { "avg":"100", "std":"10" }
+         }
+       },
+       {
+         "name" : "price",
+         "norm": {
+             "class" : "org.apache.solr.ltr.norm.MinMaxNormalizer",
+             "params" : { "min":"0", "max":"1000" }
+         }
+       }
+   ],
+   "params" : {
+       ...
+   }
+}</pre>
+ * <p>
+ * {@link LTRScoringModel} is an abstract class and concrete classes must
+ * implement the {@link #score(float[])} and
+ * {@link #explain(LeafReaderContext, int, float, List)} methods.
+ */
+public abstract class LTRScoringModel {
+
+  protected final String name;
+  private final String featureStoreName;
+  protected final List<Feature> features;
+  private final List<Feature> allFeatures;
+  private final Map<String,Object> params;
+  private final List<Normalizer> norms;
+
+  public static LTRScoringModel getInstance(SolrResourceLoader solrResourceLoader,
+      String className, String name, List<Feature> features,
+      List<Normalizer> norms,
+      String featureStoreName, List<Feature> allFeatures,
+      Map<String,Object> params) throws ModelException {
+    final LTRScoringModel model;
+    try {
+      // create an instance of the model
+      model = solrResourceLoader.newInstance(
+          className,
+          LTRScoringModel.class,
+          new String[0], // no sub packages
+          new Class[] { String.class, List.class, List.class, String.class, List.class, Map.class },
+          new Object[] { name, features, norms, featureStoreName, allFeatures, params });
+      if (params != null) {
+        SolrPluginUtils.invokeSetters(model, params.entrySet());
+      }
+    } catch (final Exception e) {
+      throw new ModelException("Model type does not exist " + className, e);
+    }
+    model.validate();
+    return model;
+  }
+
+  public LTRScoringModel(String name, List<Feature> features,
+      List<Normalizer> norms,
+      String featureStoreName, List<Feature> allFeatures,
+      Map<String,Object> params) {
+    this.name = name;
+    this.features = features;
+    this.featureStoreName = featureStoreName;
+    this.allFeatures = allFeatures;
+    this.params = params;
+    this.norms = norms;
+  }
+
+  /**
+   * Validate that settings make sense and throws
+   * {@link ModelException} if they do not make sense.
+   */
+  protected void validate() throws ModelException {
+    if (features.isEmpty()) {
+      throw new ModelException("no features declared for model "+name);
+    }
+    final HashSet<String> featureNames = new HashSet<>();
+    for (final Feature feature : features) {
+      final String featureName = feature.getName();
+      if (!featureNames.add(featureName)) {
+        throw new ModelException("duplicated feature "+featureName+" in model "+name);
+      }
+    }
+    if (features.size() != norms.size()) {
+      throw new ModelException("counted "+features.size()+" features and "+norms.size()+" norms in model "+name);
+    }
+  }
+
+  /**
+   * @return the norms
+   */
+  public List<Normalizer> getNorms() {
+    return Collections.unmodifiableList(norms);
+  }
+
+  /**
+   * @return the name
+   */
+  public String getName() {
+    return name;
+  }
+
+  /**
+   * @return the features
+   */
+  public List<Feature> getFeatures() {
+    return Collections.unmodifiableList(features);
+  }
+
+  public Map<String,Object> getParams() {
+    return params;
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = (prime * result) + ((features == null) ? 0 : features.hashCode());
+    result = (prime * result) + ((name == null) ? 0 : name.hashCode());
+    result = (prime * result) + ((params == null) ? 0 : params.hashCode());
+    result = (prime * result) + ((norms == null) ? 0 : norms.hashCode());
+    result = (prime * result) + ((featureStoreName == null) ? 0 : featureStoreName.hashCode());
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+    if (obj == null) {
+      return false;
+    }
+    if (getClass() != obj.getClass()) {
+      return false;
+    }
+    final LTRScoringModel other = (LTRScoringModel) obj;
+    if (features == null) {
+      if (other.features != null) {
+        return false;
+      }
+    } else if (!features.equals(other.features)) {
+      return false;
+    }
+    if (norms == null) {
+      if (other.norms != null) {
+        return false;
+      }
+    } else if (!norms.equals(other.norms)) {
+      return false;
+    }
+    if (name == null) {
+      if (other.name != null) {
+        return false;
+      }
+    } else if (!name.equals(other.name)) {
+      return false;
+    }
+    if (params == null) {
+      if (other.params != null) {
+        return false;
+      }
+    } else if (!params.equals(other.params)) {
+      return false;
+    }
+    if (featureStoreName == null) {
+      if (other.featureStoreName != null) {
+        return false;
+      }
+    } else if (!featureStoreName.equals(other.featureStoreName)) {
+      return false;
+    }
+
+
+    return true;
+  }
+
+  public boolean hasParams() {
+    return !((params == null) || params.isEmpty());
+  }
+
+  public Collection<Feature> getAllFeatures() {
+    return allFeatures;
+  }
+
+  public String getFeatureStoreName() {
+    return featureStoreName;
+  }
+
+  /**
+   * Given a list of normalized values for all features a scoring algorithm
+   * cares about, calculate and return a score.
+   *
+   * @param modelFeatureValuesNormalized
+   *          List of normalized feature values. Each feature is identified by
+   *          its id, which is the index in the array
+   * @return The final score for a document
+   */
+  public abstract float score(float[] modelFeatureValuesNormalized);
+
+  /**
+   * Similar to the score() function, except it returns an explanation of how
+   * the features were used to calculate the score.
+   *
+   * @param context
+   *          Context the document is in
+   * @param doc
+   *          Document to explain
+   * @param finalScore
+   *          Original score
+   * @param featureExplanations
+   *          Explanations for each feature calculation
+   * @return Explanation for the scoring of a document
+   */
+  public abstract Explanation explain(LeafReaderContext context, int doc,
+      float finalScore, List<Explanation> featureExplanations);
+
+  @Override
+  public String toString() {
+    return  getClass().getSimpleName() + "(name="+getName()+")";
+  }
+
+  /**
+   * Goes through all the stored feature values, and calculates the normalized
+   * values for all the features that will be used for scoring.
+   */
+  public void normalizeFeaturesInPlace(float[] modelFeatureValues) {
+    float[] modelFeatureValuesNormalized = modelFeatureValues;
+    if (modelFeatureValues.length != norms.size()) {
+      throw new FeatureException("Must have normalizer for every feature");
+    }
+    for(int idx = 0; idx < modelFeatureValuesNormalized.length; ++idx) {
+      modelFeatureValuesNormalized[idx] =
+          norms.get(idx).normalize(modelFeatureValuesNormalized[idx]);
+    }
+  }
+
+  public Explanation getNormalizerExplanation(Explanation e, int idx) {
+    Normalizer n = norms.get(idx);
+    if (n != IdentityNormalizer.INSTANCE) {
+      return n.explain(e);
+    }
+    return e;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/LinearModel.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/LinearModel.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/LinearModel.java
new file mode 100644
index 0000000..57fc5ad
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/LinearModel.java
@@ -0,0 +1,147 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.model;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.Explanation;
+import org.apache.solr.ltr.feature.Feature;
+import org.apache.solr.ltr.norm.Normalizer;
+
+/**
+ * A scoring model that computes scores using a dot product.
+ * Example models are RankSVM and Pranking.
+ * <p>
+ * Example configuration:
+ * <pre>{
+   "class" : "org.apache.solr.ltr.model.LinearModel",
+   "name" : "myModelName",
+   "features" : [
+       { "name" : "userTextTitleMatch" },
+       { "name" : "originalScore" },
+       { "name" : "isBook" }
+   ],
+   "params" : {
+       "weights" : {
+           "userTextTitleMatch" : 1.0,
+           "originalScore" : 0.5,
+           "isBook" : 0.1
+       }
+   }
+}</pre>
+ * <p>
+ * Background reading:
+ * <ul>
+ * <li> <a href="http://www.cs.cornell.edu/people/tj/publications/joachims_02c.pdf">
+ * Thorsten Joachims. Optimizing Search Engines Using Clickthrough Data.
+ * Proceedings of the ACM Conference on Knowledge Discovery and Data Mining (KDD), ACM, 2002.</a>
+ * </ul>
+ * <ul>
+ * <li> <a href="https://papers.nips.cc/paper/2023-pranking-with-ranking.pdf">
+ * Koby Crammer and Yoram Singer. Pranking with Ranking.
+ * Advances in Neural Information Processing Systems (NIPS), 2001.</a>
+ * </ul>
+ */
+public class LinearModel extends LTRScoringModel {
+
+  protected Float[] featureToWeight;
+
+  public void setWeights(Object weights) {
+    final Map<String,Double> modelWeights = (Map<String,Double>) weights;
+    for (int ii = 0; ii < features.size(); ++ii) {
+      final String key = features.get(ii).getName();
+      final Double val = modelWeights.get(key);
+      featureToWeight[ii] = (val == null ? null : new Float(val.floatValue()));
+    }
+  }
+
+  public LinearModel(String name, List<Feature> features,
+      List<Normalizer> norms,
+      String featureStoreName, List<Feature> allFeatures,
+      Map<String,Object> params) {
+    super(name, features, norms, featureStoreName, allFeatures, params);
+    featureToWeight = new Float[features.size()];
+  }
+
+  @Override
+  protected void validate() throws ModelException {
+    super.validate();
+
+    final ArrayList<String> missingWeightFeatureNames = new ArrayList<String>();
+    for (int i = 0; i < features.size(); ++i) {
+      if (featureToWeight[i] == null) {
+        missingWeightFeatureNames.add(features.get(i).getName());
+      }
+    }
+    if (missingWeightFeatureNames.size() == features.size()) {
+      throw new ModelException("Model " + name + " doesn't contain any weights");
+    }
+    if (!missingWeightFeatureNames.isEmpty()) {
+      throw new ModelException("Model " + name + " lacks weight(s) for "+missingWeightFeatureNames);
+    }
+  }
+
+  @Override
+  public float score(float[] modelFeatureValuesNormalized) {
+    float score = 0;
+    for (int i = 0; i < modelFeatureValuesNormalized.length; ++i) {
+      score += modelFeatureValuesNormalized[i] * featureToWeight[i];
+    }
+    return score;
+  }
+
+  @Override
+  public Explanation explain(LeafReaderContext context, int doc,
+      float finalScore, List<Explanation> featureExplanations) {
+    final List<Explanation> details = new ArrayList<>();
+    int index = 0;
+
+    for (final Explanation featureExplain : featureExplanations) {
+      final List<Explanation> featureDetails = new ArrayList<>();
+      featureDetails.add(Explanation.match(featureToWeight[index],
+          "weight on feature"));
+      featureDetails.add(featureExplain);
+
+      details.add(Explanation.match(featureExplain.getValue()
+          * featureToWeight[index], "prod of:", featureDetails));
+      index++;
+    }
+
+    return Explanation.match(finalScore, toString()
+        + " model applied to features, sum of:", details);
+  }
+
+  @Override
+  public String toString() {
+    final StringBuilder sb = new StringBuilder(getClass().getSimpleName());
+    sb.append("(name=").append(getName());
+    sb.append(",featureWeights=[");
+    for (int ii = 0; ii < features.size(); ++ii) {
+      if (ii>0) {
+        sb.append(',');
+      }
+      final String key = features.get(ii).getName();
+      sb.append(key).append('=').append(featureToWeight[ii]);
+    }
+    sb.append("])");
+    return sb.toString();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/ModelException.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/ModelException.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/ModelException.java
new file mode 100644
index 0000000..de8786d
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/ModelException.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.model;
+
+public class ModelException extends RuntimeException {
+
+  private static final long serialVersionUID = 1L;
+
+  public ModelException(String message) {
+    super(message);
+  }
+
+  public ModelException(String message, Exception cause) {
+    super(message, cause);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/MultipleAdditiveTreesModel.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/MultipleAdditiveTreesModel.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/MultipleAdditiveTreesModel.java
new file mode 100644
index 0000000..4fa595e
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/MultipleAdditiveTreesModel.java
@@ -0,0 +1,377 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.model;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.Explanation;
+import org.apache.solr.ltr.feature.Feature;
+import org.apache.solr.ltr.norm.Normalizer;
+import org.apache.solr.util.SolrPluginUtils;
+
+/**
+ * A scoring model that computes scores based on the summation of multiple weighted trees.
+ * Example models are LambdaMART and Gradient Boosted Regression Trees (GBRT).
+ * <p>
+ * Example configuration:
+<pre>{
+   "class" : "org.apache.solr.ltr.model.MultipleAdditiveTreesModel",
+   "name" : "multipleadditivetreesmodel",
+   "features":[
+       { "name" : "userTextTitleMatch"},
+       { "name" : "originalScore"}
+   ],
+   "params" : {
+       "trees" : [
+           {
+               "weight" : 1,
+               "root": {
+                   "feature" : "userTextTitleMatch",
+                   "threshold" : 0.5,
+                   "left" : {
+                       "value" : -100
+                   },
+                   "right" : {
+                       "feature" : "originalScore",
+                       "threshold" : 10.0,
+                       "left" : {
+                           "value" : 50
+                       },
+                       "right" : {
+                           "value" : 75
+                       }
+                   }
+               }
+           },
+           {
+               "weight" : 2,
+               "root" : {
+                   "value" : -10
+               }
+           }
+       ]
+   }
+}</pre>
+ * <p>
+ * Background reading:
+ * <ul>
+ * <li> <a href="http://research.microsoft.com/pubs/132652/MSR-TR-2010-82.pdf">
+ * Christopher J.C. Burges. From RankNet to LambdaRank to LambdaMART: An Overview.
+ * Microsoft Research Technical Report MSR-TR-2010-82.</a>
+ * </ul>
+ * <ul>
+ * <li> <a href="https://papers.nips.cc/paper/3305-a-general-boosting-method-and-its-application-to-learning-ranking-functions-for-web-search.pdf">
+ * Z. Zheng, H. Zha, T. Zhang, O. Chapelle, K. Chen, and G. Sun. A General Boosting Method and its Application to Learning Ranking Functions for Web Search.
+ * Advances in Neural Information Processing Systems (NIPS), 2007.</a>
+ * </ul>
+ */
+public class MultipleAdditiveTreesModel extends LTRScoringModel {
+
+  private final HashMap<String,Integer> fname2index;
+  private List<RegressionTree> trees;
+
+  private RegressionTree createRegressionTree(Map<String,Object> map) {
+    final RegressionTree rt = new RegressionTree();
+    if (map != null) {
+      SolrPluginUtils.invokeSetters(rt, map.entrySet());
+    }
+    return rt;
+  }
+
+  private RegressionTreeNode createRegressionTreeNode(Map<String,Object> map) {
+    final RegressionTreeNode rtn = new RegressionTreeNode();
+    if (map != null) {
+      SolrPluginUtils.invokeSetters(rtn, map.entrySet());
+    }
+    return rtn;
+  }
+
+  public class RegressionTreeNode {
+    private static final float NODE_SPLIT_SLACK = 1E-6f;
+
+    private float value = 0f;
+    private String feature;
+    private int featureIndex = -1;
+    private Float threshold;
+    private RegressionTreeNode left;
+    private RegressionTreeNode right;
+
+    public void setValue(float value) {
+      this.value = value;
+    }
+
+    public void setValue(String value) {
+      this.value = Float.parseFloat(value);
+    }
+
+    public void setFeature(String feature) {
+      this.feature = feature;
+      final Integer idx = fname2index.get(this.feature);
+      // this happens if the tree specifies a feature that does not exist
+      // this could be due to lambdaSmart building off of pre-existing trees
+      // that use a feature that is no longer output during feature extraction
+      featureIndex = (idx == null) ? -1 : idx;
+    }
+
+    public void setThreshold(float threshold) {
+      this.threshold = threshold + NODE_SPLIT_SLACK;
+    }
+
+    public void setThreshold(String threshold) {
+      this.threshold = Float.parseFloat(threshold) + NODE_SPLIT_SLACK;
+    }
+
+    public void setLeft(Object left) {
+      this.left = createRegressionTreeNode((Map<String,Object>) left);
+    }
+
+    public void setRight(Object right) {
+      this.right = createRegressionTreeNode((Map<String,Object>) right);
+    }
+
+    public boolean isLeaf() {
+      return feature == null;
+    }
+
+    public float score(float[] featureVector) {
+      if (isLeaf()) {
+        return value;
+      }
+
+      // unsupported feature (tree is looking for a feature that does not exist)
+      if  ((featureIndex < 0) || (featureIndex >= featureVector.length)) {
+        return 0f;
+      }
+
+      if (featureVector[featureIndex] <= threshold) {
+        return left.score(featureVector);
+      } else {
+        return right.score(featureVector);
+      }
+    }
+
+    public String explain(float[] featureVector) {
+      if (isLeaf()) {
+        return "val: " + value;
+      }
+
+      // unsupported feature (tree is looking for a feature that does not exist)
+      if  ((featureIndex < 0) || (featureIndex >= featureVector.length)) {
+        return  "'" + feature + "' does not exist in FV, Return Zero";
+      }
+
+      // could store extra information about how much training data supported
+      // each branch and report
+      // that here
+
+      if (featureVector[featureIndex] <= threshold) {
+        String rval = "'" + feature + "':" + featureVector[featureIndex] + " <= "
+            + threshold + ", Go Left | ";
+        return rval + left.explain(featureVector);
+      } else {
+        String rval = "'" + feature + "':" + featureVector[featureIndex] + " > "
+            + threshold + ", Go Right | ";
+        return rval + right.explain(featureVector);
+      }
+    }
+
+    @Override
+    public String toString() {
+      final StringBuilder sb = new StringBuilder();
+      if (isLeaf()) {
+        sb.append(value);
+      } else {
+        sb.append("(feature=").append(feature);
+        sb.append(",threshold=").append(threshold.floatValue()-NODE_SPLIT_SLACK);
+        sb.append(",left=").append(left);
+        sb.append(",right=").append(right);
+        sb.append(')');
+      }
+      return sb.toString();
+    }
+
+    public RegressionTreeNode() {
+    }
+
+    public void validate() throws ModelException {
+      if (isLeaf()) {
+        if (left != null || right != null) {
+          throw new ModelException("MultipleAdditiveTreesModel tree node is leaf with left="+left+" and right="+right);
+        }
+        return;
+      }
+      if (null == threshold) {
+        throw new ModelException("MultipleAdditiveTreesModel tree node is missing threshold");
+      }
+      if (null == left) {
+        throw new ModelException("MultipleAdditiveTreesModel tree node is missing left");
+      } else {
+        left.validate();
+      }
+      if (null == right) {
+        throw new ModelException("MultipleAdditiveTreesModel tree node is missing right");
+      } else {
+        right.validate();
+      }
+    }
+
+  }
+
+  public class RegressionTree {
+
+    private Float weight;
+    private RegressionTreeNode root;
+
+    public void setWeight(float weight) {
+      this.weight = new Float(weight);
+    }
+
+    public void setWeight(String weight) {
+      this.weight = new Float(weight);
+    }
+
+    public void setRoot(Object root) {
+      this.root = createRegressionTreeNode((Map<String,Object>)root);
+    }
+
+    public float score(float[] featureVector) {
+      return weight.floatValue() * root.score(featureVector);
+    }
+
+    public String explain(float[] featureVector) {
+      return root.explain(featureVector);
+    }
+
+    @Override
+    public String toString() {
+      final StringBuilder sb = new StringBuilder();
+      sb.append("(weight=").append(weight);
+      sb.append(",root=").append(root);
+      sb.append(")");
+      return sb.toString();
+    }
+
+    public RegressionTree() {
+    }
+
+    public void validate() throws ModelException {
+      if (weight == null) {
+        throw new ModelException("MultipleAdditiveTreesModel tree doesn't contain a weight");
+      }
+      if (root == null) {
+        throw new ModelException("MultipleAdditiveTreesModel tree doesn't contain a tree");
+      } else {
+        root.validate();
+      }
+    }
+  }
+
+  public void setTrees(Object trees) {
+    this.trees = new ArrayList<RegressionTree>();
+    for (final Object o : (List<Object>) trees) {
+      final RegressionTree rt = createRegressionTree((Map<String,Object>) o);
+      this.trees.add(rt);
+    }
+  }
+
+  public MultipleAdditiveTreesModel(String name, List<Feature> features,
+      List<Normalizer> norms,
+      String featureStoreName, List<Feature> allFeatures,
+      Map<String,Object> params) {
+    super(name, features, norms, featureStoreName, allFeatures, params);
+
+    fname2index = new HashMap<String,Integer>();
+    for (int i = 0; i < features.size(); ++i) {
+      final String key = features.get(i).getName();
+      fname2index.put(key, i);
+    }
+  }
+
+  @Override
+  protected void validate() throws ModelException {
+    super.validate();
+    if (trees == null) {
+      throw new ModelException("no trees declared for model "+name);
+    }
+    for (RegressionTree tree : trees) {
+      tree.validate();
+    }
+  }
+
+  @Override
+  public float score(float[] modelFeatureValuesNormalized) {
+    float score = 0;
+    for (final RegressionTree t : trees) {
+      score += t.score(modelFeatureValuesNormalized);
+    }
+    return score;
+  }
+
+  // /////////////////////////////////////////
+  // produces a string that looks like:
+  // 40.0 = multipleadditivetreesmodel [ org.apache.solr.ltr.model.MultipleAdditiveTreesModel ]
+  // model applied to
+  // features, sum of:
+  // 50.0 = tree 0 | 'matchedTitle':1.0 > 0.500001, Go Right |
+  // 'this_feature_doesnt_exist' does not
+  // exist in FV, Go Left | val: 50.0
+  // -10.0 = tree 1 | val: -10.0
+  @Override
+  public Explanation explain(LeafReaderContext context, int doc,
+      float finalScore, List<Explanation> featureExplanations) {
+    final float[] fv = new float[featureExplanations.size()];
+    int index = 0;
+    for (final Explanation featureExplain : featureExplanations) {
+      fv[index] = featureExplain.getValue();
+      index++;
+    }
+
+    final List<Explanation> details = new ArrayList<>();
+    index = 0;
+
+    for (final RegressionTree t : trees) {
+      final float score = t.score(fv);
+      final Explanation p = Explanation.match(score, "tree " + index + " | "
+          + t.explain(fv));
+      details.add(p);
+      index++;
+    }
+
+    return Explanation.match(finalScore, toString()
+        + " model applied to features, sum of:", details);
+  }
+
+  @Override
+  public String toString() {
+    final StringBuilder sb = new StringBuilder(getClass().getSimpleName());
+    sb.append("(name=").append(getName());
+    sb.append(",trees=[");
+    for (int ii = 0; ii < trees.size(); ++ii) {
+      if (ii>0) {
+        sb.append(',');
+      }
+      sb.append(trees.get(ii));
+    }
+    sb.append("])");
+    return sb.toString();
+  }
+
+}
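
For illustration, a minimal sketch of the nested structure that setTrees(Object) above consumes, loosely mirroring the two-tree example in the explain() comment. The map keys (weight, root, feature, threshold, left, right, value) follow the fields used by toString() and validate() above; the node setters themselves live earlier in this file, and string values are assumed to be accepted the same way setWeight(String) is.

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class TreesParamsSketch {
      public static void main(String[] args) {
        // tree 0: a single split on 'matchedTitle' with two leaves
        Map<String,Object> leftLeaf = new HashMap<>();
        leftLeaf.put("value", "-10.0");        // leaf nodes carry only a value
        Map<String,Object> rightLeaf = new HashMap<>();
        rightLeaf.put("value", "50.0");

        Map<String,Object> split = new HashMap<>();
        split.put("feature", "matchedTitle");  // split nodes carry feature, threshold, left, right
        split.put("threshold", "0.5");
        split.put("left", leftLeaf);
        split.put("right", rightLeaf);

        Map<String,Object> tree0 = new HashMap<>();
        tree0.put("weight", "1.0");
        tree0.put("root", split);

        // tree 1: a bare leaf
        Map<String,Object> leaf1 = new HashMap<>();
        leaf1.put("value", "-10.0");
        Map<String,Object> tree1 = new HashMap<>();
        tree1.put("weight", "1.0");
        tree1.put("root", leaf1);

        List<Object> trees = new ArrayList<>();
        trees.add(tree0);
        trees.add(tree1);
        // a MultipleAdditiveTreesModel instance would receive this via setTrees(trees)
      }
    }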

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/package-info.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/package-info.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/package-info.java
new file mode 100644
index 0000000..32bd626
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/package-info.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ *  Contains Model related classes
+ */
+package org.apache.solr.ltr.model;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/IdentityNormalizer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/IdentityNormalizer.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/IdentityNormalizer.java
new file mode 100644
index 0000000..a3d1026
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/IdentityNormalizer.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.norm;
+
+import java.util.LinkedHashMap;
+
+/**
+ * A Normalizer that normalizes a feature value to itself. This is the
+ * default normalizer class; if no normalizer is configured, then the
+ * IdentityNormalizer will be used.
+ */
+public class IdentityNormalizer extends Normalizer {
+
+  public static final IdentityNormalizer INSTANCE = new IdentityNormalizer();
+
+  public IdentityNormalizer() {
+
+  }
+
+  @Override
+  public float normalize(float value) {
+    return value;
+  }
+
+  @Override
+  public LinkedHashMap<String,Object> paramsToMap() {
+    return null;
+  }
+
+  @Override
+  protected void validate() throws NormalizerException {
+  }
+
+  @Override
+  public String toString() {
+    return getClass().getSimpleName();
+  }
+
+}
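
A minimal usage sketch; since this normalizer is stateless, the shared INSTANCE defined above can be used instead of constructing new objects:

    import org.apache.solr.ltr.norm.IdentityNormalizer;
    import org.apache.solr.ltr.norm.Normalizer;

    public class IdentitySketch {
      public static void main(String[] args) {
        Normalizer n = IdentityNormalizer.INSTANCE; // stateless, safe to share
        System.out.println(n.normalize(3.14f));     // prints 3.14: the value is unchanged
      }
    }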

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/MinMaxNormalizer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/MinMaxNormalizer.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/MinMaxNormalizer.java
new file mode 100644
index 0000000..92e233c
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/MinMaxNormalizer.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.norm;
+
+import java.util.LinkedHashMap;
+
+/**
+ * A Normalizer to scale a feature value using a (min,max) range.
+ * <p>
+ * Example configuration:
+<pre>
+"norm" : {
+    "class" : "org.apache.solr.ltr.norm.MinMaxNormalizer",
+    "params" : { "min":"0", "max":"50" }
+}
+</pre>
+ * Example normalizations:
+ * <ul>
+ * <li>-5 will be normalized to -0.1
+ * <li>55 will be normalized to  1.1
+ * <li>+5 will be normalized to +0.1
+ * </ul>
+ */
+public class MinMaxNormalizer extends Normalizer {
+
+  private float min = Float.NEGATIVE_INFINITY;
+  private float max = Float.POSITIVE_INFINITY;
+  private float delta = max - min;
+
+  private void updateDelta() {
+    delta = max - min;
+  }
+
+  public float getMin() {
+    return min;
+  }
+
+  public void setMin(float min) {
+    this.min = min;
+    updateDelta();
+  }
+
+  public void setMin(String min) {
+    this.min = Float.parseFloat(min);
+    updateDelta();
+  }
+
+  public float getMax() {
+    return max;
+  }
+
+  public void setMax(float max) {
+    this.max = max;
+    updateDelta();
+  }
+
+  public void setMax(String max) {
+    this.max = Float.parseFloat(max);
+    updateDelta();
+  }
+
+  @Override
+  protected void validate() throws NormalizerException {
+    if (delta == 0f) {
+      throw
+      new NormalizerException("MinMax Normalizer delta must not be zero " +
+          "| min = " + min + ",max = " + max + ",delta = " + delta);
+    }
+  }
+
+  @Override
+  public float normalize(float value) {
+    return (value - min) / delta;
+  }
+
+  @Override
+  public LinkedHashMap<String,Object> paramsToMap() {
+    final LinkedHashMap<String,Object> params = new LinkedHashMap<>(2, 1.0f);
+    params.put("min", min);
+    params.put("max", max);
+    return params;
+  }
+
+  @Override
+  public String toString() {
+    final StringBuilder sb = new StringBuilder(64); // default initialCapacity of 16 won't be enough
+    sb.append(getClass().getSimpleName()).append('(');
+    sb.append("min=").append(min);
+    sb.append(",max=").append(max).append(')');
+    return sb.toString();
+  }
+
+}
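
A minimal usage sketch checking the (min=0, max=50) examples from the javadoc above against normalize(float) as defined here:

    import org.apache.solr.ltr.norm.MinMaxNormalizer;

    public class MinMaxSketch {
      public static void main(String[] args) {
        MinMaxNormalizer n = new MinMaxNormalizer();
        n.setMin(0f);                          // delta is recomputed on each setter call
        n.setMax(50f);                         // delta = 50 - 0 = 50
        System.out.println(n.normalize(-5f));  // (-5 - 0) / 50 = -0.1
        System.out.println(n.normalize(55f));  // (55 - 0) / 50 =  1.1
        System.out.println(n.normalize(5f));   //  (5 - 0) / 50 =  0.1
      }
    }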

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/Normalizer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/Normalizer.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/Normalizer.java
new file mode 100644
index 0000000..2b311f8
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/Normalizer.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.norm;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.apache.lucene.search.Explanation;
+import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.util.SolrPluginUtils;
+
+/**
+ * A normalizer normalizes the value of a feature. After the feature values
+ * have been computed, the {@link Normalizer#normalize(float)} method will
+ * be called and the resulting values will be used by the model.
+ */
+public abstract class Normalizer {
+
+
+  public abstract float normalize(float value);
+
+  public abstract LinkedHashMap<String,Object> paramsToMap();
+
+  public Explanation explain(Explanation explain) {
+    final float normalized = normalize(explain.getValue());
+    final String explainDesc = "normalized using " + toString();
+
+    return Explanation.match(normalized, explainDesc, explain);
+  }
+
+  public static Normalizer getInstance(SolrResourceLoader solrResourceLoader,
+      String className, Map<String,Object> params) {
+    final Normalizer f = solrResourceLoader.newInstance(className, Normalizer.class);
+    if (params != null) {
+      SolrPluginUtils.invokeSetters(f, params.entrySet());
+    }
+    f.validate();
+    return f;
+  }
+
+  /**
+   * As part of creation of a normalizer instance, this function confirms
+   * that the normalizer parameters are valid.
+   *
+   * @throws NormalizerException
+   *             Normalizer Exception
+   */
+  protected abstract void validate() throws NormalizerException;
+
+}
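
To illustrate the extension points (normalize, paramsToMap, validate), a hypothetical subclass sketch; the LogNormalizer name and its behavior are made up for illustration and are not part of this contrib:

    package org.apache.solr.ltr.norm;

    import java.util.LinkedHashMap;

    /** Hypothetical example only: normalizes a value to log(1 + value). */
    public class LogNormalizer extends Normalizer {

      @Override
      public float normalize(float value) {
        return (float) Math.log1p(value); // NaN for values below -1; fine for a sketch
      }

      @Override
      public LinkedHashMap<String,Object> paramsToMap() {
        return null; // no parameters, like IdentityNormalizer above
      }

      @Override
      protected void validate() throws NormalizerException {
        // nothing to validate: this normalizer is parameterless
      }
    }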

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/NormalizerException.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/NormalizerException.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/NormalizerException.java
new file mode 100644
index 0000000..5b33f05
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/NormalizerException.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.norm;
+
+public class NormalizerException extends RuntimeException {
+
+  private static final long serialVersionUID = 1L;
+
+  public NormalizerException(String message) {
+    super(message);
+  }
+
+  public NormalizerException(String message, Exception cause) {
+    super(message, cause);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/StandardNormalizer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/StandardNormalizer.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/StandardNormalizer.java
new file mode 100644
index 0000000..7ab525c
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/StandardNormalizer.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.norm;
+
+import java.util.LinkedHashMap;
+
+/**
+ * A Normalizer to scale a feature value around an average-and-standard-deviation distribution.
+ * <p>
+ * Example configuration:
+<pre>
+"norm" : {
+    "class" : "org.apache.solr.ltr.norm.StandardNormalizer",
+    "params" : { "avg":"42", "std":"6" }
+}
+</pre>
+ * <p>
+ * Example normalizations:
+ * <ul>
+ * <li>39 will be normalized to -0.5
+ * <li>42 will be normalized to  0
+ * <li>45 will be normalized to +0.5
+ * </ul>
+ */
+public class StandardNormalizer extends Normalizer {
+
+  private float avg = 0f;
+  private float std = 1f;
+
+  public float getAvg() {
+    return avg;
+  }
+
+  public void setAvg(float avg) {
+    this.avg = avg;
+  }
+
+  public float getStd() {
+    return std;
+  }
+
+  public void setStd(float std) {
+    this.std = std;
+  }
+
+  public void setAvg(String avg) {
+    this.avg = Float.parseFloat(avg);
+  }
+
+  public void setStd(String std) {
+    this.std = Float.parseFloat(std);
+  }
+
+  @Override
+  public float normalize(float value) {
+    return (value - avg) / std;
+  }
+
+  @Override
+  protected void validate() throws NormalizerException {
+    if (std <= 0f) {
+      throw
+      new NormalizerException("Standard Normalizer standard deviation must "
+          + "be positive | avg = " + avg + ",std = " + std);
+    }
+  }
+
+  @Override
+  public LinkedHashMap<String,Object> paramsToMap() {
+    final LinkedHashMap<String,Object> params = new LinkedHashMap<>(2, 1.0f);
+    params.put("avg", avg);
+    params.put("std", std);
+    return params;
+  }
+
+  @Override
+  public String toString() {
+    final StringBuilder sb = new StringBuilder(64); // default initialCapacity of 16 won't be enough
+    sb.append(getClass().getSimpleName()).append('(');
+    sb.append("avg=").append(avg);
+    sb.append(",std=").append(std).append(')');
+    return sb.toString();
+  }
+
+}
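
Likewise, a quick sketch checking the (avg=42, std=6) examples from the javadoc above:

    import org.apache.solr.ltr.norm.StandardNormalizer;

    public class StandardSketch {
      public static void main(String[] args) {
        StandardNormalizer n = new StandardNormalizer();
        n.setAvg(42f);
        n.setStd(6f);
        System.out.println(n.normalize(39f)); // (39 - 42) / 6 = -0.5
        System.out.println(n.normalize(42f)); // (42 - 42) / 6 =  0.0
        System.out.println(n.normalize(45f)); // (45 - 42) / 6 =  0.5
      }
    }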

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/package-info.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/package-info.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/package-info.java
new file mode 100644
index 0000000..164b425
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/package-info.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * A normalizer normalizes the value of a feature. After the feature values
+ * have been computed, the normalizer will be applied and the resulting values
+ * will be used by the model.
+ */
+package org.apache.solr.ltr.norm;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/package-info.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/package-info.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/package-info.java
new file mode 100644
index 0000000..59aebe8
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/package-info.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * <p>
+ * This package contains the main logic for performing the reranking using
+ * a Learning to Rank model.
+ * </p>
+ * <p>
+ * A model is applied to each document through a {@link org.apache.solr.ltr.LTRScoringQuery}, a
+ * subclass of {@link org.apache.lucene.search.Query}. Like a normal query,
+ * it produces a new score for each reranked document.
+ * </p>
+ * <p>
+ * A {@link org.apache.solr.ltr.LTRScoringQuery} is created by providing an instance of
+ * {@link org.apache.solr.ltr.model.LTRScoringModel}. An instance of
+ * {@link org.apache.solr.ltr.model.LTRScoringModel}
+ * defines how to combine the features in order to create a new
+ * score for a document. A new Learning to Rank model is plugged
+ * into the framework by extending {@link org.apache.solr.ltr.model.LTRScoringModel}
+ * (see for example {@link org.apache.solr.ltr.model.MultipleAdditiveTreesModel} and {@link org.apache.solr.ltr.model.LinearModel}).
+ * </p>
+ * <p>
+ * The {@link org.apache.solr.ltr.LTRScoringQuery} will take care of computing the values of
+ * all the features (see {@link org.apache.solr.ltr.feature.Feature}) and then will delegate the final score
+ * generation to the {@link org.apache.solr.ltr.model.LTRScoringModel}, by calling the method
+ * {@link org.apache.solr.ltr.model.LTRScoringModel#score(float[] modelFeatureValuesNormalized) score(float[] modelFeatureValuesNormalized)}.
+ * </p>
+ */
+package org.apache.solr.ltr;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/FeatureStore.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/FeatureStore.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/FeatureStore.java
new file mode 100644
index 0000000..ab2595f
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/FeatureStore.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.store;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.List;
+
+import org.apache.solr.ltr.feature.Feature;
+import org.apache.solr.ltr.feature.FeatureException;
+
+public class FeatureStore {
+
+  /** the name of the default feature store **/
+  public static final String DEFAULT_FEATURE_STORE_NAME = "_DEFAULT_";
+
+  private final LinkedHashMap<String,Feature> store = new LinkedHashMap<>(); // LinkedHashMap because we need predictable iteration order
+  private final String name;
+
+  public FeatureStore(String name) {
+    this.name = name;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public Feature get(String name) {
+    return store.get(name);
+  }
+
+  public void add(Feature feature) {
+    final String name = feature.getName();
+    if (store.containsKey(name)) {
+      throw new FeatureException(name
+          + " already contained in the store, please use a different name");
+    }
+    feature.setIndex(store.size());
+    store.put(name, feature);
+  }
+
+  public List<Feature> getFeatures() {
+    final List<Feature> storeValues = new ArrayList<Feature>(store.values());
+    return Collections.unmodifiableList(storeValues);
+  }
+
+  @Override
+  public String toString() {
+    return "FeatureStore [features=" + store.keySet() + "]";
+  }
+
+}
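
A minimal sketch of the FeatureStore contract above: insertion order is preserved, add() assigns indexes, and duplicate names are rejected. The ValueFeature class name and its params map are assumptions (a concrete Feature from this contrib, not part of this diff), and a SolrResourceLoader is assumed to be available.

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.solr.core.SolrResourceLoader;
    import org.apache.solr.ltr.feature.Feature;
    import org.apache.solr.ltr.store.FeatureStore;

    public class FeatureStoreSketch {
      static FeatureStore buildStore(SolrResourceLoader loader) {
        Map<String,Object> params = new HashMap<>();
        params.put("value", 1.0f); // assumed ValueFeature parameter, illustration only
        Feature f = Feature.getInstance(loader,
            "org.apache.solr.ltr.feature.ValueFeature", "myFeature", params);

        FeatureStore store = new FeatureStore("myStore");
        store.add(f);        // stored under its name, index 0 assigned
        // store.add(f);     // would throw FeatureException: name already in the store
        return store;
      }
    }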

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/ModelStore.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/ModelStore.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/ModelStore.java
new file mode 100644
index 0000000..dbb065f
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/ModelStore.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.store;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.solr.ltr.model.LTRScoringModel;
+import org.apache.solr.ltr.model.ModelException;
+
+/**
+ * Contains the declared models.
+ */
+public class ModelStore {
+
+  private final Map<String,LTRScoringModel> availableModels;
+
+  public ModelStore() {
+    availableModels = new HashMap<>();
+  }
+
+  public synchronized LTRScoringModel getModel(String name) {
+    return availableModels.get(name);
+  }
+
+  public void clear() {
+    availableModels.clear();
+  }
+
+  public List<LTRScoringModel> getModels() {
+    final List<LTRScoringModel> availableModelsValues =
+        new ArrayList<LTRScoringModel>(availableModels.values());
+    return Collections.unmodifiableList(availableModelsValues);
+  }
+
+  @Override
+  public String toString() {
+    return "ModelStore [availableModels=" + availableModels.keySet() + "]";
+  }
+
+  public LTRScoringModel delete(String modelName) {
+    return availableModels.remove(modelName);
+  }
+
+  public synchronized void addModel(LTRScoringModel modeldata)
+      throws ModelException {
+    final String name = modeldata.getName();
+
+    if (availableModels.containsKey(name)) {
+      throw new ModelException("model '" + name
+          + "' already exists. Please use a different name");
+    }
+
+    availableModels.put(modeldata.getName(), modeldata);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/package-info.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/package-info.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/package-info.java
new file mode 100644
index 0000000..1ed9bff
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/package-info.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Contains feature and model store related classes.
+ */
+package org.apache.solr.ltr.store;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5a66b3bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/rest/ManagedFeatureStore.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/rest/ManagedFeatureStore.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/rest/ManagedFeatureStore.java
new file mode 100644
index 0000000..beb217c
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/rest/ManagedFeatureStore.java
@@ -0,0 +1,215 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.store.rest;
+
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.core.SolrCore;
+import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.ltr.feature.Feature;
+import org.apache.solr.ltr.store.FeatureStore;
+import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.rest.BaseSolrResource;
+import org.apache.solr.rest.ManagedResource;
+import org.apache.solr.rest.ManagedResourceObserver;
+import org.apache.solr.rest.ManagedResourceStorage;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Managed resource for storing features.
+ */
+public class ManagedFeatureStore extends ManagedResource implements ManagedResource.ChildResourceSupport {
+
+  public static void registerManagedFeatureStore(SolrResourceLoader solrResourceLoader,
+      ManagedResourceObserver managedResourceObserver) {
+    solrResourceLoader.getManagedResourceRegistry().registerManagedResource(
+        REST_END_POINT,
+        ManagedFeatureStore.class,
+        managedResourceObserver);
+  }
+
+  public static ManagedFeatureStore getManagedFeatureStore(SolrCore core) {
+    return (ManagedFeatureStore) core.getRestManager()
+        .getManagedResource(REST_END_POINT);
+  }
+
+  /** the feature store rest endpoint **/
+  public static final String REST_END_POINT = "/schema/feature-store";
+  // TODO: reduce from public to package visibility (once tests no longer need public access)
+
+  /** name of the attribute containing the feature class **/
+  static final String CLASS_KEY = "class";
+  /** name of the attribute containing the feature name **/
+  static final String NAME_KEY = "name";
+  /** name of the attribute containing the feature params **/
+  static final String PARAMS_KEY = "params";
+  /** name of the attribute containing the feature store used **/
+  static final String FEATURE_STORE_NAME_KEY = "store";
+
+  private final Map<String,FeatureStore> stores = new HashMap<>();
+
+  /**
+   * Managed feature store: the name of the attribute containing all the feature
+   * stores
+   **/
+  private static final String FEATURE_STORE_JSON_FIELD = "featureStores";
+
+  /**
+   * Managed feature store: the name of the attribute containing all the
+   * features of a feature store
+   **/
+  private static final String FEATURES_JSON_FIELD = "features";
+
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  public ManagedFeatureStore(String resourceId, SolrResourceLoader loader,
+      ManagedResourceStorage.StorageIO storageIO) throws SolrException {
+    super(resourceId, loader, storageIO);
+
+  }
+
+  public synchronized FeatureStore getFeatureStore(String name) {
+    if (name == null) {
+      name = FeatureStore.DEFAULT_FEATURE_STORE_NAME;
+    }
+    if (!stores.containsKey(name)) {
+      stores.put(name, new FeatureStore(name));
+    }
+    return stores.get(name);
+  }
+
+  @Override
+  protected void onManagedDataLoadedFromStorage(NamedList<?> managedInitArgs,
+      Object managedData) throws SolrException {
+
+    stores.clear();
+    log.info("------ managed feature ~ loading ------");
+    if (managedData instanceof List) {
+      @SuppressWarnings("unchecked")
+      final List<Map<String,Object>> up = (List<Map<String,Object>>) managedData;
+      for (final Map<String,Object> u : up) {
+        final String featureStore = (String) u.get(FEATURE_STORE_NAME_KEY);
+        addFeature(u, featureStore);
+      }
+    }
+  }
+
+  public synchronized void addFeature(Map<String,Object> map, String featureStore) {
+    log.info("register feature based on {}", map);
+    final FeatureStore fstore = getFeatureStore(featureStore);
+    final Feature feature = fromFeatureMap(solrResourceLoader, map);
+    fstore.add(feature);
+  }
+
+  @SuppressWarnings("unchecked")
+  @Override
+  public Object applyUpdatesToManagedData(Object updates) {
+    if (updates instanceof List) {
+      final List<Map<String,Object>> up = (List<Map<String,Object>>) updates;
+      for (final Map<String,Object> u : up) {
+        final String featureStore = (String) u.get(FEATURE_STORE_NAME_KEY);
+        addFeature(u, featureStore);
+      }
+    }
+
+    if (updates instanceof Map) {
+      // a single feature
+      Map<String,Object> updatesMap = (Map<String,Object>) updates;
+      final String featureStore = (String) updatesMap.get(FEATURE_STORE_NAME_KEY);
+      addFeature(updatesMap, featureStore);
+    }
+
+    final List<Object> features = new ArrayList<>();
+    for (final FeatureStore fs : stores.values()) {
+      features.addAll(featuresAsManagedResources(fs));
+    }
+    return features;
+  }
+
+  @Override
+  public synchronized void doDeleteChild(BaseSolrResource endpoint, String childId) {
+    if (childId.equals("*")) {
+      stores.clear();
+    }
+    if (stores.containsKey(childId)) {
+      stores.remove(childId);
+    }
+    storeManagedData(applyUpdatesToManagedData(null));
+  }
+
+  /**
+   * Called to retrieve a named part (the given childId) of the resource at the
+   * given endpoint. If no childId is given, the names of all feature stores
+   * are returned; otherwise the features of the named feature store are returned.
+   */
+  @Override
+  public void doGet(BaseSolrResource endpoint, String childId) {
+    final SolrQueryResponse response = endpoint.getSolrResponse();
+
+    // If no feature store specified, show all the feature stores available
+    if (childId == null) {
+      response.add(FEATURE_STORE_JSON_FIELD, stores.keySet());
+    } else {
+      final FeatureStore store = getFeatureStore(childId);
+      if (store == null) {
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+            "missing feature store [" + childId + "]");
+      }
+      response.add(FEATURES_JSON_FIELD,
+          featuresAsManagedResources(store));
+    }
+  }
+
+  private static List<Object> featuresAsManagedResources(FeatureStore store) {
+    final List<Feature> storedFeatures = store.getFeatures();
+    final List<Object> features = new ArrayList<Object>(storedFeatures.size());
+    for (final Feature f : storedFeatures) {
+      final LinkedHashMap<String,Object> m = toFeatureMap(f);
+      m.put(FEATURE_STORE_NAME_KEY, store.getName());
+      features.add(m);
+    }
+    return features;
+  }
+
+  private static LinkedHashMap<String,Object> toFeatureMap(Feature feat) {
+    final LinkedHashMap<String,Object> o = new LinkedHashMap<>(4, 1.0f); // 1 extra for caller to add store
+    o.put(NAME_KEY, feat.getName());
+    o.put(CLASS_KEY, feat.getClass().getCanonicalName());
+    o.put(PARAMS_KEY, feat.paramsToMap());
+    return o;
+  }
+
+  private static Feature fromFeatureMap(SolrResourceLoader solrResourceLoader,
+      Map<String,Object> featureMap) {
+    final String className = (String) featureMap.get(CLASS_KEY);
+
+    final String name = (String) featureMap.get(NAME_KEY);
+
+    @SuppressWarnings("unchecked")
+    final Map<String,Object> params = (Map<String,Object>) featureMap.get(PARAMS_KEY);
+
+    return Feature.getInstance(solrResourceLoader, className, name, params);
+  }
+}
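
A sketch of registering a feature programmatically through the managed store above, using the JSON field names (name, class, params) behind the NAME_KEY, CLASS_KEY and PARAMS_KEY constants defined in this file; the SolrCore variable and the ValueFeature class and params are assumptions for illustration.

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.solr.core.SolrCore;
    import org.apache.solr.ltr.store.rest.ManagedFeatureStore;

    public class ManagedFeatureStoreSketch {
      static void register(SolrCore core) {
        ManagedFeatureStore managed = ManagedFeatureStore.getManagedFeatureStore(core);

        Map<String,Object> featureMap = new HashMap<>();
        featureMap.put("name", "myFeature");                                 // NAME_KEY
        featureMap.put("class", "org.apache.solr.ltr.feature.ValueFeature"); // CLASS_KEY
        Map<String,Object> params = new HashMap<>();
        params.put("value", 1.0f); // assumed ValueFeature parameter
        featureMap.put("params", params);                                    // PARAMS_KEY

        // a null store name falls back to FeatureStore.DEFAULT_FEATURE_STORE_NAME
        managed.addFeature(featureMap, null);
      }
    }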