Posted to commits@lucene.apache.org by is...@apache.org on 2017/03/19 15:57:42 UTC

[08/12] lucene-solr:jira/solr-6736: Merge master into jira/solr-6736 branch

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/SolrFeature.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/SolrFeature.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/SolrFeature.java
index 13eb96f..4aa872d 100644
--- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/SolrFeature.java
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/SolrFeature.java
@@ -157,6 +157,9 @@ public class SolrFeature extends Feature {
           for (String fq : fqs) {
             if ((fq != null) && (fq.trim().length() != 0)) {
               fq = macroExpander.expand(fq);
+              if (fq == null) {
+                throw new FeatureException(this.getClass().getSimpleName()+" requires efi parameter that was not passed in request.");
+              }
               final QParser fqp = QParser.getParser(fq, req);
               final Query filterQuery = fqp.getQuery();
               if (filterQuery != null) {
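
With this null check, every ${...} placeholder in a SolrFeature's fq must be resolvable from the
external feature information (efi) supplied with the request, otherwise createWeight fails with a
FeatureException. A minimal SolrJ sketch of a rerank request that satisfies the titlePhrasesMatch
feature added to external_features.json below (the efi values mirror the tests further down; the
collection setup is assumed):

    import org.apache.solr.client.solrj.SolrQuery;

    SolrQuery query = new SolrQuery("*:*");
    query.add("fl", "*,score,fv:[fv]");
    query.add("rows", "5");
    // user_query, userTitlePhrase1 and userTitlePhrase2 must all be present, or
    // SolrFeatureWeight reports "requires efi parameter that was not passed in request"
    query.add("rq", "{!ltr reRankDocs=10 model=externalmodel"
        + " efi.user_query=w3 efi.userTitlePhrase1=w2 efi.userTitlePhrase2=w1}");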

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/contrib/ltr/src/test-files/featureExamples/external_features.json
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test-files/featureExamples/external_features.json b/solr/contrib/ltr/src/test-files/featureExamples/external_features.json
index d8a9eca..5c4f12d 100644
--- a/solr/contrib/ltr/src/test-files/featureExamples/external_features.json
+++ b/solr/contrib/ltr/src/test-files/featureExamples/external_features.json
@@ -48,4 +48,10 @@
     "params" : {
         "q" : "{!field f=title}${user_query}"
     }
+}, {
+    "name" : "titlePhrasesMatch",
+    "class" : "org.apache.solr.ltr.feature.SolrFeature",
+    "params" : {
+        "fq" : [ "{!field f=title}${userTitlePhrase1}", "{!field f=title}${userTitlePhrase2}"]
+    }
 } ]

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestSelectiveWeightCreation.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestSelectiveWeightCreation.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestSelectiveWeightCreation.java
index 5cfd999..7bf8373 100644
--- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestSelectiveWeightCreation.java
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestSelectiveWeightCreation.java
@@ -210,18 +210,22 @@ public class TestSelectiveWeightCreation extends TestRerankBase {
   @Test
   public void testSelectiveWeightsRequestFeaturesFromDifferentStore() throws Exception {
 
-    final String docs0fv = FeatureLoggerTestUtils.toFeatureVector(
+    final String docs0fv_sparse = FeatureLoggerTestUtils.toFeatureVector(
         "matchedTitle","1.0", "titlePhraseMatch","0.6103343");
+    final String docs0fv_dense = FeatureLoggerTestUtils.toFeatureVector(
+        "matchedTitle","1.0", "titlePhraseMatch","0.6103343", "titlePhrasesMatch","0.0");
     final String docs0fv_fstore4= FeatureLoggerTestUtils.toFeatureVector(
         "popularity","3.0", "originalScore","1.0");
 
+    final String docs0fv = chooseDefaultFeatureVector(docs0fv_dense, docs0fv_sparse);
+
     // extract all features in externalmodel's store (default store)
     // rerank using externalmodel (default store)
     final SolrQuery query = new SolrQuery();
     query.setQuery("*:*");
     query.add("fl", "*,score,fv:[fv]");
     query.add("rows", "5");
-    query.add("rq", "{!ltr reRankDocs=10 model=externalmodel efi.user_query=w3}");
+    query.add("rq", "{!ltr reRankDocs=10 model=externalmodel efi.user_query=w3 efi.userTitlePhrase1=w2 efi.userTitlePhrase2=w1}");
 
     assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='3'");
     assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='4'");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestExternalFeatures.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestExternalFeatures.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestExternalFeatures.java
index 4010ee1..c6ae30f 100644
--- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestExternalFeatures.java
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestExternalFeatures.java
@@ -67,7 +67,7 @@ public class TestExternalFeatures extends TestRerankBase {
 
     query.remove("fl");
     query.add("fl", "*,score,[fv]");
-    query.add("rq", "{!ltr reRankDocs=10 model=externalmodel efi.user_query=w3}");
+    query.add("rq", "{!ltr reRankDocs=10 model=externalmodel efi.user_query=w3 efi.userTitlePhrase1=w4 efi.userTitlePhrase2=w5}");
 
     assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='3'");
     assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==0.7693934");
@@ -77,7 +77,7 @@ public class TestExternalFeatures extends TestRerankBase {
     // Adding an efi in the transformer should not affect the rq ranking with a
     // different value for efi of the same parameter
     query.remove("fl");
-    query.add("fl", "*,score,[fv efi.user_query=w2]");
+    query.add("fl", "*,score,[fv efi.user_query=w2 efi.userTitlePhrase1=w4 efi.userTitlePhrase2=w5]");
 
     assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='3'");
     assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==0.7693934");
@@ -92,11 +92,12 @@ public class TestExternalFeatures extends TestRerankBase {
     query.add("fl", "*,score,fv:[fv]");
     query.add("rows", "1");
     // Stopword only query passed in
-    query.add("rq", "{!ltr reRankDocs=10 model=externalmodel efi.user_query='a'}");
+    query.add("rq", "{!ltr reRankDocs=10 model=externalmodel efi.user_query='a' efi.userTitlePhrase1='b' efi.userTitlePhrase2='c'}");
 
     final String docs0fv_dense_csv = FeatureLoggerTestUtils.toFeatureVector(
         "matchedTitle","0.0",
-        "titlePhraseMatch","0.0");
+        "titlePhraseMatch","0.0",
+        "titlePhrasesMatch","0.0");
     final String docs0fv_sparse_csv = FeatureLoggerTestUtils.toFeatureVector();
 
     final String docs0fv_default_csv = chooseDefaultFeatureVector(docs0fv_dense_csv, docs0fv_sparse_csv);
@@ -181,4 +182,20 @@ public class TestExternalFeatures extends TestRerankBase {
     query.add("fl", "fvalias:[fv store=fstore4]");
     assertJQ("/query" + query.toQueryString(), "/error/msg=='Exception from createWeight for ValueFeature [name=popularity, params={value=${myPop}, required=true}] ValueFeatureWeight requires efi parameter that was not passed in request.'");
   }
+
+  @Test
+  public void featureExtraction_valueFeatureRequiredInFq_shouldThrowException() throws Exception {
+    final String userTitlePhrase1 = "userTitlePhrase1";
+    final String userTitlePhrase2 = "userTitlePhrase2";
+    final String userTitlePhrasePresent = (random().nextBoolean() ? userTitlePhrase1 : userTitlePhrase2);
+
+    final SolrQuery query = new SolrQuery();
+    query.setQuery("*:*");
+    query.add("rows", "1");
+    query.add("fl", "score,features:[fv efi.user_query=uq "+userTitlePhrasePresent+"=utpp]");
+    assertJQ("/query" + query.toQueryString(), "/error/msg=='Exception from createWeight for "
+        + "SolrFeature [name=titlePhrasesMatch, params={fq=[{!field f=title}${"+userTitlePhrase1+"}, {!field f=title}${"+userTitlePhrase2+"}]}] "
+        + "SolrFeatureWeight requires efi parameter that was not passed in request.'");
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/TreeMergeOutputFormat.java
----------------------------------------------------------------------
diff --git a/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/TreeMergeOutputFormat.java b/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/TreeMergeOutputFormat.java
index e3487ad..cac57c3 100644
--- a/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/TreeMergeOutputFormat.java
+++ b/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/TreeMergeOutputFormat.java
@@ -163,7 +163,8 @@ public class TreeMergeOutputFormat extends FileOutputFormat<Text, NullWritable>
 
         // Set Solr's commit data so the created index is usable by SolrCloud. E.g. Currently SolrCloud relies on
         // commitTimeMSec in the commit data to do replication.
-        SolrIndexWriter.setCommitData(writer);
+        //TODO no commitUpdateCommand
+        SolrIndexWriter.setCommitData(writer, -1);
 
         timer = new RTimer();
         LOG.info("Optimizing Solr: Closing index writer");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/contrib/velocity/src/java/org/apache/solr/response/PageTool.java
----------------------------------------------------------------------
diff --git a/solr/contrib/velocity/src/java/org/apache/solr/response/PageTool.java b/solr/contrib/velocity/src/java/org/apache/solr/response/PageTool.java
index 1947f36..48dc826 100644
--- a/solr/contrib/velocity/src/java/org/apache/solr/response/PageTool.java
+++ b/solr/contrib/velocity/src/java/org/apache/solr/response/PageTool.java
@@ -38,7 +38,7 @@ public class PageTool {
     String rows = request.getParams().get("rows");
 
     if (rows != null) {
-      results_per_page = new Integer(rows);
+      results_per_page = Integer.parseInt(rows);
     }
     //TODO: Handle group by results
     Object docs = response.getResponse();
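
The parseInt change sidesteps the boxing Integer(String) constructor (deprecated in newer JDKs)
and a needless wrapper allocation; a standalone illustration:

    int resultsPerPage = Integer.parseInt("10");    // primitive int, no wrapper object created
    Integer boxed = Integer.valueOf("10");          // boxed alternative when an object is required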

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java b/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
index d3ad322..223a539 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
@@ -21,6 +21,7 @@ import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.hadoop.fs.Path;
@@ -420,7 +421,24 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
         try {
           // we must check LIR before registering as leader
           checkLIR(coreName, allReplicasInLine);
-          
+
+          boolean onlyLeaderIndexes = zkController.getClusterState().getCollection(collection).getRealtimeReplicas() == 1;
+          if (onlyLeaderIndexes) {
+            // stop replicate from old leader
+            zkController.stopReplicationFromLeader(coreName);
+            if (weAreReplacement) {
+              try (SolrCore core = cc.getCore(coreName)) {
+                Future<UpdateLog.RecoveryInfo> future = core.getUpdateHandler().getUpdateLog().recoverFromCurrentLog();
+                if (future != null) {
+                  log.info("Replaying tlog before becoming the new leader");
+                  future.get();
+                } else {
+                  log.info("New leader does not have old tlog to replay");
+                }
+              }
+            }
+          }
+
           super.runLeaderProcess(weAreReplacement, 0);
           try (SolrCore core = cc.getCore(coreName)) {
             if (core != null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/core/src/java/org/apache/solr/cloud/OverseerAutoReplicaFailoverThread.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerAutoReplicaFailoverThread.java b/solr/core/src/java/org/apache/solr/cloud/OverseerAutoReplicaFailoverThread.java
index 10b4bf3..ea09eef 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerAutoReplicaFailoverThread.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerAutoReplicaFailoverThread.java
@@ -243,13 +243,14 @@ public class OverseerAutoReplicaFailoverThread implements Runnable, Closeable {
     final String dataDir = badReplica.replica.getStr("dataDir");
     final String ulogDir = badReplica.replica.getStr("ulogDir");
     final String coreNodeName = badReplica.replica.getName();
+    final String shardId = badReplica.slice.getName();
     if (dataDir != null) {
       // need an async request - full shard goes down leader election
       final String coreName = badReplica.replica.getStr(ZkStateReader.CORE_NAME_PROP);
       log.debug("submit call to {}", createUrl);
       MDC.put("OverseerAutoReplicaFailoverThread.createUrl", createUrl);
       try {
-        updateExecutor.submit(() -> createSolrCore(collection, createUrl, dataDir, ulogDir, coreNodeName, coreName));
+        updateExecutor.submit(() -> createSolrCore(collection, createUrl, dataDir, ulogDir, coreNodeName, coreName, shardId));
       } finally {
         MDC.remove("OverseerAutoReplicaFailoverThread.createUrl");
       }
@@ -440,7 +441,7 @@ public class OverseerAutoReplicaFailoverThread implements Runnable, Closeable {
 
   private boolean createSolrCore(final String collection,
       final String createUrl, final String dataDir, final String ulogDir,
-      final String coreNodeName, final String coreName) {
+      final String coreNodeName, final String coreName, final String shardId) {
 
     try (HttpSolrClient client = new HttpSolrClient.Builder(createUrl).build()) {
       log.debug("create url={}", createUrl);
@@ -451,6 +452,7 @@ public class OverseerAutoReplicaFailoverThread implements Runnable, Closeable {
       createCmd.setCoreNodeName(coreNodeName);
       // TODO: how do we ensure unique coreName
       // for now, the collections API will use unique names
+      createCmd.setShardId(shardId);
       createCmd.setCoreName(coreName);
       createCmd.setDataDir(dataDir);
       createCmd.setUlogDir(ulogDir.substring(0, ulogDir.length() - "/tlog".length()));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java b/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java
index 00eb12d..4d64a00 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java
@@ -131,6 +131,7 @@ public class OverseerCollectionMessageHandler implements OverseerMessageHandler
       ZkStateReader.REPLICATION_FACTOR, "1",
       ZkStateReader.MAX_SHARDS_PER_NODE, "1",
       ZkStateReader.AUTO_ADD_REPLICAS, "false",
+      ZkStateReader.REALTIME_REPLICAS, "-1",
       DocCollection.RULE, null,
       SNITCH, null));
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java b/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
index 3bd2e74..cb6c69c 100644
--- a/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
+++ b/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
@@ -46,6 +46,7 @@ import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.cloud.ZooKeeperException;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.UpdateParams;
+import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.CoreDescriptor;
 import org.apache.solr.core.DirectoryFactory.DirContext;
@@ -62,17 +63,43 @@ import org.apache.solr.update.UpdateLog;
 import org.apache.solr.update.UpdateLog.RecoveryInfo;
 import org.apache.solr.update.processor.DistributedUpdateProcessor;
 import org.apache.solr.util.RefCounted;
+import org.apache.solr.util.SolrPluginUtils;
+import org.apache.solr.util.plugin.NamedListInitializedPlugin;
 import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+/**
+ * This class may change in future and customisations are not supported
+ * between versions in terms of API or back compat behaviour.
+ * @lucene.experimental
+ */
 public class RecoveryStrategy extends Thread implements Closeable {
 
+  public static class Builder implements NamedListInitializedPlugin {
+    private NamedList args;
+    @Override
+    public void init(NamedList args) {
+      this.args = args;
+    }
+    // this should only be used from SolrCoreState
+    public RecoveryStrategy create(CoreContainer cc, CoreDescriptor cd,
+        RecoveryStrategy.RecoveryListener recoveryListener) {
+      final RecoveryStrategy recoveryStrategy = newRecoveryStrategy(cc, cd, recoveryListener);
+      SolrPluginUtils.invokeSetters(recoveryStrategy, args);
+      return recoveryStrategy;
+    }
+    protected RecoveryStrategy newRecoveryStrategy(CoreContainer cc, CoreDescriptor cd,
+        RecoveryStrategy.RecoveryListener recoveryListener) {
+      return new RecoveryStrategy(cc, cd, recoveryListener);
+    }
+  }
+
   private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
-  private static final int WAIT_FOR_UPDATES_WITH_STALE_STATE_PAUSE = Integer.getInteger("solr.cloud.wait-for-updates-with-stale-state-pause", 2500);
-  private static final int MAX_RETRIES = 500;
-  private static final int STARTING_RECOVERY_DELAY = 5000;
+  private int waitForUpdatesWithStaleStatePauseMilliSeconds = Integer.getInteger("solr.cloud.wait-for-updates-with-stale-state-pause", 2500);
+  private int maxRetries = 500;
+  private int startingRecoveryDelayMilliSeconds = 5000;
 
   public static interface RecoveryListener {
     public void recovered();
@@ -91,9 +118,9 @@ public class RecoveryStrategy extends Thread implements Closeable {
   private boolean recoveringAfterStartup;
   private CoreContainer cc;
   private volatile HttpUriRequest prevSendPreRecoveryHttpUriRequest;
-  
-  // this should only be used from SolrCoreState
-  public RecoveryStrategy(CoreContainer cc, CoreDescriptor cd, RecoveryListener recoveryListener) {
+  private boolean onlyLeaderIndexes;
+
+  protected RecoveryStrategy(CoreContainer cc, CoreDescriptor cd, RecoveryListener recoveryListener) {
     this.cc = cc;
     this.coreName = cd.getName();
     this.recoveryListener = recoveryListener;
@@ -102,15 +129,45 @@ public class RecoveryStrategy extends Thread implements Closeable {
     zkStateReader = zkController.getZkStateReader();
     baseUrl = zkController.getBaseUrl();
     coreZkNodeName = cd.getCloudDescriptor().getCoreNodeName();
+    String collection = cd.getCloudDescriptor().getCollectionName();
+    onlyLeaderIndexes = zkStateReader.getClusterState().getCollection(collection).getRealtimeReplicas() == 1;
+  }
+
+  final public int getWaitForUpdatesWithStaleStatePauseMilliSeconds() {
+    return waitForUpdatesWithStaleStatePauseMilliSeconds;
+  }
+
+  final public void setWaitForUpdatesWithStaleStatePauseMilliSeconds(int waitForUpdatesWithStaleStatePauseMilliSeconds) {
+    this.waitForUpdatesWithStaleStatePauseMilliSeconds = waitForUpdatesWithStaleStatePauseMilliSeconds;
+  }
+
+  final public int getMaxRetries() {
+    return maxRetries;
+  }
+
+  final public void setMaxRetries(int maxRetries) {
+    this.maxRetries = maxRetries;
+  }
+
+  final public int getStartingRecoveryDelayMilliSeconds() {
+    return startingRecoveryDelayMilliSeconds;
+  }
+
+  final public void setStartingRecoveryDelayMilliSeconds(int startingRecoveryDelayMilliSeconds) {
+    this.startingRecoveryDelayMilliSeconds = startingRecoveryDelayMilliSeconds;
+  }
+
+  final public boolean getRecoveringAfterStartup() {
+    return recoveringAfterStartup;
   }
 
-  public void setRecoveringAfterStartup(boolean recoveringAfterStartup) {
+  final public void setRecoveringAfterStartup(boolean recoveringAfterStartup) {
     this.recoveringAfterStartup = recoveringAfterStartup;
   }
 
   // make sure any threads stop retrying
   @Override
-  public void close() {
+  final public void close() {
     close = true;
     if (prevSendPreRecoveryHttpUriRequest != null) {
       prevSendPreRecoveryHttpUriRequest.abort();
@@ -118,7 +175,7 @@ public class RecoveryStrategy extends Thread implements Closeable {
     LOG.warn("Stopping recovery for core=[{}] coreNodeName=[{}]", coreName, coreZkNodeName);
   }
 
-  private void recoveryFailed(final SolrCore core,
+  final private void recoveryFailed(final SolrCore core,
       final ZkController zkController, final String baseUrl,
       final String shardZkNodeName, final CoreDescriptor cd) throws KeeperException, InterruptedException {
     SolrException.log(LOG, "Recovery failed - I give up.");
@@ -130,11 +187,19 @@ public class RecoveryStrategy extends Thread implements Closeable {
     }
   }
   
-  private void replicate(String nodeName, SolrCore core, ZkNodeProps leaderprops)
+  /**
+   * This method may change in future and customisations are not supported
+   * between versions in terms of API or back compat behaviour.
+   * @lucene.experimental
+   */
+  protected String getReplicateLeaderUrl(ZkNodeProps leaderprops) {
+    return new ZkCoreNodeProps(leaderprops).getCoreUrl();
+  }
+
+  final private void replicate(String nodeName, SolrCore core, ZkNodeProps leaderprops)
       throws SolrServerException, IOException {
 
-    ZkCoreNodeProps leaderCNodeProps = new ZkCoreNodeProps(leaderprops);
-    String leaderUrl = leaderCNodeProps.getCoreUrl();
+    final String leaderUrl = getReplicateLeaderUrl(leaderprops);
     
     LOG.info("Attempting to replicate from [{}].", leaderUrl);
     
@@ -191,21 +256,21 @@ public class RecoveryStrategy extends Thread implements Closeable {
 
   }
 
-  private void commitOnLeader(String leaderUrl) throws SolrServerException,
+  final private void commitOnLeader(String leaderUrl) throws SolrServerException,
       IOException {
     try (HttpSolrClient client = new HttpSolrClient.Builder(leaderUrl).build()) {
       client.setConnectionTimeout(30000);
       UpdateRequest ureq = new UpdateRequest();
       ureq.setParams(new ModifiableSolrParams());
       ureq.getParams().set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
-      ureq.getParams().set(UpdateParams.OPEN_SEARCHER, false);
+      ureq.getParams().set(UpdateParams.OPEN_SEARCHER, onlyLeaderIndexes);
       ureq.setAction(AbstractUpdateRequest.ACTION.COMMIT, false, true).process(
           client);
     }
   }
 
   @Override
-  public void run() {
+  final public void run() {
 
     // set request info for logging
     try (SolrCore core = cc.getCore(coreName)) {
@@ -234,7 +299,7 @@ public class RecoveryStrategy extends Thread implements Closeable {
   }
 
   // TODO: perhaps make this grab a new core each time through the loop to handle core reloads?
-  public void doRecovery(SolrCore core) throws KeeperException, InterruptedException {
+  final public void doRecovery(SolrCore core) throws KeeperException, InterruptedException {
     boolean replayed = false;
     boolean successfulRecovery = false;
 
@@ -247,7 +312,8 @@ public class RecoveryStrategy extends Thread implements Closeable {
       return;
     }
 
-    boolean firstTime = true;
+    // we temporarily ignore peersync for realtimeReplicas mode
+    boolean firstTime = !onlyLeaderIndexes;
 
     List<Long> recentVersions;
     try (UpdateLog.RecentUpdates recentUpdates = ulog.getRecentUpdates()) {
@@ -299,6 +365,10 @@ public class RecoveryStrategy extends Thread implements Closeable {
       }
     }
 
+    if (onlyLeaderIndexes) {
+      zkController.stopReplicationFromLeader(coreName);
+    }
+
     Future<RecoveryInfo> replayFuture = null;
     while (!successfulRecovery && !isInterrupted() && !isClosed()) { // don't use interruption or it will close channels though
       try {
@@ -360,7 +430,7 @@ public class RecoveryStrategy extends Thread implements Closeable {
         // are sure to have finished (see SOLR-7141 for
         // discussion around current value)
         try {
-          Thread.sleep(WAIT_FOR_UPDATES_WITH_STALE_STATE_PAUSE);
+          Thread.sleep(waitForUpdatesWithStaleStatePauseMilliSeconds);
         } catch (InterruptedException e) {
           Thread.currentThread().interrupt();
         }
@@ -452,6 +522,9 @@ public class RecoveryStrategy extends Thread implements Closeable {
         if (successfulRecovery) {
           LOG.info("Registering as Active after recovery.");
           try {
+            if (onlyLeaderIndexes) {
+              zkController.startReplicationFromLeader(coreName);
+            }
             zkController.publish(core.getCoreDescriptor(), Replica.State.ACTIVE);
           } catch (Exception e) {
             LOG.error("Could not publish as ACTIVE after succesful recovery", e);
@@ -479,7 +552,7 @@ public class RecoveryStrategy extends Thread implements Closeable {
           LOG.error("Recovery failed - trying again... (" + retries + ")");
           
           retries++;
-          if (retries >= MAX_RETRIES) {
+          if (retries >= maxRetries) {
             SolrException.log(LOG, "Recovery failed - max retries exceeded (" + retries + ").");
             try {
               recoveryFailed(core, zkController, baseUrl, coreZkNodeName, core.getCoreDescriptor());
@@ -504,7 +577,7 @@ public class RecoveryStrategy extends Thread implements Closeable {
               LOG.info("RecoveryStrategy has been closed");
               break; // check if someone closed us
             }
-            Thread.sleep(STARTING_RECOVERY_DELAY);
+            Thread.sleep(startingRecoveryDelayMilliSeconds);
           }
         } catch (InterruptedException e) {
           Thread.currentThread().interrupt();
@@ -525,8 +598,20 @@ public class RecoveryStrategy extends Thread implements Closeable {
     LOG.info("Finished recovery process, successful=[{}]", Boolean.toString(successfulRecovery));
   }
 
-  private Future<RecoveryInfo> replay(SolrCore core)
+  public static Runnable testing_beforeReplayBufferingUpdates;
+
+  final private Future<RecoveryInfo> replay(SolrCore core)
       throws InterruptedException, ExecutionException {
+    if (testing_beforeReplayBufferingUpdates != null) {
+      testing_beforeReplayBufferingUpdates.run();
+    }
+    if (onlyLeaderIndexes) {
+      // roll over all updates during buffering to new tlog, make RTG available
+      SolrQueryRequest req = new LocalSolrQueryRequest(core,
+          new ModifiableSolrParams());
+      core.getUpdateHandler().getUpdateLog().copyOverBufferingUpdates(new CommitUpdateCommand(req, false));
+      return null;
+    }
     Future<RecoveryInfo> future = core.getUpdateHandler().getUpdateLog().applyBufferedUpdates();
     if (future == null) {
       // no replay needed\
@@ -547,7 +632,7 @@ public class RecoveryStrategy extends Thread implements Closeable {
     return future;
   }
   
-  private void cloudDebugLog(SolrCore core, String op) {
+  final private void cloudDebugLog(SolrCore core, String op) {
     if (!LOG.isDebugEnabled()) {
       return;
     }
@@ -566,11 +651,11 @@ public class RecoveryStrategy extends Thread implements Closeable {
     }
   }
 
-  public boolean isClosed() {
+  final public boolean isClosed() {
     return close;
   }
   
-  private void sendPrepRecoveryCmd(String leaderBaseUrl, String leaderCoreName, Slice slice)
+  final private void sendPrepRecoveryCmd(String leaderBaseUrl, String leaderCoreName, Slice slice)
       throws SolrServerException, IOException, InterruptedException, ExecutionException {
 
     WaitForState prepCmd = new WaitForState();
@@ -603,7 +688,7 @@ public class RecoveryStrategy extends Thread implements Closeable {
     }
   }
 
-  private void sendPrepRecoveryCmd(String leaderBaseUrl, WaitForState prepCmd)
+  final private void sendPrepRecoveryCmd(String leaderBaseUrl, WaitForState prepCmd)
       throws SolrServerException, IOException, InterruptedException, ExecutionException {
     try (HttpSolrClient client = new HttpSolrClient.Builder(leaderBaseUrl).build()) {
       client.setConnectionTimeout(10000);
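
Because the new Builder is a NamedListInitializedPlugin and create() applies its args through
SolrPluginUtils.invokeSetters, the former constants (maxRetries, startingRecoveryDelayMilliSeconds,
waitForUpdatesWithStaleStatePauseMilliSeconds) become per-core tunables. A rough sketch of what the
builder does with configured args (the values here are made up):

    import org.apache.solr.common.util.NamedList;

    NamedList<Object> args = new NamedList<>();
    args.add("maxRetries", 250);
    args.add("startingRecoveryDelayMilliSeconds", 2000);

    RecoveryStrategy.Builder builder = new RecoveryStrategy.Builder();
    builder.init(args);
    // builder.create(coreContainer, coreDescriptor, recoveryListener) would construct the
    // RecoveryStrategy and invoke setMaxRetries(250) / setStartingRecoveryDelayMilliSeconds(2000)

The SolrConfig change further below registers this builder under the "recoveryStrategy" plugin name,
so the same args can be supplied from solrconfig.xml.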

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java b/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java
new file mode 100644
index 0000000..d7fded9
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java
@@ -0,0 +1,124 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud;
+
+import java.lang.invoke.MethodHandles;
+
+import org.apache.lucene.index.IndexCommit;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.core.SolrConfig;
+import org.apache.solr.core.SolrCore;
+import org.apache.solr.handler.ReplicationHandler;
+import org.apache.solr.request.LocalSolrQueryRequest;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.update.CommitUpdateCommand;
+import org.apache.solr.update.SolrIndexWriter;
+import org.apache.solr.update.UpdateLog;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class ReplicateFromLeader {
+  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  private CoreContainer cc;
+  private String coreName;
+
+  private ReplicationHandler replicationProcess;
+  private long lastVersion = 0;
+
+  public ReplicateFromLeader(CoreContainer cc, String coreName) {
+    this.cc = cc;
+    this.coreName = coreName;
+  }
+
+  public void startReplication() throws InterruptedException {
+    try (SolrCore core = cc.getCore(coreName)) {
+      if (core == null) {
+        if (cc.isShutDown()) {
+          return;
+        } else {
+          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "SolrCore not found:" + coreName + " in " + cc.getCoreNames());
+        }
+      }
+      SolrConfig.UpdateHandlerInfo uinfo = core.getSolrConfig().getUpdateHandlerInfo();
+      String pollIntervalStr = "00:00:03";
+      if (uinfo.autoCommmitMaxTime != -1) {
+        pollIntervalStr = toPollIntervalStr(uinfo.autoCommmitMaxTime/2);
+      } else if (uinfo.autoSoftCommmitMaxTime != -1) {
+        pollIntervalStr = toPollIntervalStr(uinfo.autoSoftCommmitMaxTime/2);
+      }
+
+      NamedList slaveConfig = new NamedList();
+      slaveConfig.add("fetchFromLeader", true);
+      slaveConfig.add("pollInterval", pollIntervalStr);
+      NamedList replicationConfig = new NamedList();
+      replicationConfig.add("slave", slaveConfig);
+
+      String lastCommitVersion = getCommitVersion(core);
+      if (lastCommitVersion != null) {
+        lastVersion = Long.parseLong(lastCommitVersion);
+      }
+
+      replicationProcess = new ReplicationHandler();
+      replicationProcess.setPollListener((solrCore, pollSuccess) -> {
+        if (pollSuccess) {
+          String commitVersion = getCommitVersion(core);
+          if (commitVersion == null) return;
+          if (Long.parseLong(commitVersion) == lastVersion) return;
+          UpdateLog updateLog = solrCore.getUpdateHandler().getUpdateLog();
+          SolrQueryRequest req = new LocalSolrQueryRequest(core,
+              new ModifiableSolrParams());
+          CommitUpdateCommand cuc = new CommitUpdateCommand(req, false);
+          cuc.setVersion(Long.parseLong(commitVersion));
+          updateLog.copyOverOldUpdates(cuc);
+          lastVersion = Long.parseLong(commitVersion);
+        }
+      });
+      replicationProcess.init(replicationConfig);
+      replicationProcess.inform(core);
+    }
+  }
+
+  public static String getCommitVersion(SolrCore solrCore) {
+    IndexCommit commit = solrCore.getDeletionPolicy().getLatestCommit();
+    try {
+      String commitVersion = commit.getUserData().get(SolrIndexWriter.COMMIT_COMMAND_VERSION);
+      if (commitVersion == null) return null;
+      else return commitVersion;
+    } catch (Exception e) {
+      LOG.warn("Cannot get commit command version from index commit point ",e);
+      return null;
+    }
+  }
+
+  private static String toPollIntervalStr(int ms) {
+    int sec = ms/1000;
+    int hour = sec / 3600;
+    sec = sec % 3600;
+    int min = sec / 60;
+    sec = sec % 60;
+    return hour + ":" + min + ":" + sec;
+  }
+
+  public void stopReplication() {
+    replicationProcess.close();
+  }
+}
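
The poll interval for pulling from the leader is derived from the core's autoCommit settings (half
of autoCommit maxTime, else half of autoSoftCommit maxTime, else the "00:00:03" default). A worked
example of toPollIntervalStr, assuming a 30 second autoCommit maxTime:

    int ms = 30000 / 2;          // poll at half the autoCommit interval -> 15000 ms
    int sec = ms / 1000;         // 15
    int hour = sec / 3600;       // 0
    sec = sec % 3600;            // 15
    int min = sec / 60;          // 0
    sec = sec % 60;              // 15
    String pollIntervalStr = hour + ":" + min + ":" + sec;   // "0:0:15"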

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/core/src/java/org/apache/solr/cloud/ZkController.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkController.java b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
index 333acd4..69a77f9 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ZkController.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
@@ -189,6 +189,7 @@ public class ZkController {
 
   private LeaderElector overseerElector;
 
+  private Map<String, ReplicateFromLeader> replicateFromLeaders = new ConcurrentHashMap<>();
 
   // for now, this can be null in tests, in which case recovery will be inactive, and other features
   // may accept defaults or use mocks rather than pulling things from a CoreContainer
@@ -877,7 +878,7 @@ public class ZkController {
           coreName, baseUrl, cloudDesc.getCollectionName(), shardId);
       
       ZkNodeProps leaderProps = new ZkNodeProps(props);
-      
+
       try {
         // If we're a preferred leader, insert ourselves at the head of the queue
         boolean joinAtHead = false;
@@ -913,9 +914,16 @@ public class ZkController {
         // leader election perhaps?
         
         UpdateLog ulog = core.getUpdateHandler().getUpdateLog();
-        
+        boolean onlyLeaderIndexes = zkStateReader.getClusterState().getCollection(collection).getRealtimeReplicas() == 1;
+        boolean isReplicaInOnlyLeaderIndexes = onlyLeaderIndexes && !isLeader;
+        if (isReplicaInOnlyLeaderIndexes) {
+          String commitVersion = ReplicateFromLeader.getCommitVersion(core);
+          if (commitVersion != null) {
+            ulog.copyOverOldUpdates(Long.parseLong(commitVersion));
+          }
+        }
         // we will call register again after zk expiration and on reload
-        if (!afterExpiration && !core.isReloaded() && ulog != null) {
+        if (!afterExpiration && !core.isReloaded() && ulog != null && !isReplicaInOnlyLeaderIndexes) {
           // disable recovery in case shard is in construction state (for shard splits)
           Slice slice = getClusterState().getSlice(collection, shardId);
           if (slice.getState() != Slice.State.CONSTRUCTION || !isLeader) {
@@ -934,6 +942,9 @@ public class ZkController {
         boolean didRecovery
             = checkRecovery(recoverReloadedCores, isLeader, skipRecovery, collection, coreZkNodeName, core, cc, afterExpiration);
         if (!didRecovery) {
+          if (isReplicaInOnlyLeaderIndexes) {
+            startReplicationFromLeader(coreName);
+          }
           publish(desc, Replica.State.ACTIVE);
         }
         
@@ -948,6 +959,20 @@ public class ZkController {
     }
   }
 
+  public void startReplicationFromLeader(String coreName) throws InterruptedException {
+    ReplicateFromLeader replicateFromLeader = new ReplicateFromLeader(cc, coreName);
+    if (replicateFromLeaders.putIfAbsent(coreName, replicateFromLeader) == null) {
+      replicateFromLeader.startReplication();
+    }
+  }
+
+  public void stopReplicationFromLeader(String coreName) {
+    ReplicateFromLeader replicateFromLeader = replicateFromLeaders.remove(coreName);
+    if (replicateFromLeader != null) {
+      replicateFromLeader.stopReplication();
+    }
+  }
+
   // timeoutms is the timeout for the first call to get the leader - there is then
   // a longer wait to make sure that leader matches our local state
   private String getLeader(final CloudDescriptor cloudDesc, int timeoutms) {
@@ -1424,13 +1449,7 @@ public class ZkController {
             errorMessage.set("coreNodeName " + coreNodeName + " does not exist in shard " + cloudDesc.getShardId());
             return false;
           }
-          String baseUrl = replica.getStr(BASE_URL_PROP);
-          String coreName = replica.getStr(CORE_NAME_PROP);
-          if (baseUrl.equals(this.baseURL) && coreName.equals(cd.getName())) {
-            return true;
-          }
-          errorMessage.set("coreNodeName " + coreNodeName + " exists, but does not match expected node or core name");
-          return false;
+          return true;
         });
       } catch (TimeoutException e) {
         String error = errorMessage.get();
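
startReplicationFromLeader leans on ConcurrentHashMap.putIfAbsent so that concurrent registration
attempts for the same core start replication only once; a minimal standalone illustration of the
idiom (hypothetical key and value):

    java.util.concurrent.ConcurrentHashMap<String, Object> replicators = new java.util.concurrent.ConcurrentHashMap<>();
    Object candidate = new Object();
    if (replicators.putIfAbsent("core_node1", candidate) == null) {
      // first registration for "core_node1" wins; a losing concurrent caller sees non-null and skips startup
    }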

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/core/src/java/org/apache/solr/core/CoreContainer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index 5a6f98f..e70966a 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -36,7 +36,6 @@ import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Future;
 
-import com.codahale.metrics.Gauge;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Maps;
 import org.apache.http.auth.AuthSchemeProvider;
@@ -531,17 +530,19 @@ public class CoreContainer {
 
     containerProperties.putAll(cfg.getSolrProperties());
 
-    // initialize gauges for reporting the number of cores
-    Gauge<Integer> loadedCores = () -> solrCores.getCores().size();
-    Gauge<Integer> lazyCores = () -> solrCores.getCoreNames().size() - solrCores.getCores().size();
-    Gauge<Integer> unloadedCores = () -> solrCores.getAllCoreNames().size() - solrCores.getCoreNames().size();
+    // initialize gauges for reporting the number of cores and disk total/free
 
-    metricManager.register(SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node),
-        loadedCores, true, "loaded", SolrInfoMBean.Category.CONTAINER.toString(), "cores");
-    metricManager.register(SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node),
-        lazyCores, true, "lazy",SolrInfoMBean.Category.CONTAINER.toString(), "cores");
-    metricManager.register(SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node),
-        unloadedCores, true, "unloaded",SolrInfoMBean.Category.CONTAINER.toString(), "cores");
+    String registryName = SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node);
+    metricManager.registerGauge(registryName, () -> solrCores.getCores().size(),
+        true, "loaded", SolrInfoMBean.Category.CONTAINER.toString(), "cores");
+    metricManager.registerGauge(registryName, () -> solrCores.getCoreNames().size() - solrCores.getCores().size(),
+        true, "lazy",SolrInfoMBean.Category.CONTAINER.toString(), "cores");
+    metricManager.registerGauge(registryName, () -> solrCores.getAllCoreNames().size() - solrCores.getCoreNames().size(),
+        true, "unloaded",SolrInfoMBean.Category.CONTAINER.toString(), "cores");
+    metricManager.registerGauge(registryName, () -> cfg.getCoreRootDirectory().toFile().getTotalSpace(),
+        true, "totalSpace", SolrInfoMBean.Category.CONTAINER.toString(), "fs");
+    metricManager.registerGauge(registryName, () -> cfg.getCoreRootDirectory().toFile().getUsableSpace(),
+        true, "usableSpace", SolrInfoMBean.Category.CONTAINER.toString(), "fs");
 
     if (isZooKeeperAware()) {
       metricManager.loadClusterReporters(cfg.getMetricReporterPlugins(), this);
@@ -1138,6 +1139,13 @@ public class CoreContainer {
         log.info("Reloading SolrCore '{}' using configuration from {}", cd.getName(), coreConfig.getName());
         SolrCore newCore = core.reload(coreConfig);
         registerCore(cd.getName(), newCore, false, false);
+        if (getZkController() != null) {
+          boolean onlyLeaderIndexes = getZkController().getClusterState().getCollection(cd.getCollectionName()).getRealtimeReplicas() == 1;
+          if (onlyLeaderIndexes && !cd.getCloudDescriptor().isLeader()) {
+            getZkController().stopReplicationFromLeader(core.getName());
+            getZkController().startReplicationFromLeader(newCore.getName());
+          }
+        }
       } catch (SolrCoreState.CoreIsClosedException e) {
         throw e;
       } catch (Exception e) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/core/src/java/org/apache/solr/core/RequestParams.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/RequestParams.java b/solr/core/src/java/org/apache/solr/core/RequestParams.java
index ff0d36c..fbb2555 100644
--- a/solr/core/src/java/org/apache/solr/core/RequestParams.java
+++ b/solr/core/src/java/org/apache/solr/core/RequestParams.java
@@ -222,7 +222,7 @@ public class RequestParams implements MapSerializable {
     }
 
     public Long getVersion() {
-      return meta == null ? 0l : (Long) meta.get("v");
+      return meta == null ? Long.valueOf(0l) : (Long) meta.get("v");
     }
 
     @Override
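
Boxing the constant keeps the conditional expression typed as Long; with a bare 0l the ternary is
typed long, so the (Long) branch gets unboxed and a missing "v" entry becomes a NullPointerException
instead of a null version. A standalone illustration (hypothetical map):

    java.util.Map<String, Object> meta = new java.util.HashMap<>();              // present, but no "v" entry
    // Long v = meta == null ? 0l : (Long) meta.get("v");                        // long-typed ternary: unboxes null -> NPE
    Long v = meta == null ? Long.valueOf(0l) : (Long) meta.get("v");             // Long-typed ternary: v is simply null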

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/core/src/java/org/apache/solr/core/SolrConfig.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrConfig.java b/solr/core/src/java/org/apache/solr/core/SolrConfig.java
index d28a1e3..91cfb28 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrConfig.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrConfig.java
@@ -58,6 +58,7 @@ import org.apache.lucene.index.IndexDeletionPolicy;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.util.Version;
 import org.apache.solr.client.solrj.io.stream.expr.Expressible;
+import org.apache.solr.cloud.RecoveryStrategy;
 import org.apache.solr.cloud.ZkSolrResourceLoader;
 import org.apache.solr.common.MapSerializable;
 import org.apache.solr.common.SolrException;
@@ -365,6 +366,7 @@ public class SolrConfig extends Config implements MapSerializable {
       .add(new SolrPluginInfo(SolrEventListener.class, "//listener", REQUIRE_CLASS, MULTI_OK, REQUIRE_NAME_IN_OVERLAY))
 
       .add(new SolrPluginInfo(DirectoryFactory.class, "directoryFactory", REQUIRE_CLASS))
+      .add(new SolrPluginInfo(RecoveryStrategy.Builder.class, "recoveryStrategy"))
       .add(new SolrPluginInfo(IndexDeletionPolicy.class, "indexConfig/deletionPolicy", REQUIRE_CLASS))
       .add(new SolrPluginInfo(CodecFactory.class, "codecFactory", REQUIRE_CLASS))
       .add(new SolrPluginInfo(IndexReaderFactory.class, "indexReaderFactory", REQUIRE_CLASS))

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/core/src/java/org/apache/solr/core/SolrCore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index c8e8067..47a7880 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -30,6 +30,8 @@ import java.lang.reflect.Constructor;
 import java.net.URL;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.NoSuchFileException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -74,6 +76,7 @@ import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.store.LockObtainFailedException;
 import org.apache.solr.client.solrj.impl.BinaryResponseParser;
 import org.apache.solr.cloud.CloudDescriptor;
+import org.apache.solr.cloud.RecoveryStrategy;
 import org.apache.solr.cloud.ZkSolrResourceLoader;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.ClusterState;
@@ -169,7 +172,7 @@ import static org.apache.solr.common.params.CommonParams.PATH;
 /**
  *
  */
-public final class SolrCore implements SolrInfoMBean, Closeable {
+public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closeable {
 
   public static final String version="1.0";
 
@@ -205,6 +208,7 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
   private final IndexDeletionPolicyWrapper solrDelPolicy;
   private final SolrSnapshotMetaDataManager snapshotMgr;
   private final DirectoryFactory directoryFactory;
+  private final RecoveryStrategy.Builder recoveryStrategyBuilder;
   private IndexReaderFactory indexReaderFactory;
   private final Codec codec;
   private final MemClassLoader memClassLoader;
@@ -214,11 +218,11 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
   private final ReentrantLock ruleExpiryLock;
   private final ReentrantLock snapshotDelLock; // A lock instance to guard against concurrent deletions.
 
-  private final Timer newSearcherTimer;
-  private final Timer newSearcherWarmupTimer;
-  private final Counter newSearcherCounter;
-  private final Counter newSearcherMaxReachedCounter;
-  private final Counter newSearcherOtherErrorsCounter;
+  private Timer newSearcherTimer;
+  private Timer newSearcherWarmupTimer;
+  private Counter newSearcherCounter;
+  private Counter newSearcherMaxReachedCounter;
+  private Counter newSearcherOtherErrorsCounter;
 
   public Date getStartTimeStamp() { return startTime; }
 
@@ -659,6 +663,22 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
     return DirectoryFactory.loadDirectoryFactory(solrConfig, getCoreDescriptor().getCoreContainer(), coreMetricManager.getRegistryName());
   }
 
+  private RecoveryStrategy.Builder initRecoveryStrategyBuilder() {
+    final PluginInfo info = solrConfig.getPluginInfo(RecoveryStrategy.Builder.class.getName());
+    final RecoveryStrategy.Builder rsBuilder;
+    if (info != null && info.className != null) {
+      log.info(info.className);
+      rsBuilder = getResourceLoader().newInstance(info.className, RecoveryStrategy.Builder.class);
+    } else {
+      log.info("solr.RecoveryStrategy.Builder");
+      rsBuilder = new RecoveryStrategy.Builder();
+    }
+    if (info != null) {
+      rsBuilder.init(info.initArgs);
+    }
+    return rsBuilder;
+  }
+
   private void initIndexReaderFactory() {
     IndexReaderFactory indexReaderFactory;
     PluginInfo info = solrConfig.getPluginInfo(IndexReaderFactory.class.getName());
@@ -886,10 +906,12 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
 
     if (updateHandler == null) {
       directoryFactory = initDirectoryFactory();
-      solrCoreState = new DefaultSolrCoreState(directoryFactory);
+      recoveryStrategyBuilder = initRecoveryStrategyBuilder();
+      solrCoreState = new DefaultSolrCoreState(directoryFactory, recoveryStrategyBuilder);
     } else {
       solrCoreState = updateHandler.getSolrCoreState();
       directoryFactory = solrCoreState.getDirectoryFactory();
+      recoveryStrategyBuilder = solrCoreState.getRecoveryStrategyBuilder();
       isReloaded = true;
     }
 
@@ -903,11 +925,7 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
     SolrMetricManager metricManager = this.coreDescriptor.getCoreContainer().getMetricManager();
 
     // initialize searcher-related metrics
-    newSearcherCounter = metricManager.counter(coreMetricManager.getRegistryName(), "new", Category.SEARCHER.toString());
-    newSearcherTimer = metricManager.timer(coreMetricManager.getRegistryName(), "time", Category.SEARCHER.toString(), "new");
-    newSearcherWarmupTimer = metricManager.timer(coreMetricManager.getRegistryName(), "warmup", Category.SEARCHER.toString(), "new");
-    newSearcherMaxReachedCounter = metricManager.counter(coreMetricManager.getRegistryName(), "maxReached", Category.SEARCHER.toString(), "new");
-    newSearcherOtherErrorsCounter = metricManager.counter(coreMetricManager.getRegistryName(), "errors", Category.SEARCHER.toString(), "new");
+    initializeMetrics(metricManager, coreMetricManager.getRegistryName(), null);
 
     // Initialize JMX
     this.infoRegistry = initInfoRegistry(name, config);
@@ -1127,6 +1145,29 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
     return coreMetricManager;
   }
 
+  @Override
+  public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
+    newSearcherCounter = manager.counter(registry, "new", Category.SEARCHER.toString());
+    newSearcherTimer = manager.timer(registry, "time", Category.SEARCHER.toString(), "new");
+    newSearcherWarmupTimer = manager.timer(registry, "warmup", Category.SEARCHER.toString(), "new");
+    newSearcherMaxReachedCounter = manager.counter(registry, "maxReached", Category.SEARCHER.toString(), "new");
+    newSearcherOtherErrorsCounter = manager.counter(registry, "errors", Category.SEARCHER.toString(), "new");
+
+    manager.registerGauge(registry, () -> name == null ? "(null)" : name, true, "coreName", Category.CORE.toString());
+    manager.registerGauge(registry, () -> startTime, true, "startTime", Category.CORE.toString());
+    manager.registerGauge(registry, () -> getOpenCount(), true, "refCount", Category.CORE.toString());
+    manager.registerGauge(registry, () -> resourceLoader.getInstancePath(), true, "instanceDir", Category.CORE.toString());
+    manager.registerGauge(registry, () -> getIndexDir(), true, "indexDir", Category.CORE.toString());
+    manager.registerGauge(registry, () -> getIndexSize(), true, "sizeInBytes", Category.INDEX.toString());
+    manager.registerGauge(registry, () -> NumberUtils.readableSize(getIndexSize()), true, "size", Category.INDEX.toString());
+    manager.registerGauge(registry, () -> coreDescriptor.getCoreContainer().getCoreNames(this), true, "aliases", Category.CORE.toString());
+    // initialize disk total / free metrics
+    Path dataDirPath = Paths.get(dataDir);
+    File dataDirFile = dataDirPath.toFile();
+    manager.registerGauge(registry, () -> dataDirFile.getTotalSpace(), true, "totalSpace", Category.CORE.toString(), "fs");
+    manager.registerGauge(registry, () -> dataDirFile.getUsableSpace(), true, "usableSpace", Category.CORE.toString(), "fs");
+  }
+
   private Map<String,SolrInfoMBean> initInfoRegistry(String name, SolrConfig config) {
     if (config.jmxConfig.enabled) {
       return new JmxMonitoredMap<String, SolrInfoMBean>(name, coreMetricManager.getRegistryName(), String.valueOf(this.hashCode()), config.jmxConfig);
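
The new CORE "fs" gauges simply query the filesystem that holds the core's dataDir via java.io.File;
a standalone illustration (the path is made up):

    java.io.File dataDirFile = java.nio.file.Paths.get("/var/solr/data/techproducts/data").toFile();
    long totalSpace  = dataDirFile.getTotalSpace();    // bytes on the partition holding dataDir
    long usableSpace = dataDirFile.getUsableSpace();   // bytes still writable by this JVM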

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java b/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
index 8634aee..33e8091 100644
--- a/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
+++ b/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
@@ -68,8 +68,11 @@ import org.apache.solr.client.solrj.impl.HttpClientUtil;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.impl.HttpSolrClient.Builder;
 import org.apache.solr.client.solrj.request.QueryRequest;
+import org.apache.solr.cloud.CloudDescriptor;
+import org.apache.solr.cloud.ZkController;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
+import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.ExecutorUtil;
@@ -115,7 +118,7 @@ public class IndexFetcher {
 
   private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
-  private final String masterUrl;
+  private String masterUrl;
 
   final ReplicationHandler replicationHandler;
 
@@ -150,6 +153,8 @@ public class IndexFetcher {
 
   private boolean useExternalCompression = false;
 
+  private boolean fetchFromLeader = false;
+
   private final HttpClient myHttpClient;
 
   private Integer connTimeout;
@@ -167,11 +172,15 @@ public class IndexFetcher {
 
   public IndexFetcher(final NamedList initArgs, final ReplicationHandler handler, final SolrCore sc) {
     solrCore = sc;
+    Object fetchFromLeader = initArgs.get(FETCH_FROM_LEADER);
+    if (fetchFromLeader != null && fetchFromLeader instanceof Boolean) {
+      this.fetchFromLeader = (boolean) fetchFromLeader;
+    }
     String masterUrl = (String) initArgs.get(MASTER_URL);
-    if (masterUrl == null)
+    if (masterUrl == null && !this.fetchFromLeader)
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
               "'masterUrl' is required for a slave");
-    if (masterUrl.endsWith(ReplicationHandler.PATH)) {
+    if (masterUrl != null && masterUrl.endsWith(ReplicationHandler.PATH)) {
       masterUrl = masterUrl.substring(0, masterUrl.length()-12);
       LOG.warn("'masterUrl' must be specified without the "+ReplicationHandler.PATH+" suffix");
     }
@@ -298,6 +307,15 @@ public class IndexFetcher {
     }
 
     try {
+      if (fetchFromLeader) {
+        Replica replica = getLeaderReplica();
+        CloudDescriptor cd = solrCore.getCoreDescriptor().getCloudDescriptor();
+        if (cd.getCoreNodeName().equals(replica.getName())) {
+          return false;
+        }
+        masterUrl = replica.getCoreUrl();
+        LOG.info("Updated masterUrl to " + masterUrl);
+      }
       //get the current 'replicateable' index version in the master
       NamedList response;
       try {
@@ -404,7 +422,7 @@ public class IndexFetcher {
           isFullCopyNeeded = true;
         }
 
-        if (!isFullCopyNeeded) {
+        if (!isFullCopyNeeded && !fetchFromLeader) {
           // a searcher might be using some flushed but not committed segments
           // because of soft commits (which open a searcher on IW's data)
           // so we need to close the existing searcher on the last commit
@@ -565,6 +583,14 @@ public class IndexFetcher {
     }
   }
 
+  private Replica getLeaderReplica() throws InterruptedException {
+    ZkController zkController = solrCore.getCoreDescriptor().getCoreContainer().getZkController();
+    CloudDescriptor cd = solrCore.getCoreDescriptor().getCloudDescriptor();
+    Replica leaderReplica = zkController.getZkStateReader().getLeaderRetry(
+        cd.getCollectionName(), cd.getShardId());
+    return leaderReplica;
+  }
+
   private void cleanup(final SolrCore core, Directory tmpIndexDir,
       Directory indexDir, boolean deleteTmpIdxDir, File tmpTlogDir, boolean successfulInstall) throws IOException {
     try {
@@ -671,7 +697,7 @@ public class IndexFetcher {
 
       int indexCount = 1, confFilesCount = 1;
       if (props.containsKey(TIMES_INDEX_REPLICATED)) {
-        indexCount = Integer.valueOf(props.getProperty(TIMES_INDEX_REPLICATED)) + 1;
+        indexCount = Integer.parseInt(props.getProperty(TIMES_INDEX_REPLICATED)) + 1;
       }
       StringBuilder sb = readToStringBuilder(replicationTime, props.getProperty(INDEX_REPLICATED_AT_LIST));
       props.setProperty(INDEX_REPLICATED_AT_LIST, sb.toString());
@@ -682,7 +708,7 @@ public class IndexFetcher {
         props.setProperty(CONF_FILES_REPLICATED, confFiles.toString());
         props.setProperty(CONF_FILES_REPLICATED_AT, String.valueOf(replicationTime));
         if (props.containsKey(TIMES_CONFIG_REPLICATED)) {
-          confFilesCount = Integer.valueOf(props.getProperty(TIMES_CONFIG_REPLICATED)) + 1;
+          confFilesCount = Integer.parseInt(props.getProperty(TIMES_CONFIG_REPLICATED)) + 1;
         }
         props.setProperty(TIMES_CONFIG_REPLICATED, String.valueOf(confFilesCount));
       }
@@ -691,7 +717,7 @@ public class IndexFetcher {
       if (!successfulInstall) {
         int numFailures = 1;
         if (props.containsKey(TIMES_FAILED)) {
-          numFailures = Integer.valueOf(props.getProperty(TIMES_FAILED)) + 1;
+          numFailures = Integer.parseInt(props.getProperty(TIMES_FAILED)) + 1;
         }
         props.setProperty(TIMES_FAILED, String.valueOf(numFailures));
         props.setProperty(REPLICATION_FAILED_AT, String.valueOf(replicationTime));
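
As a rough usage sketch (not part of the patch above), the new fetchFromLeader flag reaches IndexFetcher through its init arguments; the constructor only honours a Boolean value, and masterUrl may then be omitted. The replicationHandler and solrCore variables below are assumed to be in scope.

    NamedList<Object> initArgs = new NamedList<>();
    initArgs.add(ReplicationHandler.FETCH_FROM_LEADER, Boolean.TRUE); // must be a Boolean, not a String
    // no masterUrl needed: it is resolved from the shard leader before each fetch
    IndexFetcher fetcher = new IndexFetcher(initArgs, replicationHandler, solrCore);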

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
index cdbadc4..4f6a408 100644
--- a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
@@ -209,6 +209,11 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
   private Long pollIntervalNs;
   private String pollIntervalStr;
 
+  private PollListener pollListener;
+  public interface PollListener {
+    void onComplete(SolrCore solrCore, boolean pollSuccess) throws IOException;
+  }
+
   /**
    * Disable the timer task for polling
    */
@@ -218,6 +223,10 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
     return pollIntervalStr;
   }
 
+  public void setPollListener(PollListener pollListener) {
+    this.pollListener = pollListener;
+  }
+
   @Override
   public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
     rsp.setHttpCaching(false);
@@ -1066,7 +1075,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
       String ss[] = s.split(",");
       List<String> l = new ArrayList<>();
       for (String s1 : ss) {
-        l.add(new Date(Long.valueOf(s1)).toString());
+        l.add(new Date(Long.parseLong(s1)).toString());
       }
       nl.add(key, l);
     } else {
@@ -1142,7 +1151,8 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
       try {
         LOG.debug("Polling for index modifications");
         markScheduledExecutionStart();
-        doFetch(null, false);
+        boolean pollSuccess = doFetch(null, false);
+        if (pollListener != null) pollListener.onComplete(core, pollSuccess);
       } catch (Exception e) {
         LOG.error("Exception in fetching index", e);
       }
@@ -1328,6 +1338,20 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
     });
   }
 
+  public void close() {
+    if (executorService != null) executorService.shutdown();
+    if (pollingIndexFetcher != null) {
+      pollingIndexFetcher.destroy();
+    }
+    if (currentIndexFetcher != null && currentIndexFetcher != pollingIndexFetcher) {
+      currentIndexFetcher.destroy();
+    }
+    ExecutorUtil.shutdownAndAwaitTermination(restoreExecutor);
+    if (restoreFuture != null) {
+      restoreFuture.cancel(false);
+    }
+  }
+
   /**
    * Register a listener for postcommit/optimize
    *
@@ -1680,6 +1704,8 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
 
   public static final String MASTER_URL = "masterUrl";
 
+  public static final String FETCH_FROM_LEADER = "fetchFromLeader";
+
   public static final String STATUS = "status";
 
   public static final String COMMAND = "command";
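
A minimal sketch of the new PollListener hook, assuming a ReplicationHandler instance is already at hand; the listener body is illustrative only.

    replicationHandler.setPollListener((solrCore, pollSuccess) -> {
      // invoked after each scheduled poll; pollSuccess is the value returned by doFetch()
      if (!pollSuccess) {
        // e.g. surface the failed poll to monitoring or trigger a retry
      }
    });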

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/SQLHandler.java b/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
index d65ea56..7563fe8 100644
--- a/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
@@ -34,7 +34,9 @@ import org.apache.solr.client.solrj.io.stream.ExceptionStream;
 import org.apache.solr.client.solrj.io.stream.JDBCStream;
 import org.apache.solr.client.solrj.io.stream.TupleStream;
 import org.apache.solr.common.SolrException;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.handler.sql.CalciteSolrDriver;
@@ -74,6 +76,9 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware, Per
 
   public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
     ModifiableSolrParams params = new ModifiableSolrParams(req.getParams());
+    params = adjustParams(params);
+    req.setParams(params);
+
     String sql = params.get("stmt");
     // Set defaults for parameters
     params.set("numWorkers", params.getInt("numWorkers", 1));
@@ -139,6 +144,8 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware, Per
   private class SqlHandlerStream extends JDBCStream {
     private final boolean includeMetadata;
     private boolean firstTuple = true;
+    List<String> metadataFields = new ArrayList<>();
+    Map<String, String> metadataAliases = new HashMap<>();
 
     SqlHandlerStream(String connectionUrl, String sqlQuery, StreamComparator definedSort,
                      Properties connectionProperties, String driverClassName, boolean includeMetadata)
@@ -151,7 +158,7 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware, Per
     @Override
     public Tuple read() throws IOException {
       // Return a metadata tuple as the first tuple and then pass through to the JDBCStream.
-      if(includeMetadata && firstTuple) {
+      if(firstTuple) {
         try {
           Map<String, Object> fields = new HashMap<>();
 
@@ -159,8 +166,6 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware, Per
 
           ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
 
-          List<String> metadataFields = new ArrayList<>();
-          Map<String, String> metadataAliases = new HashMap<>();
           for(int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
             String columnName = resultSetMetaData.getColumnName(i);
             String columnLabel = resultSetMetaData.getColumnLabel(i);
@@ -168,16 +173,30 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware, Per
             metadataAliases.put(columnName, columnLabel);
           }
 
-          fields.put("isMetadata", true);
-          fields.put("fields", metadataFields);
-          fields.put("aliases", metadataAliases);
-          return new Tuple(fields);
+          if(includeMetadata) {
+            fields.put("isMetadata", true);
+            fields.put("fields", metadataFields);
+            fields.put("aliases", metadataAliases);
+            return new Tuple(fields);
+          }
         } catch (SQLException e) {
           throw new IOException(e);
         }
-      } else {
-        return super.read();
       }
+
+      Tuple tuple = super.read();
+      if(!tuple.EOF) {
+        tuple.fieldNames = metadataFields;
+        tuple.fieldLabels = metadataAliases;
+      }
+      return tuple;
     }
   }
+
+  private ModifiableSolrParams adjustParams(SolrParams params) {
+    ModifiableSolrParams adjustedParams = new ModifiableSolrParams();
+    adjustedParams.add(params);
+    adjustedParams.add(CommonParams.OMIT_HEADER, "true");
+    return adjustedParams;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
index d7759ca..2e17af6 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
@@ -115,6 +115,7 @@ import static org.apache.solr.common.cloud.DocCollection.STATE_FORMAT;
 import static org.apache.solr.common.cloud.ZkStateReader.AUTO_ADD_REPLICAS;
 import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
 import static org.apache.solr.common.cloud.ZkStateReader.MAX_SHARDS_PER_NODE;
+import static org.apache.solr.common.cloud.ZkStateReader.REALTIME_REPLICAS;
 import static org.apache.solr.common.cloud.ZkStateReader.PROPERTY_PROP;
 import static org.apache.solr.common.cloud.ZkStateReader.PROPERTY_VALUE_PROP;
 import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR;
@@ -404,7 +405,8 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
           STATE_FORMAT,
           AUTO_ADD_REPLICAS,
           RULE,
-          SNITCH);
+          SNITCH,
+          REALTIME_REPLICAS);
 
       if (props.get(STATE_FORMAT) == null) {
         props.put(STATE_FORMAT, "2");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
index b53c818..25f317c 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
@@ -50,6 +50,8 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
   final CoreContainer container;
   final SolrMetricManager metricManager;
 
+  public static final String COMPACT_PARAM = "compact";
+
   public MetricsHandler() {
     this.container = null;
     this.metricManager = null;
@@ -71,6 +73,7 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
       throw new SolrException(SolrException.ErrorCode.INVALID_STATE, "Core container instance not initialized");
     }
 
+    boolean compact = req.getParams().getBool(COMPACT_PARAM, false);
     MetricFilter mustMatchFilter = parseMustMatchFilter(req);
     List<MetricType> metricTypes = parseMetricTypes(req);
     List<MetricFilter> metricFilters = metricTypes.stream().map(MetricType::asMetricFilter).collect(Collectors.toList());
@@ -79,7 +82,8 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
     NamedList response = new NamedList();
     for (String registryName : requestedRegistries) {
       MetricRegistry registry = metricManager.registry(registryName);
-      response.add(registryName, MetricUtils.toNamedList(registry, metricFilters, mustMatchFilter, false, false, null));
+      response.add(registryName, MetricUtils.toNamedList(registry, metricFilters, mustMatchFilter, false,
+          false, compact, null));
     }
     rsp.getValues().add("metrics", response);
   }
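
For illustration, a hedged SolrJ sketch of asking the metrics endpoint for the new compact output; only the parameter name comes from this change, while the request class, the /admin/metrics path and the solrClient instance are assumptions.

    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set(MetricsHandler.COMPACT_PARAM, true);   // "compact"
    GenericSolrRequest request =
        new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/metrics", params);
    // SimpleSolrResponse rsp = request.process(solrClient);  // solrClient points at a node base URL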

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/core/src/java/org/apache/solr/handler/admin/PropertiesRequestHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/PropertiesRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/PropertiesRequestHandler.java
index c16cded..57a7492 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/PropertiesRequestHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/PropertiesRequestHandler.java
@@ -17,12 +17,14 @@
 package org.apache.solr.handler.admin;
 
 import java.io.IOException;
+import java.util.Enumeration;
 
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.handler.RequestHandlerBase;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.util.RedactionUtils;
 
 import static org.apache.solr.common.params.CommonParams.NAME;
 
@@ -32,23 +34,36 @@ import static org.apache.solr.common.params.CommonParams.NAME;
  */
 public class PropertiesRequestHandler extends RequestHandlerBase
 {
+
+  public static final String REDACT_STRING = RedactionUtils.getRedactString();
+
   @Override
   public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws IOException 
   {
-    Object props = null;
+    NamedList<String> props = new SimpleOrderedMap<>();
     String name = req.getParams().get(NAME);
     if( name != null ) {
-      NamedList<String> p = new SimpleOrderedMap<>();
-      p.add( name, System.getProperty(name) );
-      props = p;
+      String property = getSecuredPropertyValue(name);
+      props.add( name, property);
     }
     else {
-      props = System.getProperties();
+      Enumeration<?> enumeration = System.getProperties().propertyNames();
+      while(enumeration.hasMoreElements()){
+        name = (String) enumeration.nextElement();
+        props.add(name, getSecuredPropertyValue(name));
+      }
     }
     rsp.add( "system.properties", props );
     rsp.setHttpCaching(false);
   }
-  
+
+  private String getSecuredPropertyValue(String name) {
+    if(RedactionUtils.isSystemPropertySensitive(name)){
+      return REDACT_STRING;
+    }
+    return System.getProperty(name);
+  }
+
   //////////////////////// SolrInfoMBeans methods //////////////////////
 
   @Override
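
A small sketch of the redaction behaviour relied on above; whether a given name is treated as sensitive depends on the pattern RedactionUtils is configured with, so the property name here is only an example.

    String name = "javax.net.ssl.keyStorePassword";   // example name; matching depends on the configured pattern
    String reported = RedactionUtils.isSystemPropertySensitive(name)
        ? RedactionUtils.getRedactString()             // same marker as REDACT_STRING above
        : System.getProperty(name);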

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/core/src/java/org/apache/solr/handler/admin/SystemInfoHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/SystemInfoHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/SystemInfoHandler.java
index d031d69..fc1679f 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/SystemInfoHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/SystemInfoHandler.java
@@ -36,6 +36,8 @@ import java.text.DecimalFormat;
 import java.text.DecimalFormatSymbols;
 import java.util.Arrays;
 import java.util.Date;
+import java.util.LinkedList;
+import java.util.List;
 import java.util.Locale;
 
 import org.apache.commons.io.IOUtils;
@@ -50,7 +52,7 @@ import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.util.RTimer;
-
+import org.apache.solr.util.RedactionUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -66,6 +68,8 @@ public class SystemInfoHandler extends RequestHandlerBase
 {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
+  public static String REDACT_STRING = RedactionUtils.getRedactString();
+
   /**
    * <p>
    * Undocumented expert level system property to prevent doing a reverse lookup of our hostname.
@@ -373,7 +377,7 @@ public class SystemInfoHandler extends RequestHandlerBase
 
       // the input arguments passed to the Java virtual machine
       // which does not include the arguments to the main method.
-      jmx.add( "commandLineArgs", mx.getInputArguments());
+      jmx.add( "commandLineArgs", getInputArgumentsRedacted(mx));
 
       jmx.add( "startTime", new Date(mx.getStartTime()));
       jmx.add( "upTimeMS",  mx.getUptime() );
@@ -436,6 +440,18 @@ public class SystemInfoHandler extends RequestHandlerBase
 
     return newSizeAndUnits;
   }
+
+  private static List<String> getInputArgumentsRedacted(RuntimeMXBean mx) {
+    List<String> list = new LinkedList<>();
+    for (String arg : mx.getInputArguments()) {
+      if (arg.startsWith("-D") && arg.contains("=") && RedactionUtils.isSystemPropertySensitive(arg.substring(2, arg.indexOf("=")))) {
+        list.add(String.format(Locale.ROOT, "%s=%s", arg.substring(0, arg.indexOf("=")), REDACT_STRING));
+      } else {
+        list.add(arg);
+      }
+    }
+    return list;
+  }
   
 }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/core/src/java/org/apache/solr/handler/component/RangeFacetRequest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/RangeFacetRequest.java b/solr/core/src/java/org/apache/solr/handler/component/RangeFacetRequest.java
index 3ac7300..c234866 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/RangeFacetRequest.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/RangeFacetRequest.java
@@ -659,7 +659,7 @@ public class RangeFacetRequest extends FacetComponent.FacetBase {
 
     @Override
     public Float parseAndAddGap(Float value, String gap) {
-      return new Float(value.floatValue() + Float.valueOf(gap).floatValue());
+      return new Float(value.floatValue() + Float.parseFloat(gap));
     }
   }
 
@@ -677,7 +677,7 @@ public class RangeFacetRequest extends FacetComponent.FacetBase {
 
     @Override
     public Double parseAndAddGap(Double value, String gap) {
-      return new Double(value.doubleValue() + Double.valueOf(gap).doubleValue());
+      return new Double(value.doubleValue() + Double.parseDouble(gap));
     }
   }
 
@@ -695,7 +695,7 @@ public class RangeFacetRequest extends FacetComponent.FacetBase {
 
     @Override
     public Integer parseAndAddGap(Integer value, String gap) {
-      return new Integer(value.intValue() + Integer.valueOf(gap).intValue());
+      return new Integer(value.intValue() + Integer.parseInt(gap));
     }
   }
 
@@ -713,7 +713,7 @@ public class RangeFacetRequest extends FacetComponent.FacetBase {
 
     @Override
     public Long parseAndAddGap(Long value, String gap) {
-      return new Long(value.longValue() + Long.valueOf(gap).longValue());
+      return new Long(value.longValue() + Long.parseLong(gap));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
index 123abea..900c787 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
@@ -690,24 +690,52 @@ public class RealTimeGetComponent extends SearchComponent
           List<Object> vals = new ArrayList<>();
           if (f.fieldType().docValuesType() == DocValuesType.SORTED_NUMERIC) {
             // SORTED_NUMERICS store sortable bits version of the value, need to retrieve the original
-            vals.add(sf.getType().toObject(f));
+            vals.add(sf.getType().toObject(f)); // (will materialize by side-effect)
           } else {
-            vals.add( f );
+            vals.add( materialize(f) );
           }
           out.setField( f.name(), vals );
         }
         else{
-          out.setField( f.name(), f );
+          out.setField( f.name(), materialize(f) );
         }
       }
       else {
-        out.addField( f.name(), f );
+        out.addField( f.name(), materialize(f) );
       }
     }
     return out;
   }
 
   /**
+   * Ensure we don't have {@link org.apache.lucene.document.LazyDocument.LazyField} or equivalent.
+   * It can pose problems if the searcher is about to be closed and we haven't fetched a value yet.
+   */
+  private static IndexableField materialize(IndexableField in) {
+    if (in instanceof Field) { // already materialized
+      return in;
+    }
+    return new ClonedField(in);
+  }
+
+  private static class ClonedField extends Field { // TODO Lucene Field has no copy constructor; maybe it should?
+    ClonedField(IndexableField in) {
+      super(in.name(), in.fieldType());
+      this.fieldsData = in.numericValue();
+      if (this.fieldsData == null) {
+        this.fieldsData = in.binaryValue();
+        if (this.fieldsData == null) {
+          this.fieldsData = in.stringValue();
+          if (this.fieldsData == null) {
+            // fallback:
+            assert false : in; // unexpected
+          }
+        }
+      }
+    }
+  }
+
+  /**
    * Converts a SolrInputDocument to SolrDocument, using an IndexSchema instance. 
    * @lucene.experimental
    */
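
The materialize()/ClonedField change above copies values out of lazily loaded stored fields so they stay usable once the originating searcher is closed. The same idea with public Lucene types, as a sketch only (the field name and doc variable are illustrative, and numeric fields are elided):

    IndexableField in = doc.getField("title");         // may be a lazy-loading stored field
    StoredField copy = (in.binaryValue() != null)
        ? new StoredField(in.name(), in.binaryValue())
        : new StoredField(in.name(), in.stringValue());
    // 'copy' no longer references the searcher the lazy field came from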

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java b/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java
index ba581d4..c05c6c4 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java
@@ -478,6 +478,11 @@ public class SearchHandler extends RequestHandlerBase implements SolrCoreAware ,
     }
     return sb.toString();
   }
+
+  @Override
+  public Boolean registerV2() {
+    return Boolean.TRUE;
+  }
 }
 
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/core/src/java/org/apache/solr/handler/sql/SolrEnumerator.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/sql/SolrEnumerator.java b/solr/core/src/java/org/apache/solr/handler/sql/SolrEnumerator.java
index 6f9dddf..be6046c 100644
--- a/solr/core/src/java/org/apache/solr/handler/sql/SolrEnumerator.java
+++ b/solr/core/src/java/org/apache/solr/handler/sql/SolrEnumerator.java
@@ -103,10 +103,10 @@ class SolrEnumerator implements Enumerator<Object> {
   private Object getRealVal(Object val) {
     // Check if Double is really a Long
     if(val instanceof Double) {
-      Double doubleVal = (double) val;
+      double doubleVal = (double) val;
       //make sure that double has no decimals and fits within Long
       if(doubleVal % 1 == 0 && doubleVal >= Long.MIN_VALUE && doubleVal <= Long.MAX_VALUE) {
-        return doubleVal.longValue();
+        return (long)doubleVal;
       }
       return doubleVal;
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java b/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
index 9375bc0..644ed97 100644
--- a/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
+++ b/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
@@ -330,24 +330,16 @@ class SolrTable extends AbstractQueryableTable implements TranslatableTable {
 
   private String getFields(Set<String> fieldSet) {
     StringBuilder buf = new StringBuilder();
-    boolean appendVersion = true;
     for(String field : fieldSet) {
 
       if(buf.length() > 0) {
         buf.append(",");
       }
 
-      if(field.equals("_version_")) {
-        appendVersion = false;
-      }
 
       buf.append(field);
     }
 
-    if(appendVersion){
-      buf.append(",_version_");
-    }
-
     return buf.toString();
   }
 
@@ -461,6 +453,7 @@ class SolrTable extends AbstractQueryableTable implements TranslatableTable {
 
     params.set(CommonParams.FL, fl);
     params.set(CommonParams.Q, query);
+    params.set(CommonParams.WT, CommonParams.JAVABIN);
     //Always use the /export handler for Group By Queries because it requires exporting full result sets.
     params.set(CommonParams.QT, "/export");
 
@@ -699,6 +692,7 @@ class SolrTable extends AbstractQueryableTable implements TranslatableTable {
 
     params.set(CommonParams.FL, fl);
     params.set(CommonParams.Q, query);
+    params.set(CommonParams.WT, CommonParams.JAVABIN);
     //Always use the /export handler for Distinct Queries because it requires exporting full result sets.
     params.set(CommonParams.QT, "/export");
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/887d1b18/solr/core/src/java/org/apache/solr/index/UninvertDocValuesMergePolicyFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/index/UninvertDocValuesMergePolicyFactory.java b/solr/core/src/java/org/apache/solr/index/UninvertDocValuesMergePolicyFactory.java
new file mode 100644
index 0000000..b6bfbed
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/index/UninvertDocValuesMergePolicyFactory.java
@@ -0,0 +1,218 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.index;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lucene.codecs.DocValuesProducer;
+import org.apache.lucene.index.BinaryDocValues;
+import org.apache.lucene.index.CodecReader;
+import org.apache.lucene.index.DocValuesType;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.FieldInfos;
+import org.apache.lucene.index.FilterCodecReader;
+import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.index.MergePolicy;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.index.OneMergeWrappingMergePolicy;
+import org.apache.lucene.index.SegmentCommitInfo;
+import org.apache.lucene.index.SortedDocValues;
+import org.apache.lucene.index.SortedNumericDocValues;
+import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.schema.IndexSchema;
+import org.apache.solr.schema.SchemaField;
+import org.apache.solr.uninverting.UninvertingReader;
+
+/**
+ * A merge policy that can detect schema changes and write docvalues into merging segments
+ * when a field has docvalues enabled, using UninvertingReader.
+ * 
+ * This merge policy delegates to the wrapped merge policy for selecting merge segments.
+ * 
+ */
+public class UninvertDocValuesMergePolicyFactory extends WrapperMergePolicyFactory {
+  
+  final private boolean skipIntegrityCheck;
+
+  /**
+   * Whether or not the wrapped docValues producer should check consistency 
+   */
+  public boolean getSkipIntegrityCheck() {
+    return skipIntegrityCheck;
+  }
+
+  public UninvertDocValuesMergePolicyFactory(SolrResourceLoader resourceLoader, MergePolicyFactoryArgs args, IndexSchema schema) {
+    super(resourceLoader, args, schema);
+    final Boolean sic = (Boolean)args.remove("skipIntegrityCheck");
+    if (sic != null) {
+      this.skipIntegrityCheck = sic.booleanValue();
+    } else {
+      this.skipIntegrityCheck = false;
+    }
+    if (!args.keys().isEmpty()) {
+      throw new IllegalArgumentException("Arguments were "+args+" but "+getClass().getSimpleName()+" takes no arguments.");
+    }
+  }
+
+  @Override
+  protected MergePolicy getMergePolicyInstance(MergePolicy wrappedMP) {
+    return new OneMergeWrappingMergePolicy(wrappedMP, (merge) -> new UninvertDocValuesOneMerge(merge.segments));
+  }
+  
+  private UninvertingReader.Type getUninversionType(FieldInfo fi) {
+    SchemaField sf = schema.getFieldOrNull(fi.name);
+    
+    if (null != sf &&
+        sf.hasDocValues() &&
+        fi.getDocValuesType() == DocValuesType.NONE &&
+        fi.getIndexOptions() != IndexOptions.NONE) {
+      return sf.getType().getUninversionType(sf);
+    } else {
+      return null;
+    }
+  }
+    
+  private class UninvertDocValuesOneMerge extends MergePolicy.OneMerge {
+
+    public UninvertDocValuesOneMerge(List<SegmentCommitInfo> segments) {
+      super(segments);
+    }
+    
+    @Override
+    public CodecReader wrapForMerge(CodecReader reader) throws IOException {
+      // Wrap the reader with an uninverting reader if any of the fields have no docvalues
+      // but the schema says they should.
+      
+      
+      Map<String,UninvertingReader.Type> uninversionMap = null;
+      
+      for(FieldInfo fi: reader.getFieldInfos()) {
+        final UninvertingReader.Type type = getUninversionType(fi);
+        if (type != null) {
+          if (uninversionMap == null) {
+            uninversionMap = new HashMap<>();
+          }
+          uninversionMap.put(fi.name, type);
+        }
+        
+      }
+      
+      if(uninversionMap == null) {
+        return reader; // Default to normal reader if nothing to uninvert
+      } else {
+        return new UninvertingFilterCodecReader(reader, uninversionMap);
+      }
+      
+    }
+    
+  }
+  
+  
+  /**
+   * Delegates to an UninvertingReader for fields with docvalues.
+   * 
+   * This is going to blow up the FieldCache; look into an alternative implementation that
+   * uninverts without the FieldCache.
+   */
+  private class UninvertingFilterCodecReader extends FilterCodecReader {
+
+    private final UninvertingReader uninvertingReader;
+    private final DocValuesProducer docValuesProducer;
+
+    public UninvertingFilterCodecReader(CodecReader in, Map<String,UninvertingReader.Type> uninversionMap) {
+      super(in);
+
+      this.uninvertingReader = new UninvertingReader(in, uninversionMap);
+      this.docValuesProducer = new DocValuesProducer() {
+
+        @Override
+        public NumericDocValues getNumeric(FieldInfo field) throws IOException {
+          return uninvertingReader.getNumericDocValues(field.name);
+        }
+
+        @Override
+        public BinaryDocValues getBinary(FieldInfo field) throws IOException {
+          return uninvertingReader.getBinaryDocValues(field.name);
+        }
+
+        @Override
+        public SortedDocValues getSorted(FieldInfo field) throws IOException {
+          return uninvertingReader.getSortedDocValues(field.name);
+        }
+
+        @Override
+        public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException {
+          return uninvertingReader.getSortedNumericDocValues(field.name);
+        }
+
+        @Override
+        public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException {
+          return uninvertingReader.getSortedSetDocValues(field.name);
+        }
+
+        @Override
+        public void checkIntegrity() throws IOException {
+          if (!skipIntegrityCheck) {
+            uninvertingReader.checkIntegrity();
+          }
+        }
+
+        @Override
+        public void close() throws IOException {
+        }
+
+        @Override
+        public long ramBytesUsed() {
+          return 0;
+        }
+      };
+    }
+    
+    @Override
+    protected void doClose() throws IOException {
+      docValuesProducer.close();
+      uninvertingReader.close();
+      super.doClose();
+    }
+
+    @Override
+    public DocValuesProducer getDocValuesReader() {
+      return docValuesProducer;
+    }
+    
+    @Override
+    public FieldInfos getFieldInfos() {
+      return uninvertingReader.getFieldInfos();
+    }
+
+    @Override
+    public CacheHelper getCoreCacheHelper() {
+      return in.getCoreCacheHelper();
+    }
+
+    @Override
+    public CacheHelper getReaderCacheHelper() {
+      return in.getReaderCacheHelper();
+    }
+    
+  }
+
+}
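
Finally, a hedged sketch of building the new factory programmatically (Solr would normally instantiate it from solrconfig.xml); the resourceLoader and indexSchema variables are assumed, skipIntegrityCheck is the only argument the factory itself accepts, and the wrapped merge policy falls back to WrapperMergePolicyFactory's default when none is configured.

    MergePolicyFactoryArgs args = new MergePolicyFactoryArgs();
    args.add("skipIntegrityCheck", Boolean.TRUE);       // skip checkIntegrity() on the uninverted producer
    UninvertDocValuesMergePolicyFactory factory =
        new UninvertDocValuesMergePolicyFactory(resourceLoader, args, indexSchema);
    MergePolicy mergePolicy = factory.getMergePolicy(); // wraps the default policy with the uninverting merge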