Posted to commits@lucene.apache.org by ab...@apache.org on 2017/04/04 16:27:14 UTC

[01/14] lucene-solr:jira/solr-9959: SOLR-10383: reduce code duplication in TestOriginalScoreFeature

Repository: lucene-solr
Updated Branches:
  refs/heads/jira/solr-9959 4249c8a86 -> b9b707cce


SOLR-10383: reduce code duplication in TestOriginalScoreFeature


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/e875f135
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/e875f135
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/e875f135

Branch: refs/heads/jira/solr-9959
Commit: e875f135bee21484386160b258b0eb6f0d2b7738
Parents: 99af830
Author: Christine Poerschke <cp...@apache.org>
Authored: Mon Apr 3 12:10:09 2017 +0100
Committer: Christine Poerschke <cp...@apache.org>
Committed: Mon Apr 3 12:10:09 2017 +0100

----------------------------------------------------------------------
 .../ltr/feature/TestOriginalScoreFeature.java   | 113 +++++++++----------
 1 file changed, 56 insertions(+), 57 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e875f135/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestOriginalScoreFeature.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestOriginalScoreFeature.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestOriginalScoreFeature.java
index d651224..4eb9bea 100644
--- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestOriginalScoreFeature.java
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestOriginalScoreFeature.java
@@ -53,51 +53,10 @@ public class TestOriginalScoreFeature extends TestRerankBase {
   @Test
   public void testOriginalScore() throws Exception {
     loadFeature("score", OriginalScoreFeature.class.getCanonicalName(), "{}");
-
     loadModel("originalScore", LinearModel.class.getCanonicalName(),
         new String[] {"score"}, "{\"weights\":{\"score\":1.0}}");
 
-    final SolrQuery query = new SolrQuery();
-    query.setQuery("title:w1");
-    query.add("fl", "*, score");
-    query.add("rows", "4");
-    query.add("wt", "json");
-
-    // Normal term match
-    assertJQ("/query" + query.toQueryString(), "/response/numFound/==4");
-    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='1'");
-    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='8'");
-    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='6'");
-    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='7'");
-
-    final String res = restTestHarness.query("/query" + query.toQueryString());
-    final Map<String,Object> jsonParse = (Map<String,Object>) ObjectBuilder
-        .fromJSON(res);
-    final String doc0Score = ((Double) ((Map<String,Object>) ((ArrayList<Object>) ((Map<String,Object>) jsonParse
-        .get("response")).get("docs")).get(0)).get("score")).toString();
-    final String doc1Score = ((Double) ((Map<String,Object>) ((ArrayList<Object>) ((Map<String,Object>) jsonParse
-        .get("response")).get("docs")).get(1)).get("score")).toString();
-    final String doc2Score = ((Double) ((Map<String,Object>) ((ArrayList<Object>) ((Map<String,Object>) jsonParse
-        .get("response")).get("docs")).get(2)).get("score")).toString();
-    final String doc3Score = ((Double) ((Map<String,Object>) ((ArrayList<Object>) ((Map<String,Object>) jsonParse
-        .get("response")).get("docs")).get(3)).get("score")).toString();
-
-    query.add("fl", "[fv]");
-    query.add("rq", "{!ltr model=originalScore reRankDocs=4}");
-
-    assertJQ("/query" + query.toQueryString(), "/response/numFound/==4");
-    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='1'");
-    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score=="
-        + doc0Score);
-    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='8'");
-    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/score=="
-        + doc1Score);
-    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='6'");
-    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/score=="
-        + doc2Score);
-    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='7'");
-    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/score=="
-        + doc3Score);
+    implTestOriginalScoreResponseDocsCheck("originalScore", "score", null, null);
   }
 
   @Test
@@ -111,12 +70,29 @@ public class TestOriginalScoreFeature extends TestRerankBase {
         new String[] {"origScore"}, "store2",
         "{\"weights\":{\"origScore\":1.0}}");
 
+    implTestOriginalScoreResponseDocsCheck("origScore", "origScore", "c2", "2.0");
+  }
+
+  public static void implTestOriginalScoreResponseDocsCheck(String modelName,
+      String origScoreFeatureName,
+      String nonScoringFeatureName, String nonScoringFeatureValue) throws Exception {
+
     final SolrQuery query = new SolrQuery();
     query.setQuery("title:w1");
-    query.add("fl", "*, score, fv:[fv]");
+    query.add("fl", "*, score");
     query.add("rows", "4");
     query.add("wt", "json");
-    query.add("rq", "{!ltr model=origScore reRankDocs=4}");
+
+    final int doc0Id = 1;
+    final int doc1Id = 8;
+    final int doc2Id = 6;
+    final int doc3Id = 7;
+
+    assertJQ("/query" + query.toQueryString(), "/response/numFound/==4");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='"+doc0Id+"'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='"+doc1Id+"'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='"+doc2Id+"'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='"+doc3Id+"'");
 
     final String res = restTestHarness.query("/query" + query.toQueryString());
     final Map<String,Object> jsonParse = (Map<String,Object>) ObjectBuilder
@@ -130,20 +106,43 @@ public class TestOriginalScoreFeature extends TestRerankBase {
     final String doc3Score = ((Double) ((Map<String,Object>) ((ArrayList<Object>) ((Map<String,Object>) jsonParse
         .get("response")).get("docs")).get(3)).get("score")).toString();
 
+    final boolean debugQuery = false;
+
+    query.remove("fl");
+    query.add("fl", "*, score, fv:[fv]");
+    query.add("rq", "{!ltr model="+modelName+" reRankDocs=4}");
+
     assertJQ("/query" + query.toQueryString(), "/response/numFound/==4");
-    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='1'");
-    assertJQ("/query" + query.toQueryString(),
-        "/response/docs/[0]/fv=='" + FeatureLoggerTestUtils.toFeatureVector("origScore", doc0Score, "c2", "2.0")+"'");
-    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='8'");
-
-    assertJQ("/query" + query.toQueryString(),
-        "/response/docs/[1]/fv=='" + FeatureLoggerTestUtils.toFeatureVector("origScore", doc1Score, "c2", "2.0")+"'");
-    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='6'");
-    assertJQ("/query" + query.toQueryString(),
-        "/response/docs/[2]/fv=='" + FeatureLoggerTestUtils.toFeatureVector("origScore", doc2Score, "c2", "2.0")+"'");
-    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='7'");
-    assertJQ("/query" + query.toQueryString(),
-        "/response/docs/[3]/fv=='" + FeatureLoggerTestUtils.toFeatureVector("origScore", doc3Score, "c2", "2.0")+"'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='"+doc0Id+"'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='"+doc1Id+"'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='"+doc2Id+"'");
+    assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/id=='"+doc3Id+"'");
+
+    implTestOriginalScoreResponseDocsCheck(modelName, query, 0, doc0Id, origScoreFeatureName, doc0Score,
+        nonScoringFeatureName, nonScoringFeatureValue, debugQuery);
+    implTestOriginalScoreResponseDocsCheck(modelName, query, 1, doc1Id, origScoreFeatureName, doc1Score,
+        nonScoringFeatureName, nonScoringFeatureValue, debugQuery);
+    implTestOriginalScoreResponseDocsCheck(modelName, query, 2, doc2Id, origScoreFeatureName, doc2Score,
+        nonScoringFeatureName, nonScoringFeatureValue, debugQuery);
+    implTestOriginalScoreResponseDocsCheck(modelName, query, 3, doc3Id, origScoreFeatureName, doc3Score,
+        nonScoringFeatureName, nonScoringFeatureValue, debugQuery);
+  }
+
+  private static void implTestOriginalScoreResponseDocsCheck(String modelName,
+      SolrQuery query, int docIdx, int docId,
+      String origScoreFeatureName, String origScoreFeatureValue,
+      String nonScoringFeatureName, String nonScoringFeatureValue,
+      boolean debugQuery) throws Exception {
+
+    final String fv;
+    if (nonScoringFeatureName == null) {
+      fv = FeatureLoggerTestUtils.toFeatureVector(origScoreFeatureName, origScoreFeatureValue);
+    } else {
+      fv = FeatureLoggerTestUtils.toFeatureVector(origScoreFeatureName, origScoreFeatureValue, nonScoringFeatureName, nonScoringFeatureValue);
+    }
+
+    assertJQ("/query" + query.toQueryString(), "/response/docs/["+docIdx+"]/fv=='"+fv+"'");
+    // TODO: use debugQuery
   }
 
 }


[02/14] lucene-solr:jira/solr-9959: SOLR-10383: Fix debug related NullPointerException in solr/contrib/ltr OriginalScoreFeature class. (Vitezslav Zak, Christine Poerschke)

Posted by ab...@apache.org.
SOLR-10383: Fix debug related NullPointerException in solr/contrib/ltr OriginalScoreFeature class.
(Vitezslav Zak, Christine Poerschke)
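
Judging from the diff below, OriginalScoreFeature's getDocInfo() can return null when the feature is scored outside the normal re-ranking pass, for example while Solr builds the debugQuery/explain output, so the fix guards that lookup. A minimal sketch of a query that exercises the debug path, mirroring the updated test (the field, model and parameter values are the test's own, not a general recipe):

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.common.params.CommonParams;

    public class LtrDebugQueryRepro {
      public static void main(String[] args) {
        // Re-ranked LTR query with debugQuery=true; before this fix, producing the
        // explain output for OriginalScoreFeature could dereference a null DocInfo.
        final SolrQuery query = new SolrQuery();
        query.setQuery("title:w1");
        query.add("fl", "*, score, fv:[fv]");
        query.add("rows", "4");
        query.add("rq", "{!ltr model=origScore reRankDocs=4}");
        query.add(CommonParams.DEBUG_QUERY, "true");
        // Prints the query string that the test appends to "/query".
        System.out.println(query.toQueryString());
      }
    }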


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/186c5edd
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/186c5edd
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/186c5edd

Branch: refs/heads/jira/solr-9959
Commit: 186c5edd63fe292388ad435bc1cbb1a32a3a3824
Parents: e875f13
Author: Christine Poerschke <cp...@apache.org>
Authored: Mon Apr 3 13:01:16 2017 +0100
Committer: Christine Poerschke <cp...@apache.org>
Committed: Mon Apr 3 13:01:16 2017 +0100

----------------------------------------------------------------------
 solr/CHANGES.txt                                         |  8 ++++++++
 .../apache/solr/ltr/feature/OriginalScoreFeature.java    |  2 +-
 .../solr/ltr/feature/TestOriginalScoreFeature.java       | 11 +++++++++--
 3 files changed, 18 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/186c5edd/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index e30824f..cd4f7f5 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -188,6 +188,14 @@ Other Changes
 
 * SOLR-9601: Redone DataImportHandler 'tika' example, removing all unused and irrelevant definitions (Alexandre Rafalovitch)
 
+==================  6.5.1 ==================
+
+Bug Fixes
+----------------------
+
+* SOLR-10383: Fix debug related NullPointerException in solr/contrib/ltr OriginalScoreFeature class.
+  (Vitezslav Zak, Christine Poerschke)
+
 ==================  6.5.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/186c5edd/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/OriginalScoreFeature.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/OriginalScoreFeature.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/OriginalScoreFeature.java
index 549880b..85fb8fd 100644
--- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/OriginalScoreFeature.java
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/feature/OriginalScoreFeature.java
@@ -104,7 +104,7 @@ public class OriginalScoreFeature extends Feature {
         // was already scored in step 1
         // we shouldn't need to calc original score again.
         final DocInfo docInfo = getDocInfo();
-        return (docInfo.hasOriginalDocScore() ? docInfo.getOriginalDocScore() : originalScorer.score());
+        return (docInfo != null && docInfo.hasOriginalDocScore() ? docInfo.getOriginalDocScore() : originalScorer.score());
       }
 
       @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/186c5edd/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestOriginalScoreFeature.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestOriginalScoreFeature.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestOriginalScoreFeature.java
index 4eb9bea..f4c0df1 100644
--- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestOriginalScoreFeature.java
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestOriginalScoreFeature.java
@@ -20,6 +20,7 @@ import java.util.ArrayList;
 import java.util.Map;
 
 import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.ltr.FeatureLoggerTestUtils;
 import org.apache.solr.ltr.TestRerankBase;
 import org.apache.solr.ltr.model.LinearModel;
@@ -106,7 +107,10 @@ public class TestOriginalScoreFeature extends TestRerankBase {
     final String doc3Score = ((Double) ((Map<String,Object>) ((ArrayList<Object>) ((Map<String,Object>) jsonParse
         .get("response")).get("docs")).get(3)).get("score")).toString();
 
-    final boolean debugQuery = false;
+    final boolean debugQuery = random().nextBoolean();
+    if (debugQuery) {
+      query.add(CommonParams.DEBUG_QUERY, "true");
+    }
 
     query.remove("fl");
     query.add("fl", "*, score, fv:[fv]");
@@ -142,7 +146,10 @@ public class TestOriginalScoreFeature extends TestRerankBase {
     }
 
     assertJQ("/query" + query.toQueryString(), "/response/docs/["+docIdx+"]/fv=='"+fv+"'");
-    // TODO: use debugQuery
+    if (debugQuery) {
+      assertJQ("/query" + query.toQueryString(),
+          "/debug/explain/"+docId+"=='\n"+origScoreFeatureValue+" = LinearModel(name="+modelName+",featureWeights=["+origScoreFeatureName+"=1.0]) model applied to features, sum of:\n  "+origScoreFeatureValue+" = prod of:\n    1.0 = weight on feature\n    "+origScoreFeatureValue+" = OriginalScoreFeature [query:"+query.getQuery()+"]\n'");
+    }
   }
 
 }


[10/14] lucene-solr:jira/solr-9959: LUCENE-7756: Only record the major Lucene version that created the index, and record the minimum Lucene version that contributed to segments.

Posted by ab...@apache.org.
LUCENE-7756: Only record the major Lucene version that created the index, and record the minimum Lucene version that contributed to segments.
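
In practice this means an index now remembers only the major version that created it, while each segment additionally remembers the oldest Lucene version that contributed documents to it. A minimal sketch of reading both with the APIs touched in this change (the directory path is a placeholder argument):

    import java.nio.file.Paths;

    import org.apache.lucene.index.SegmentCommitInfo;
    import org.apache.lucene.index.SegmentInfos;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.FSDirectory;

    public class ShowIndexVersions {
      public static void main(String[] args) throws Exception {
        try (Directory dir = FSDirectory.open(Paths.get(args[0]))) {
          SegmentInfos infos = SegmentInfos.readLatestCommit(dir);
          // Major Lucene version that originally created this index, e.g. 6 or 7.
          System.out.println("created by major version: " + infos.getIndexCreatedVersionMajor());
          for (SegmentCommitInfo sci : infos) {
            // Per segment: the version that wrote it, and the oldest version that
            // contributed to it (null for segments written before this change).
            System.out.println(sci.info.name + " version=" + sci.info.getVersion()
                + " minVersion=" + sci.info.getMinVersion());
          }
        }
      }
    }

The diff below also pins a freshly created index to the source index's major version via new SegmentInfos(createdMajor).commit(destDir) before addIndexes, and exposes the same per-segment information (plus the index sort) through the new LeafReader.getMetaData() / LeafMetaData API.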


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/23b002a0
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/23b002a0
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/23b002a0

Branch: refs/heads/jira/solr-9959
Commit: 23b002a0fdf2f6025f1eb026c0afca247fb21ed0
Parents: 3f172a0
Author: Adrien Grand <jp...@gmail.com>
Authored: Thu Mar 30 09:12:45 2017 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Tue Apr 4 09:57:16 2017 +0200

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |   7 +-
 .../lucene50/Lucene50SegmentInfoFormat.java     |   2 +-
 .../lucene/codecs/lucene62/Lucene62Codec.java   |   2 +-
 .../apache/lucene/index/FixBrokenOffsets.java   |   3 +
 .../lucene50/Lucene50RWSegmentInfoFormat.java   |   2 +-
 .../lucene50/TestLucene50SegmentInfoFormat.java |  10 +
 .../lucene53/TestLucene53NormsFormat.java       |   6 +
 .../lucene/codecs/lucene62/Lucene62RWCodec.java |  12 +
 .../lucene62/Lucene62RWSegmentInfoFormat.java   | 193 ++++++++
 .../lucene62/TestLucene62SegmentInfoFormat.java |  48 ++
 .../index/TestBackwardsCompatibility.java       |  69 ++-
 .../lucene/index/TestFixBrokenOffsets.java      |  10 +-
 .../lucene/index/TestIndexWriterOnOldIndex.java |   6 +-
 .../simpletext/SimpleTextSegmentInfoFormat.java |  29 +-
 .../lucene62/Lucene62SegmentInfoFormat.java     | 152 +------
 .../lucene/codecs/lucene70/Lucene70Codec.java   |   3 +-
 .../lucene70/Lucene70SegmentInfoFormat.java     | 439 +++++++++++++++++++
 .../org/apache/lucene/index/CheckIndex.java     |   2 +-
 .../lucene/index/DocumentsWriterPerThread.java  |   2 +-
 .../apache/lucene/index/FilterCodecReader.java  |   5 +-
 .../apache/lucene/index/FilterLeafReader.java   |   5 +-
 .../org/apache/lucene/index/IndexWriter.java    |  54 ++-
 .../org/apache/lucene/index/LeafMetaData.java   |  74 ++++
 .../org/apache/lucene/index/LeafReader.java     |   7 +-
 .../apache/lucene/index/MergeReaderWrapper.java |   5 +-
 .../org/apache/lucene/index/MergeState.java     |   2 +-
 .../apache/lucene/index/ParallelLeafReader.java |  36 +-
 .../apache/lucene/index/ReadersAndUpdates.java  |   4 +-
 .../org/apache/lucene/index/SegmentInfo.java    |  19 +-
 .../org/apache/lucene/index/SegmentInfos.java   |  77 ++--
 .../org/apache/lucene/index/SegmentMerger.java  |  14 +
 .../org/apache/lucene/index/SegmentReader.java  |  10 +-
 .../lucene/index/SlowCodecReaderWrapper.java    |   5 +-
 .../lucene/index/StandardDirectoryReader.java   |   4 +-
 .../EarlyTerminatingSortingCollector.java       |   2 +-
 .../lucene62/TestLucene62SegmentInfoFormat.java |  39 --
 .../lucene70/TestLucene70SegmentInfoFormat.java |  35 ++
 .../org/apache/lucene/index/TestCodecs.java     |   4 +-
 .../index/TestDemoParallelLeafReader.java       |   3 +-
 .../test/org/apache/lucene/index/TestDoc.java   |   8 +-
 .../apache/lucene/index/TestDocumentWriter.java |   9 +-
 .../apache/lucene/index/TestIndexSorting.java   |   2 +-
 .../apache/lucene/index/TestIndexWriter.java    |   2 +-
 .../index/TestIndexWriterThreadsToSegments.java |   3 +-
 .../index/TestOneMergeWrappingMergePolicy.java  |   1 +
 .../apache/lucene/index/TestSegmentInfos.java   |  22 +-
 .../apache/lucene/index/TestSegmentMerger.java  |   7 +-
 .../apache/lucene/index/TestSegmentReader.java  |   3 +-
 .../lucene/index/TestSegmentTermDocs.java       |   7 +-
 .../search/highlight/TermVectorLeafReader.java  |   7 +-
 .../apache/lucene/index/memory/MemoryIndex.java |   6 +-
 .../org/apache/lucene/index/IndexSplitter.java  |   4 +-
 .../lucene/replicator/nrt/ReplicaNode.java      |   2 +-
 .../index/BaseCompoundFormatTestCase.java       |   3 +-
 .../index/BaseFieldInfoFormatTestCase.java      |   3 +-
 .../index/BaseIndexFileFormatTestCase.java      |  28 +-
 .../lucene/index/BaseNormsFormatTestCase.java   |   6 +-
 .../index/BaseSegmentInfoFormatTestCase.java    |  54 ++-
 .../lucene/index/RandomPostingsTester.java      |   2 +-
 .../org/apache/lucene/search/QueryUtils.java    |   6 +-
 .../solr/index/SlowCompositeReaderWrapper.java  |  19 +-
 .../test/org/apache/solr/search/TestDocSet.java |   7 +-
 62 files changed, 1208 insertions(+), 404 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 83113a8..1f3f30c 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -7,9 +7,12 @@ http://s.apache.org/luceneversions
 
 New Features
 
-* LUCENE-7703: SegmentInfos now record the Lucene version at index creation
-  time. (Adrien Grand)
+* LUCENE-7703: SegmentInfos now record the major Lucene version at index
+  creation time. (Adrien Grand)
 
+* LUCENE-7756: LeafReader.getMetaData now exposes the index created version as
+  well as the oldest Lucene version that contributed to the segment.
+  (Adrien Grand)
 
 API Changes
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene50/Lucene50SegmentInfoFormat.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene50/Lucene50SegmentInfoFormat.java b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene50/Lucene50SegmentInfoFormat.java
index 69cda34..d2a384e 100644
--- a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene50/Lucene50SegmentInfoFormat.java
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene50/Lucene50SegmentInfoFormat.java
@@ -65,7 +65,7 @@ public class Lucene50SegmentInfoFormat extends SegmentInfoFormat {
         final Set<String> files = input.readSetOfStrings();
         final Map<String,String> attributes = input.readMapOfStrings();
         
-        si = new SegmentInfo(dir, version, segment, docCount, isCompoundFile, null, diagnostics, segmentID, attributes, null);
+        si = new SegmentInfo(dir, version, null, segment, docCount, isCompoundFile, null, diagnostics, segmentID, attributes, null);
         si.setFiles(files);
       } catch (Throwable exception) {
         priorE = exception;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene62/Lucene62Codec.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene62/Lucene62Codec.java b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene62/Lucene62Codec.java
index 58b07eb..3dd7daa 100644
--- a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene62/Lucene62Codec.java
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene62/Lucene62Codec.java
@@ -114,7 +114,7 @@ public class Lucene62Codec extends Codec {
   }
   
   @Override
-  public final SegmentInfoFormat segmentInfoFormat() {
+  public SegmentInfoFormat segmentInfoFormat() {
     return segmentInfosFormat;
   }
   

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/backward-codecs/src/java/org/apache/lucene/index/FixBrokenOffsets.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/java/org/apache/lucene/index/FixBrokenOffsets.java b/lucene/backward-codecs/src/java/org/apache/lucene/index/FixBrokenOffsets.java
index e775a28..9b3615e 100644
--- a/lucene/backward-codecs/src/java/org/apache/lucene/index/FixBrokenOffsets.java
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/index/FixBrokenOffsets.java
@@ -128,6 +128,9 @@ public class FixBrokenOffsets {
     }
 
     Directory destDir = FSDirectory.open(destPath);
+    // We need to maintain the same major version
+    int createdMajor = SegmentInfos.readLatestCommit(srcDir).getIndexCreatedVersionMajor();
+    new SegmentInfos(createdMajor).commit(destDir);
     IndexWriter writer = new IndexWriter(destDir, new IndexWriterConfig());
     writer.addIndexes(filtered);
     IOUtils.close(writer, reader, srcDir, destDir);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene50/Lucene50RWSegmentInfoFormat.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene50/Lucene50RWSegmentInfoFormat.java b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene50/Lucene50RWSegmentInfoFormat.java
index 965ee96..4bed311 100644
--- a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene50/Lucene50RWSegmentInfoFormat.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene50/Lucene50RWSegmentInfoFormat.java
@@ -65,7 +65,7 @@ public class Lucene50RWSegmentInfoFormat extends Lucene50SegmentInfoFormat {
         final Set<String> files = input.readSetOfStrings();
         final Map<String,String> attributes = input.readMapOfStrings();
         
-        si = new SegmentInfo(dir, version, segment, docCount, isCompoundFile, null, diagnostics, segmentID, attributes, null);
+        si = new SegmentInfo(dir, version, null, segment, docCount, isCompoundFile, null, diagnostics, segmentID, attributes, null);
         si.setFiles(files);
       } catch (Throwable exception) {
         priorE = exception;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene50/TestLucene50SegmentInfoFormat.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene50/TestLucene50SegmentInfoFormat.java b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene50/TestLucene50SegmentInfoFormat.java
index 688afed..0a9bf79 100644
--- a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene50/TestLucene50SegmentInfoFormat.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene50/TestLucene50SegmentInfoFormat.java
@@ -29,6 +29,11 @@ public class TestLucene50SegmentInfoFormat extends BaseSegmentInfoFormatTestCase
   }
 
   @Override
+  protected int getCreatedVersionMajor() {
+    return Version.LUCENE_6_0_0.major;
+  }
+
+  @Override
   protected Version[] getVersions() {
     return new Version[] { Version.LUCENE_6_0_0 };
   }
@@ -37,4 +42,9 @@ public class TestLucene50SegmentInfoFormat extends BaseSegmentInfoFormatTestCase
   protected boolean supportsIndexSort() {
     return false;
   }
+
+  @Override
+  protected boolean supportsMinVersion() {
+    return false;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene53/TestLucene53NormsFormat.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene53/TestLucene53NormsFormat.java b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene53/TestLucene53NormsFormat.java
index 80a8eee..7d37b45 100644
--- a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene53/TestLucene53NormsFormat.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene53/TestLucene53NormsFormat.java
@@ -19,6 +19,7 @@ package org.apache.lucene.codecs.lucene53;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.lucene62.Lucene62RWCodec;
 import org.apache.lucene.index.BaseNormsFormatTestCase;
+import org.apache.lucene.util.Version;
 
 /**
  * Tests Lucene53NormsFormat
@@ -27,6 +28,11 @@ public class TestLucene53NormsFormat extends BaseNormsFormatTestCase {
   private final Codec codec = new Lucene62RWCodec();
 
   @Override
+  protected int getCreatedVersionMajor() {
+    return Version.LUCENE_6_2_0.major;
+  }
+
+  @Override
   protected Codec getCodec() {
     return codec;
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene62/Lucene62RWCodec.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene62/Lucene62RWCodec.java b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene62/Lucene62RWCodec.java
index fcb414d..34d3a7f 100644
--- a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene62/Lucene62RWCodec.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene62/Lucene62RWCodec.java
@@ -17,14 +17,26 @@
 package org.apache.lucene.codecs.lucene62;
 
 import org.apache.lucene.codecs.NormsFormat;
+import org.apache.lucene.codecs.SegmentInfoFormat;
 import org.apache.lucene.codecs.lucene53.Lucene53RWNormsFormat;
 import org.apache.lucene.codecs.lucene62.Lucene62Codec;
 
+/**
+ * Read-write version of 6.2 codec for testing
+ * @deprecated for test purposes only
+ */
+@Deprecated
 public class Lucene62RWCodec extends Lucene62Codec {
 
+  private final SegmentInfoFormat segmentInfoFormat = new Lucene62RWSegmentInfoFormat();
   private final NormsFormat normsFormat = new Lucene53RWNormsFormat();
 
   @Override
+  public SegmentInfoFormat segmentInfoFormat() {
+    return segmentInfoFormat;
+  }
+  
+  @Override
   public NormsFormat normsFormat() {
     return normsFormat;
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene62/Lucene62RWSegmentInfoFormat.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene62/Lucene62RWSegmentInfoFormat.java b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene62/Lucene62RWSegmentInfoFormat.java
new file mode 100644
index 0000000..f2fbe9d
--- /dev/null
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene62/Lucene62RWSegmentInfoFormat.java
@@ -0,0 +1,193 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.codecs.lucene62;
+
+import java.io.IOException;
+import java.util.Set;
+
+import org.apache.lucene.codecs.CodecUtil;
+import org.apache.lucene.index.IndexFileNames;
+import org.apache.lucene.index.SegmentInfo;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.SortedNumericSelector;
+import org.apache.lucene.search.SortedNumericSortField;
+import org.apache.lucene.search.SortedSetSelector;
+import org.apache.lucene.search.SortedSetSortField;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.util.Version;
+
+/**
+ * Read-write version of 6.2 SegmentInfoFormat for testing
+ * @deprecated for test purposes only
+ */
+@Deprecated
+public class Lucene62RWSegmentInfoFormat extends Lucene62SegmentInfoFormat {
+
+  @Override
+  public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException {
+    final String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene62SegmentInfoFormat.SI_EXTENSION);
+
+    try (IndexOutput output = dir.createOutput(fileName, ioContext)) {
+      // Only add the file once we've successfully created it, else IFD assert can trip:
+      si.addFile(fileName);
+      CodecUtil.writeIndexHeader(output,
+                                   Lucene62SegmentInfoFormat.CODEC_NAME,
+                                   Lucene62SegmentInfoFormat.VERSION_CURRENT,
+                                   si.getId(),
+                                   "");
+      Version version = si.getVersion();
+      if (version.major < 5) {
+        throw new IllegalArgumentException("invalid major version: should be >= 5 but got: " + version.major + " segment=" + si);
+      }
+      // Write the Lucene version that created this segment, since 3.1
+      output.writeInt(version.major);
+      output.writeInt(version.minor);
+      output.writeInt(version.bugfix);
+      assert version.prerelease == 0;
+      output.writeInt(si.maxDoc());
+
+      output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO));
+      output.writeMapOfStrings(si.getDiagnostics());
+      Set<String> files = si.files();
+      for (String file : files) {
+        if (!IndexFileNames.parseSegmentName(file).equals(si.name)) {
+          throw new IllegalArgumentException("invalid files: expected segment=" + si.name + ", got=" + files);
+        }
+      }
+      output.writeSetOfStrings(files);
+      output.writeMapOfStrings(si.getAttributes());
+
+      Sort indexSort = si.getIndexSort();
+      int numSortFields = indexSort == null ? 0 : indexSort.getSort().length;
+      output.writeVInt(numSortFields);
+      for (int i = 0; i < numSortFields; ++i) {
+        SortField sortField = indexSort.getSort()[i];
+        SortField.Type sortType = sortField.getType();
+        output.writeString(sortField.getField());
+        int sortTypeID;
+        switch (sortField.getType()) {
+          case STRING:
+            sortTypeID = 0;
+            break;
+          case LONG:
+            sortTypeID = 1;
+            break;
+          case INT:
+            sortTypeID = 2;
+            break;
+          case DOUBLE:
+            sortTypeID = 3;
+            break;
+          case FLOAT:
+            sortTypeID = 4;
+            break;
+          case CUSTOM:
+            if (sortField instanceof SortedSetSortField) {
+              sortTypeID = 5;
+              sortType = SortField.Type.STRING;
+            } else if (sortField instanceof SortedNumericSortField) {
+              sortTypeID = 6;
+              sortType = ((SortedNumericSortField) sortField).getNumericType();
+            } else {
+              throw new IllegalStateException("Unexpected SortedNumericSortField " + sortField);
+            }
+            break;
+          default:
+            throw new IllegalStateException("Unexpected sort type: " + sortField.getType());
+        }
+        output.writeVInt(sortTypeID);
+        if (sortTypeID == 5) {
+          SortedSetSortField ssf = (SortedSetSortField) sortField;
+          if (ssf.getSelector() == SortedSetSelector.Type.MIN) {
+            output.writeByte((byte) 0);
+          } else if (ssf.getSelector() == SortedSetSelector.Type.MAX) {
+            output.writeByte((byte) 1);
+          } else if (ssf.getSelector() == SortedSetSelector.Type.MIDDLE_MIN) {
+            output.writeByte((byte) 2);
+          } else if (ssf.getSelector() == SortedSetSelector.Type.MIDDLE_MAX) {
+            output.writeByte((byte) 3);
+          } else {
+            throw new IllegalStateException("Unexpected SortedSetSelector type: " + ssf.getSelector());
+          }
+        } else if (sortTypeID == 6) {
+          SortedNumericSortField snsf = (SortedNumericSortField) sortField;
+          if (snsf.getNumericType() == SortField.Type.LONG) {
+            output.writeByte((byte) 0);
+          } else if (snsf.getNumericType() == SortField.Type.INT) {
+            output.writeByte((byte) 1);
+          } else if (snsf.getNumericType() == SortField.Type.DOUBLE) {
+            output.writeByte((byte) 2);
+          } else if (snsf.getNumericType() == SortField.Type.FLOAT) {
+            output.writeByte((byte) 3);
+          } else {
+            throw new IllegalStateException("Unexpected SortedNumericSelector type: " + snsf.getNumericType());
+          }
+          if (snsf.getSelector() == SortedNumericSelector.Type.MIN) {
+            output.writeByte((byte) 0);
+          } else if (snsf.getSelector() == SortedNumericSelector.Type.MAX) {
+            output.writeByte((byte) 1);
+          } else {
+            throw new IllegalStateException("Unexpected sorted numeric selector type: " + snsf.getSelector());
+          }
+        }
+        output.writeByte((byte) (sortField.getReverse() ? 0 : 1));
+
+        // write missing value 
+        Object missingValue = sortField.getMissingValue();
+        if (missingValue == null) {
+          output.writeByte((byte) 0);
+        } else {
+          switch(sortType) {
+          case STRING:
+            if (missingValue == SortField.STRING_LAST) {
+              output.writeByte((byte) 1);
+            } else if (missingValue == SortField.STRING_FIRST) {
+              output.writeByte((byte) 2);
+            } else {
+              throw new AssertionError("unrecognized missing value for STRING field \"" + sortField.getField() + "\": " + missingValue);
+            }
+            break;
+          case LONG:
+            output.writeByte((byte) 1);
+            output.writeLong(((Long) missingValue).longValue());
+            break;
+          case INT:
+            output.writeByte((byte) 1);
+            output.writeInt(((Integer) missingValue).intValue());
+            break;
+          case DOUBLE:
+            output.writeByte((byte) 1);
+            output.writeLong(Double.doubleToLongBits(((Double) missingValue).doubleValue()));
+            break;
+          case FLOAT:
+            output.writeByte((byte) 1);
+            output.writeInt(Float.floatToIntBits(((Float) missingValue).floatValue()));
+            break;
+          default:
+            throw new IllegalStateException("Unexpected sort type: " + sortField.getType());
+          }
+        }
+      }
+
+      CodecUtil.writeFooter(output);
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene62/TestLucene62SegmentInfoFormat.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene62/TestLucene62SegmentInfoFormat.java b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene62/TestLucene62SegmentInfoFormat.java
new file mode 100644
index 0000000..e0efa95
--- /dev/null
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene62/TestLucene62SegmentInfoFormat.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.codecs.lucene62;
+
+import org.apache.lucene.codecs.Codec;
+import org.apache.lucene.index.BaseSegmentInfoFormatTestCase;
+import org.apache.lucene.util.Version;
+
+/**
+ * Tests Lucene62SegmentInfoFormat
+ */
+public class TestLucene62SegmentInfoFormat extends BaseSegmentInfoFormatTestCase {
+
+  @Override
+  protected int getCreatedVersionMajor() {
+    return Version.LUCENE_6_2_0.major;
+  }
+
+  @Override
+  protected Version[] getVersions() {
+    return new Version[] { Version.LUCENE_6_2_0 };
+  }
+
+  @Override
+  protected Codec getCodec() {
+    return new Lucene62RWCodec();
+  }
+
+  @Override
+  protected boolean supportsMinVersion() {
+    return false;
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
index 8e87dcc..f180b47 100644
--- a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
@@ -693,10 +693,18 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
         System.out.println("\nTEST: index=" + name);
       }
       Directory dir = newDirectory(oldIndexDirs.get(name));
+
+      final SegmentInfos oldSegInfos = SegmentInfos.readLatestCommit(dir);
+
       IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())));
       w.forceMerge(1);
       w.close();
-      
+
+      final SegmentInfos segInfos = SegmentInfos.readLatestCommit(dir);
+      assertEquals(oldSegInfos.getIndexCreatedVersionMajor(), segInfos.getIndexCreatedVersionMajor());
+      assertEquals(Version.LATEST, segInfos.asList().get(0).info.getVersion());
+      assertEquals(oldSegInfos.asList().get(0).info.getMinVersion(), segInfos.asList().get(0).info.getMinVersion());
+
       dir.close();
     }
   }
@@ -707,26 +715,30 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
         System.out.println("\nTEST: old index " + name);
       }
       Directory oldDir = oldIndexDirs.get(name);
-      Version indexCreatedVersion = SegmentInfos.readLatestCommit(oldDir).getIndexCreatedVersion();
+      SegmentInfos infos = SegmentInfos.readLatestCommit(oldDir);
 
       Directory targetDir = newDirectory();
-      // Simulate writing into an index that was created on the same version
-      new SegmentInfos(indexCreatedVersion).commit(targetDir);
+      if (infos.getCommitLuceneVersion().major != Version.LATEST.major) {
+        // both indexes are not compatible
+        Directory targetDir2 = newDirectory();
+        IndexWriter w = new IndexWriter(targetDir2, newIndexWriterConfig(new MockAnalyzer(random())));
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> w.addIndexes(oldDir));
+        assertTrue(e.getMessage(), e.getMessage().startsWith("Cannot use addIndexes(Directory) with indexes that have been created by a different Lucene version."));
+        w.close();
+        targetDir2.close();
+
+        // for the next test, we simulate writing to an index that was created on the same major version
+        new SegmentInfos(infos.getIndexCreatedVersionMajor()).commit(targetDir);
+      }
+
       IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(new MockAnalyzer(random())));
       w.addIndexes(oldDir);
       w.close();
       targetDir.close();
 
-      // Now check that we forbid calling addIndexes with a different version
-      targetDir = newDirectory();
-      IndexWriter oldWriter = new IndexWriter(targetDir, newIndexWriterConfig(new MockAnalyzer(random())));
-      IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> oldWriter.addIndexes(oldDir));
-      assertTrue(e.getMessage(), e.getMessage().startsWith("Cannot use addIndexes(Directory) with indexes that have been created by a different Lucene version."));
-
       if (VERBOSE) {
         System.out.println("\nTEST: done adding indices; now close");
       }
-      oldWriter.close();
       
       targetDir.close();
     }
@@ -734,9 +746,22 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
 
   public void testAddOldIndexesReader() throws IOException {
     for (String name : oldNames) {
-      DirectoryReader reader = DirectoryReader.open(oldIndexDirs.get(name));
+      Directory oldDir = oldIndexDirs.get(name);
+      SegmentInfos infos = SegmentInfos.readLatestCommit(oldDir);
+      DirectoryReader reader = DirectoryReader.open(oldDir);
       
       Directory targetDir = newDirectory();
+      if (infos.getCommitLuceneVersion().major != Version.LATEST.major) {
+        Directory targetDir2 = newDirectory();
+        IndexWriter w = new IndexWriter(targetDir2, newIndexWriterConfig(new MockAnalyzer(random())));
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> TestUtil.addIndexesSlowly(w, reader));
+        assertEquals(e.getMessage(), "Cannot merge a segment that has been created with major version 6 into this index which has been created by major version 7");
+        w.close();
+        targetDir2.close();
+
+        // for the next test, we simulate writing to an index that was created on the same major version
+        new SegmentInfos(infos.getIndexCreatedVersionMajor()).commit(targetDir);
+      }
       IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(new MockAnalyzer(random())));
       TestUtil.addIndexesSlowly(w, reader);
       w.close();
@@ -1245,11 +1270,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
       SegmentInfos infos = SegmentInfos.readLatestCommit(dir);
       // those indexes are created by a single version so we can
       // compare the commit version with the created version
-      if (infos.getCommitLuceneVersion().onOrAfter(Version.LUCENE_7_0_0)) {
-        assertEquals(infos.getCommitLuceneVersion(), infos.getIndexCreatedVersion());
-      } else {
-        assertNull(infos.getIndexCreatedVersion());
-      }
+      assertEquals(infos.getCommitLuceneVersion().major, infos.getIndexCreatedVersionMajor());
     }
   }
 
@@ -1316,7 +1337,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     }
   }
   
-  private int checkAllSegmentsUpgraded(Directory dir, Version indexCreatedVersion) throws IOException {
+  private int checkAllSegmentsUpgraded(Directory dir, int indexCreatedVersion) throws IOException {
     final SegmentInfos infos = SegmentInfos.readLatestCommit(dir);
     if (VERBOSE) {
       System.out.println("checkAllSegmentsUpgraded: " + infos);
@@ -1325,7 +1346,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
       assertEquals(Version.LATEST, si.info.getVersion());
     }
     assertEquals(Version.LATEST, infos.getCommitLuceneVersion());
-    assertEquals(indexCreatedVersion, infos.getIndexCreatedVersion());
+    assertEquals(indexCreatedVersion, infos.getIndexCreatedVersionMajor());
     return infos.size();
   }
   
@@ -1343,7 +1364,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
         System.out.println("testUpgradeOldIndex: index=" +name);
       }
       Directory dir = newDirectory(oldIndexDirs.get(name));
-      Version indexCreatedVersion = SegmentInfos.readLatestCommit(dir).getIndexCreatedVersion();
+      int indexCreatedVersion = SegmentInfos.readLatestCommit(dir).getIndexCreatedVersionMajor();
 
       newIndexUpgrader(dir).upgrade();
 
@@ -1360,7 +1381,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     try {
       for (Map.Entry<String,Directory> entry : oldIndexDirs.entrySet()) {
         String name = entry.getKey();
-        Version indexCreatedVersion = SegmentInfos.readLatestCommit(entry.getValue()).getIndexCreatedVersion();
+        int indexCreatedVersion = SegmentInfos.readLatestCommit(entry.getValue()).getIndexCreatedVersionMajor();
         Path dir = createTempDir(name);
         TestUtil.unzip(getDataInputStream("index." + name + ".zip"), dir);
         
@@ -1413,7 +1434,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
       }
       Directory dir = newDirectory(oldIndexDirs.get(name));
       assertEquals("Original index must be single segment", 1, getNumberOfSegments(dir));
-      Version indexCreatedVersion = SegmentInfos.readLatestCommit(dir).getIndexCreatedVersion();
+      int indexCreatedVersion = SegmentInfos.readLatestCommit(dir).getIndexCreatedVersionMajor();
 
       // create a bunch of dummy segments
       int id = 40;
@@ -1472,7 +1493,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
 
     newIndexUpgrader(dir).upgrade();
 
-    checkAllSegmentsUpgraded(dir, null);
+    checkAllSegmentsUpgraded(dir, 6);
     
     dir.close();
   }
@@ -1598,7 +1619,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
 
       DirectoryReader reader = DirectoryReader.open(dir);
       assertEquals(1, reader.leaves().size());
-      Sort sort = reader.leaves().get(0).reader().getIndexSort();
+      Sort sort = reader.leaves().get(0).reader().getMetaData().getSort();
       assertNotNull(sort);
       assertEquals("<long: \"dateDV\">!", sort.toString());
       reader.close();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/backward-codecs/src/test/org/apache/lucene/index/TestFixBrokenOffsets.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestFixBrokenOffsets.java b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestFixBrokenOffsets.java
index 917785e..46b30d3 100644
--- a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestFixBrokenOffsets.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestFixBrokenOffsets.java
@@ -16,7 +16,6 @@
  */
 package org.apache.lucene.index;
 
-import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.file.Path;
@@ -94,14 +93,11 @@ public class TestFixBrokenOffsets extends LuceneTestCase {
     for(int i=0;i<leaves.size();i++) {
       codecReaders[i] = (CodecReader) leaves.get(i).reader();
     }
-    w.addIndexes(codecReaders);
+    IndexWriter finalW2 = w;
+    e = expectThrows(IllegalArgumentException.class, () -> finalW2.addIndexes(codecReaders));
+    assertEquals("Cannot merge a segment that has been created with major version 6 into this index which has been created by major version 7", e.getMessage());
     reader.close();
     w.close();
-
-    // NOT OK: broken offsets were copied into a 7.0 segment:
-    ByteArrayOutputStream output = new ByteArrayOutputStream(1024);    
-    RuntimeException re = expectThrows(RuntimeException.class, () -> {TestUtil.checkIndex(tmpDir2, false, true, output);});
-    assertEquals("term [66 6f 6f]: doc 0: pos 1: startOffset 7 < lastStartOffset 10; consider using the FixBrokenOffsets tool in Lucene's backward-codecs module to correct your index", re.getMessage());
     tmpDir2.close();
 
     // Now run the tool and confirm the broken offsets are fixed:

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/backward-codecs/src/test/org/apache/lucene/index/TestIndexWriterOnOldIndex.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestIndexWriterOnOldIndex.java b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestIndexWriterOnOldIndex.java
index 73d933a..c77b926 100644
--- a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestIndexWriterOnOldIndex.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestIndexWriterOnOldIndex.java
@@ -36,16 +36,16 @@ public class TestIndexWriterOnOldIndex extends LuceneTestCase {
     Directory dir = newFSDirectory(path);
     for (OpenMode openMode : OpenMode.values()) {
       Directory tmpDir = newDirectory(dir);
-      assertEquals(null /** 6.3.0 */, SegmentInfos.readLatestCommit(tmpDir).getIndexCreatedVersion());
+      assertEquals(6 /** 6.3.0 */, SegmentInfos.readLatestCommit(tmpDir).getIndexCreatedVersionMajor());
       IndexWriter w = new IndexWriter(tmpDir, newIndexWriterConfig().setOpenMode(openMode));
       w.commit();
       w.close();
       switch (openMode) {
         case CREATE:
-          assertEquals(Version.LATEST, SegmentInfos.readLatestCommit(tmpDir).getIndexCreatedVersion());
+          assertEquals(Version.LATEST.major, SegmentInfos.readLatestCommit(tmpDir).getIndexCreatedVersionMajor());
           break;
         default:
-          assertEquals(null /** 6.3.0 */, SegmentInfos.readLatestCommit(tmpDir).getIndexCreatedVersion());
+          assertEquals(6 /** 6.3.0 */, SegmentInfos.readLatestCommit(tmpDir).getIndexCreatedVersionMajor());
       }
       tmpDir.close();
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfoFormat.java
----------------------------------------------------------------------
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfoFormat.java b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfoFormat.java
index 3d38d72..8a71c6d 100644
--- a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfoFormat.java
+++ b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfoFormat.java
@@ -55,6 +55,7 @@ import org.apache.lucene.util.Version;
  */
 public class SimpleTextSegmentInfoFormat extends SegmentInfoFormat {
   final static BytesRef SI_VERSION          = new BytesRef("    version ");
+  final static BytesRef SI_MIN_VERSION      = new BytesRef("    min version ");
   final static BytesRef SI_DOCCOUNT         = new BytesRef("    number of documents ");
   final static BytesRef SI_USECOMPOUND      = new BytesRef("    uses compound file ");
   final static BytesRef SI_NUM_DIAG         = new BytesRef("    diagnostics ");
@@ -88,7 +89,21 @@ public class SimpleTextSegmentInfoFormat extends SegmentInfoFormat {
       } catch (ParseException pe) {
         throw new CorruptIndexException("unable to parse version string: " + pe.getMessage(), input, pe);
       }
-    
+
+      SimpleTextUtil.readLine(input, scratch);
+      assert StringHelper.startsWith(scratch.get(), SI_MIN_VERSION);
+      Version minVersion;
+      try {
+        String versionString = readString(SI_MIN_VERSION.length, scratch);
+        if (versionString.equals("null")) {
+          minVersion = null;
+        } else {
+          minVersion = Version.parse(versionString);
+        }
+      } catch (ParseException pe) {
+        throw new CorruptIndexException("unable to parse version string: " + pe.getMessage(), input, pe);
+      }
+
       SimpleTextUtil.readLine(input, scratch);
       assert StringHelper.startsWith(scratch.get(), SI_DOCCOUNT);
       final int docCount = Integer.parseInt(readString(SI_DOCCOUNT.length, scratch));
@@ -288,7 +303,7 @@ public class SimpleTextSegmentInfoFormat extends SegmentInfoFormat {
 
       SimpleTextUtil.checkFooter(input);
 
-      SegmentInfo info = new SegmentInfo(directory, version, segmentName, docCount,
+      SegmentInfo info = new SegmentInfo(directory, version, minVersion, segmentName, docCount,
                                          isCompoundFile, null, Collections.unmodifiableMap(diagnostics),
                                          id, Collections.unmodifiableMap(attributes), indexSort);
       info.setFiles(files);
@@ -345,7 +360,15 @@ public class SimpleTextSegmentInfoFormat extends SegmentInfoFormat {
       SimpleTextUtil.write(output, SI_VERSION);
       SimpleTextUtil.write(output, si.getVersion().toString(), scratch);
       SimpleTextUtil.writeNewline(output);
-    
+
+      SimpleTextUtil.write(output, SI_MIN_VERSION);
+      if (si.getMinVersion() == null) {
+        SimpleTextUtil.write(output, "null", scratch);
+      } else {
+        SimpleTextUtil.write(output, si.getMinVersion().toString(), scratch);
+      }
+      SimpleTextUtil.writeNewline(output);
+
       SimpleTextUtil.write(output, SI_DOCCOUNT);
       SimpleTextUtil.write(output, Integer.toString(si.maxDoc()), scratch);
       SimpleTextUtil.writeNewline(output);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/java/org/apache/lucene/codecs/lucene62/Lucene62SegmentInfoFormat.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene62/Lucene62SegmentInfoFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene62/Lucene62SegmentInfoFormat.java
index da6e395..e91da3b 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene62/Lucene62SegmentInfoFormat.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene62/Lucene62SegmentInfoFormat.java
@@ -37,7 +37,6 @@ import org.apache.lucene.store.ChecksumIndexInput;
 import org.apache.lucene.store.DataOutput; // javadocs
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
-import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.util.Version;
 
 /**
@@ -244,7 +243,7 @@ public class Lucene62SegmentInfoFormat extends SegmentInfoFormat {
           indexSort = null;
         }
 
-        si = new SegmentInfo(dir, version, segment, docCount, isCompoundFile, null, diagnostics, segmentID, attributes, indexSort);
+        si = new SegmentInfo(dir, version, null, segment, docCount, isCompoundFile, null, diagnostics, segmentID, attributes, indexSort);
         si.setFiles(files);
       } catch (Throwable exception) {
         priorE = exception;
@@ -256,153 +255,8 @@ public class Lucene62SegmentInfoFormat extends SegmentInfoFormat {
   }
 
   @Override
-  public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException {
-    final String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene62SegmentInfoFormat.SI_EXTENSION);
-
-    try (IndexOutput output = dir.createOutput(fileName, ioContext)) {
-      // Only add the file once we've successfully created it, else IFD assert can trip:
-      si.addFile(fileName);
-      CodecUtil.writeIndexHeader(output,
-                                   Lucene62SegmentInfoFormat.CODEC_NAME,
-                                   Lucene62SegmentInfoFormat.VERSION_CURRENT,
-                                   si.getId(),
-                                   "");
-      Version version = si.getVersion();
-      if (version.major < 5) {
-        throw new IllegalArgumentException("invalid major version: should be >= 5 but got: " + version.major + " segment=" + si);
-      }
-      // Write the Lucene version that created this segment, since 3.1
-      output.writeInt(version.major);
-      output.writeInt(version.minor);
-      output.writeInt(version.bugfix);
-      assert version.prerelease == 0;
-      output.writeInt(si.maxDoc());
-
-      output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO));
-      output.writeMapOfStrings(si.getDiagnostics());
-      Set<String> files = si.files();
-      for (String file : files) {
-        if (!IndexFileNames.parseSegmentName(file).equals(si.name)) {
-          throw new IllegalArgumentException("invalid files: expected segment=" + si.name + ", got=" + files);
-        }
-      }
-      output.writeSetOfStrings(files);
-      output.writeMapOfStrings(si.getAttributes());
-
-      Sort indexSort = si.getIndexSort();
-      int numSortFields = indexSort == null ? 0 : indexSort.getSort().length;
-      output.writeVInt(numSortFields);
-      for (int i = 0; i < numSortFields; ++i) {
-        SortField sortField = indexSort.getSort()[i];
-        SortField.Type sortType = sortField.getType();
-        output.writeString(sortField.getField());
-        int sortTypeID;
-        switch (sortField.getType()) {
-          case STRING:
-            sortTypeID = 0;
-            break;
-          case LONG:
-            sortTypeID = 1;
-            break;
-          case INT:
-            sortTypeID = 2;
-            break;
-          case DOUBLE:
-            sortTypeID = 3;
-            break;
-          case FLOAT:
-            sortTypeID = 4;
-            break;
-          case CUSTOM:
-            if (sortField instanceof SortedSetSortField) {
-              sortTypeID = 5;
-              sortType = SortField.Type.STRING;
-            } else if (sortField instanceof SortedNumericSortField) {
-              sortTypeID = 6;
-              sortType = ((SortedNumericSortField) sortField).getNumericType();
-            } else {
-              throw new IllegalStateException("Unexpected SortedNumericSortField " + sortField);
-            }
-            break;
-          default:
-            throw new IllegalStateException("Unexpected sort type: " + sortField.getType());
-        }
-        output.writeVInt(sortTypeID);
-        if (sortTypeID == 5) {
-          SortedSetSortField ssf = (SortedSetSortField) sortField;
-          if (ssf.getSelector() == SortedSetSelector.Type.MIN) {
-            output.writeByte((byte) 0);
-          } else if (ssf.getSelector() == SortedSetSelector.Type.MAX) {
-            output.writeByte((byte) 1);
-          } else if (ssf.getSelector() == SortedSetSelector.Type.MIDDLE_MIN) {
-            output.writeByte((byte) 2);
-          } else if (ssf.getSelector() == SortedSetSelector.Type.MIDDLE_MAX) {
-            output.writeByte((byte) 3);
-          } else {
-            throw new IllegalStateException("Unexpected SortedSetSelector type: " + ssf.getSelector());
-          }
-        } else if (sortTypeID == 6) {
-          SortedNumericSortField snsf = (SortedNumericSortField) sortField;
-          if (snsf.getNumericType() == SortField.Type.LONG) {
-            output.writeByte((byte) 0);
-          } else if (snsf.getNumericType() == SortField.Type.INT) {
-            output.writeByte((byte) 1);
-          } else if (snsf.getNumericType() == SortField.Type.DOUBLE) {
-            output.writeByte((byte) 2);
-          } else if (snsf.getNumericType() == SortField.Type.FLOAT) {
-            output.writeByte((byte) 3);
-          } else {
-            throw new IllegalStateException("Unexpected SortedNumericSelector type: " + snsf.getNumericType());
-          }
-          if (snsf.getSelector() == SortedNumericSelector.Type.MIN) {
-            output.writeByte((byte) 0);
-          } else if (snsf.getSelector() == SortedNumericSelector.Type.MAX) {
-            output.writeByte((byte) 1);
-          } else {
-            throw new IllegalStateException("Unexpected sorted numeric selector type: " + snsf.getSelector());
-          }
-        }
-        output.writeByte((byte) (sortField.getReverse() ? 0 : 1));
-
-        // write missing value 
-        Object missingValue = sortField.getMissingValue();
-        if (missingValue == null) {
-          output.writeByte((byte) 0);
-        } else {
-          switch(sortType) {
-          case STRING:
-            if (missingValue == SortField.STRING_LAST) {
-              output.writeByte((byte) 1);
-            } else if (missingValue == SortField.STRING_FIRST) {
-              output.writeByte((byte) 2);
-            } else {
-              throw new AssertionError("unrecognized missing value for STRING field \"" + sortField.getField() + "\": " + missingValue);
-            }
-            break;
-          case LONG:
-            output.writeByte((byte) 1);
-            output.writeLong(((Long) missingValue).longValue());
-            break;
-          case INT:
-            output.writeByte((byte) 1);
-            output.writeInt(((Integer) missingValue).intValue());
-            break;
-          case DOUBLE:
-            output.writeByte((byte) 1);
-            output.writeLong(Double.doubleToLongBits(((Double) missingValue).doubleValue()));
-            break;
-          case FLOAT:
-            output.writeByte((byte) 1);
-            output.writeInt(Float.floatToIntBits(((Float) missingValue).floatValue()));
-            break;
-          default:
-            throw new IllegalStateException("Unexpected sort type: " + sortField.getType());
-          }
-        }
-      }
-
-      CodecUtil.writeFooter(output);
-    }
+  public void write(Directory dir, SegmentInfo info, IOContext ioContext) throws IOException {
+    throw new UnsupportedOperationException("This format can only be used for reading");
   }
 
   /** File extension used to store {@link SegmentInfo}. */

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70Codec.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70Codec.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70Codec.java
index 7f9aed0..d04d554 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70Codec.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70Codec.java
@@ -37,7 +37,6 @@ import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
 import org.apache.lucene.codecs.lucene50.Lucene50TermVectorsFormat;
 import org.apache.lucene.codecs.lucene60.Lucene60FieldInfosFormat;
 import org.apache.lucene.codecs.lucene60.Lucene60PointsFormat;
-import org.apache.lucene.codecs.lucene62.Lucene62SegmentInfoFormat;
 import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat;
 import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat;
 
@@ -55,7 +54,7 @@ import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat;
 public class Lucene70Codec extends Codec {
   private final TermVectorsFormat vectorsFormat = new Lucene50TermVectorsFormat();
   private final FieldInfosFormat fieldInfosFormat = new Lucene60FieldInfosFormat();
-  private final SegmentInfoFormat segmentInfosFormat = new Lucene62SegmentInfoFormat();
+  private final SegmentInfoFormat segmentInfosFormat = new Lucene70SegmentInfoFormat();
   private final LiveDocsFormat liveDocsFormat = new Lucene50LiveDocsFormat();
   private final CompoundFormat compoundFormat = new Lucene50CompoundFormat();
   

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70SegmentInfoFormat.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70SegmentInfoFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70SegmentInfoFormat.java
new file mode 100644
index 0000000..bd2bf06
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70SegmentInfoFormat.java
@@ -0,0 +1,439 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.codecs.lucene70;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.lucene.codecs.CodecUtil;
+import org.apache.lucene.codecs.SegmentInfoFormat;
+import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.index.IndexFileNames;
+import org.apache.lucene.index.IndexWriter; // javadocs
+import org.apache.lucene.index.SegmentInfo; // javadocs
+import org.apache.lucene.index.SegmentInfos; // javadocs
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.SortedNumericSelector;
+import org.apache.lucene.search.SortedNumericSortField;
+import org.apache.lucene.search.SortedSetSelector;
+import org.apache.lucene.search.SortedSetSortField;
+import org.apache.lucene.store.ChecksumIndexInput;
+import org.apache.lucene.store.DataOutput; // javadocs
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.util.Version;
+
+/**
+ * Lucene 7.0 Segment info format.
+ * <p>
+ * Files:
+ * <ul>
+ *   <li><tt>.si</tt>: Header, SegVersion, SegMinVersion, SegSize, IsCompoundFile, Diagnostics, Files, Attributes, IndexSort, Footer
+ * </ul>
+ * Data types:
+ * <ul>
+ *   <li>Header --&gt; {@link CodecUtil#writeIndexHeader IndexHeader}</li>
+ *   <li>SegSize --&gt; {@link DataOutput#writeInt Int32}</li>
+ *   <li>SegVersion --&gt; {@link DataOutput#writeString String}</li>
+ *   <li>SegMinVersion --&gt; {@link DataOutput#writeString String}</li>
+ *   <li>Files --&gt; {@link DataOutput#writeSetOfStrings Set&lt;String&gt;}</li>
+ *   <li>Diagnostics,Attributes --&gt; {@link DataOutput#writeMapOfStrings Map&lt;String,String&gt;}</li>
+ *   <li>IsCompoundFile --&gt; {@link DataOutput#writeByte Int8}</li>
+ *   <li>IndexSort --&gt; {@link DataOutput#writeVInt Int32} count, followed by {@code count} SortField</li>
+ *   <li>SortField --&gt; {@link DataOutput#writeString String} field name, followed by {@link DataOutput#writeVInt Int32} sort type ID,
+ *       followed by {@link DataOutput#writeByte Int8} indicating reversed sort, followed by a type-specific encoding of the optional missing value
+ *   <li>Footer --&gt; {@link CodecUtil#writeFooter CodecFooter}</li>
+ * </ul>
+ * Field Descriptions:
+ * <ul>
+ *   <li>SegVersion is the code version that created the segment.</li>
+ *   <li>SegMinVersion is the minimum code version that contributed documents to the segment.</li>
+ *   <li>SegSize is the number of documents contained in the segment index.</li>
+ *   <li>IsCompoundFile records whether the segment is written as a compound file or
+ *       not. If this is -1, the segment is not a compound file. If it is 1, the segment
+ *       is a compound file.</li>
+ *   <li>The Diagnostics Map is privately written by {@link IndexWriter}, as a debugging aid,
+ *       for each segment it creates. It includes metadata like the current Lucene
+ *       version, OS, Java version, why the segment was created (merge, flush,
+ *       addIndexes), etc.</li>
+ *   <li>Files is a list of files referred to by this segment.</li>
+ * </ul>
+ *
+ * @see SegmentInfos
+ * @lucene.experimental
+ */
+public class Lucene70SegmentInfoFormat extends SegmentInfoFormat {
+
+  /** Sole constructor. */
+  public Lucene70SegmentInfoFormat() {
+  }
+
+  @Override
+  public SegmentInfo read(Directory dir, String segment, byte[] segmentID, IOContext context) throws IOException {
+    final String fileName = IndexFileNames.segmentFileName(segment, "", Lucene70SegmentInfoFormat.SI_EXTENSION);
+    try (ChecksumIndexInput input = dir.openChecksumInput(fileName, context)) {
+      Throwable priorE = null;
+      SegmentInfo si = null;
+      try {
+        int format = CodecUtil.checkIndexHeader(input, Lucene70SegmentInfoFormat.CODEC_NAME,
+                                                Lucene70SegmentInfoFormat.VERSION_START,
+                                                Lucene70SegmentInfoFormat.VERSION_CURRENT,
+                                                segmentID, "");
+        final Version version = Version.fromBits(input.readInt(), input.readInt(), input.readInt());
+        byte hasMinVersion = input.readByte();
+        final Version minVersion;
+        switch (hasMinVersion) {
+          case 0:
+            minVersion = null;
+            break;
+          case 1:
+            minVersion = Version.fromBits(input.readInt(), input.readInt(), input.readInt());
+            break;
+          default:
+            throw new CorruptIndexException("Illegal boolean value " + hasMinVersion, input);
+        }
+
+        final int docCount = input.readInt();
+        if (docCount < 0) {
+          throw new CorruptIndexException("invalid docCount: " + docCount, input);
+        }
+        final boolean isCompoundFile = input.readByte() == SegmentInfo.YES;
+
+        final Map<String,String> diagnostics = input.readMapOfStrings();
+        final Set<String> files = input.readSetOfStrings();
+        final Map<String,String> attributes = input.readMapOfStrings();
+
+        int numSortFields = input.readVInt();
+        Sort indexSort;
+        if (numSortFields > 0) {
+          SortField[] sortFields = new SortField[numSortFields];
+          for(int i=0;i<numSortFields;i++) {
+            String fieldName = input.readString();
+            int sortTypeID = input.readVInt();
+            SortField.Type sortType;
+            SortedSetSelector.Type sortedSetSelector = null;
+            SortedNumericSelector.Type sortedNumericSelector = null;
+            switch(sortTypeID) {
+            case 0:
+              sortType = SortField.Type.STRING;
+              break;
+            case 1:
+              sortType = SortField.Type.LONG;
+              break;
+            case 2:
+              sortType = SortField.Type.INT;
+              break;
+            case 3:
+              sortType = SortField.Type.DOUBLE;
+              break;
+            case 4:
+              sortType = SortField.Type.FLOAT;
+              break;
+            case 5:
+              sortType = SortField.Type.STRING;
+              byte selector = input.readByte();
+              if (selector == 0) {
+                sortedSetSelector = SortedSetSelector.Type.MIN;
+              } else if (selector == 1) {
+                sortedSetSelector = SortedSetSelector.Type.MAX;
+              } else if (selector == 2) {
+                sortedSetSelector = SortedSetSelector.Type.MIDDLE_MIN;
+              } else if (selector == 3) {
+                sortedSetSelector = SortedSetSelector.Type.MIDDLE_MAX;
+              } else {
+                throw new CorruptIndexException("invalid index SortedSetSelector ID: " + selector, input);
+              }
+              break;
+            case 6:
+              byte type = input.readByte();
+              if (type == 0) {
+                sortType = SortField.Type.LONG;
+              } else if (type == 1) {
+                sortType = SortField.Type.INT;
+              } else if (type == 2) {
+                sortType = SortField.Type.DOUBLE;
+              } else if (type == 3) {
+                sortType = SortField.Type.FLOAT;
+              } else {
+                throw new CorruptIndexException("invalid index SortedNumericSortField type ID: " + type, input);
+              }
+              byte numericSelector = input.readByte();
+              if (numericSelector == 0) {
+                sortedNumericSelector = SortedNumericSelector.Type.MIN;
+              } else if (numericSelector == 1) {
+                sortedNumericSelector = SortedNumericSelector.Type.MAX;
+              } else {
+                throw new CorruptIndexException("invalid index SortedNumericSelector ID: " + numericSelector, input);
+              }
+              break;
+            default:
+              throw new CorruptIndexException("invalid index sort field type ID: " + sortTypeID, input);
+            }
+            byte b = input.readByte();
+            boolean reverse;
+            if (b == 0) {
+              reverse = true;
+            } else if (b == 1) {
+              reverse = false;
+            } else {
+              throw new CorruptIndexException("invalid index sort reverse: " + b, input);
+            }
+
+            if (sortedSetSelector != null) {
+              sortFields[i] = new SortedSetSortField(fieldName, reverse, sortedSetSelector);
+            } else if (sortedNumericSelector != null) {
+              sortFields[i] = new SortedNumericSortField(fieldName, sortType, reverse, sortedNumericSelector);
+            } else {
+              sortFields[i] = new SortField(fieldName, sortType, reverse);
+            }
+
+            Object missingValue;
+            b = input.readByte();
+            if (b == 0) {
+              missingValue = null;
+            } else {
+              switch(sortType) {
+              case STRING:
+                if (b == 1) {
+                  missingValue = SortField.STRING_LAST;
+                } else if (b == 2) {
+                  missingValue = SortField.STRING_FIRST;
+                } else {
+                  throw new CorruptIndexException("invalid missing value flag: " + b, input);
+                }
+                break;
+              case LONG:
+                if (b != 1) {
+                  throw new CorruptIndexException("invalid missing value flag: " + b, input);
+                }
+                missingValue = input.readLong();
+                break;
+              case INT:
+                if (b != 1) {
+                  throw new CorruptIndexException("invalid missing value flag: " + b, input);
+                }
+                missingValue = input.readInt();
+                break;
+              case DOUBLE:
+                if (b != 1) {
+                  throw new CorruptIndexException("invalid missing value flag: " + b, input);
+                }
+                missingValue = Double.longBitsToDouble(input.readLong());
+                break;
+              case FLOAT:
+                if (b != 1) {
+                  throw new CorruptIndexException("invalid missing value flag: " + b, input);
+                }
+                missingValue = Float.intBitsToFloat(input.readInt());
+                break;
+              default:
+                throw new AssertionError("unhandled sortType=" + sortType);
+              }
+            }
+            if (missingValue != null) {
+              sortFields[i].setMissingValue(missingValue);
+            }
+          }
+          indexSort = new Sort(sortFields);
+        } else if (numSortFields < 0) {
+          throw new CorruptIndexException("invalid index sort field count: " + numSortFields, input);
+        } else {
+          indexSort = null;
+        }
+
+        si = new SegmentInfo(dir, version, minVersion, segment, docCount, isCompoundFile, null, diagnostics, segmentID, attributes, indexSort);
+        si.setFiles(files);
+      } catch (Throwable exception) {
+        priorE = exception;
+      } finally {
+        CodecUtil.checkFooter(input, priorE);
+      }
+      return si;
+    }
+  }
+
+  @Override
+  public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException {
+    final String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene70SegmentInfoFormat.SI_EXTENSION);
+
+    try (IndexOutput output = dir.createOutput(fileName, ioContext)) {
+      // Only add the file once we've successfully created it, else IFD assert can trip:
+      si.addFile(fileName);
+      CodecUtil.writeIndexHeader(output,
+                                   Lucene70SegmentInfoFormat.CODEC_NAME,
+                                   Lucene70SegmentInfoFormat.VERSION_CURRENT,
+                                   si.getId(),
+                                   "");
+      Version version = si.getVersion();
+      if (version.major < 7) {
+        throw new IllegalArgumentException("invalid major version: should be >= 7 but got: " + version.major + " segment=" + si);
+      }
+      // Write the Lucene version that created this segment, since 3.1
+      output.writeInt(version.major);
+      output.writeInt(version.minor);
+      output.writeInt(version.bugfix);
+
+      // Write the min Lucene version that contributed docs to the segment, since 7.0
+      if (si.getMinVersion() != null) {
+        output.writeByte((byte) 1);
+        Version minVersion = si.getMinVersion();
+        output.writeInt(minVersion.major);
+        output.writeInt(minVersion.minor);
+        output.writeInt(minVersion.bugfix);
+      } else {
+        output.writeByte((byte) 0);
+      }
+
+      assert version.prerelease == 0;
+      output.writeInt(si.maxDoc());
+
+      output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO));
+      output.writeMapOfStrings(si.getDiagnostics());
+      Set<String> files = si.files();
+      for (String file : files) {
+        if (!IndexFileNames.parseSegmentName(file).equals(si.name)) {
+          throw new IllegalArgumentException("invalid files: expected segment=" + si.name + ", got=" + files);
+        }
+      }
+      output.writeSetOfStrings(files);
+      output.writeMapOfStrings(si.getAttributes());
+
+      Sort indexSort = si.getIndexSort();
+      int numSortFields = indexSort == null ? 0 : indexSort.getSort().length;
+      output.writeVInt(numSortFields);
+      for (int i = 0; i < numSortFields; ++i) {
+        SortField sortField = indexSort.getSort()[i];
+        SortField.Type sortType = sortField.getType();
+        output.writeString(sortField.getField());
+        int sortTypeID;
+        switch (sortField.getType()) {
+          case STRING:
+            sortTypeID = 0;
+            break;
+          case LONG:
+            sortTypeID = 1;
+            break;
+          case INT:
+            sortTypeID = 2;
+            break;
+          case DOUBLE:
+            sortTypeID = 3;
+            break;
+          case FLOAT:
+            sortTypeID = 4;
+            break;
+          case CUSTOM:
+            if (sortField instanceof SortedSetSortField) {
+              sortTypeID = 5;
+              sortType = SortField.Type.STRING;
+            } else if (sortField instanceof SortedNumericSortField) {
+              sortTypeID = 6;
+              sortType = ((SortedNumericSortField) sortField).getNumericType();
+            } else {
+              throw new IllegalStateException("Unexpected SortedNumericSortField " + sortField);
+            }
+            break;
+          default:
+            throw new IllegalStateException("Unexpected sort type: " + sortField.getType());
+        }
+        output.writeVInt(sortTypeID);
+        if (sortTypeID == 5) {
+          SortedSetSortField ssf = (SortedSetSortField) sortField;
+          if (ssf.getSelector() == SortedSetSelector.Type.MIN) {
+            output.writeByte((byte) 0);
+          } else if (ssf.getSelector() == SortedSetSelector.Type.MAX) {
+            output.writeByte((byte) 1);
+          } else if (ssf.getSelector() == SortedSetSelector.Type.MIDDLE_MIN) {
+            output.writeByte((byte) 2);
+          } else if (ssf.getSelector() == SortedSetSelector.Type.MIDDLE_MAX) {
+            output.writeByte((byte) 3);
+          } else {
+            throw new IllegalStateException("Unexpected SortedSetSelector type: " + ssf.getSelector());
+          }
+        } else if (sortTypeID == 6) {
+          SortedNumericSortField snsf = (SortedNumericSortField) sortField;
+          if (snsf.getNumericType() == SortField.Type.LONG) {
+            output.writeByte((byte) 0);
+          } else if (snsf.getNumericType() == SortField.Type.INT) {
+            output.writeByte((byte) 1);
+          } else if (snsf.getNumericType() == SortField.Type.DOUBLE) {
+            output.writeByte((byte) 2);
+          } else if (snsf.getNumericType() == SortField.Type.FLOAT) {
+            output.writeByte((byte) 3);
+          } else {
+            throw new IllegalStateException("Unexpected SortedNumericSelector type: " + snsf.getNumericType());
+          }
+          if (snsf.getSelector() == SortedNumericSelector.Type.MIN) {
+            output.writeByte((byte) 0);
+          } else if (snsf.getSelector() == SortedNumericSelector.Type.MAX) {
+            output.writeByte((byte) 1);
+          } else {
+            throw new IllegalStateException("Unexpected sorted numeric selector type: " + snsf.getSelector());
+          }
+        }
+        output.writeByte((byte) (sortField.getReverse() ? 0 : 1));
+
+        // write missing value 
+        Object missingValue = sortField.getMissingValue();
+        if (missingValue == null) {
+          output.writeByte((byte) 0);
+        } else {
+          switch(sortType) {
+          case STRING:
+            if (missingValue == SortField.STRING_LAST) {
+              output.writeByte((byte) 1);
+            } else if (missingValue == SortField.STRING_FIRST) {
+              output.writeByte((byte) 2);
+            } else {
+              throw new AssertionError("unrecognized missing value for STRING field \"" + sortField.getField() + "\": " + missingValue);
+            }
+            break;
+          case LONG:
+            output.writeByte((byte) 1);
+            output.writeLong(((Long) missingValue).longValue());
+            break;
+          case INT:
+            output.writeByte((byte) 1);
+            output.writeInt(((Integer) missingValue).intValue());
+            break;
+          case DOUBLE:
+            output.writeByte((byte) 1);
+            output.writeLong(Double.doubleToLongBits(((Double) missingValue).doubleValue()));
+            break;
+          case FLOAT:
+            output.writeByte((byte) 1);
+            output.writeInt(Float.floatToIntBits(((Float) missingValue).floatValue()));
+            break;
+          default:
+            throw new IllegalStateException("Unexpected sort type: " + sortField.getType());
+          }
+        }
+      }
+
+      CodecUtil.writeFooter(output);
+    }
+  }
+
+  /** File extension used to store {@link SegmentInfo}. */
+  public final static String SI_EXTENSION = "si";
+  static final String CODEC_NAME = "Lucene70SegmentInfo";
+  static final int VERSION_START = 0;
+  static final int VERSION_CURRENT = VERSION_START;
+}
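
For context, a small usage sketch of the new format through the codec-level read API (the dir, segmentID and segment name values are assumed inputs, not part of the patch). A segment written by this format always records a minimum version, so a null return only occurs for segments read through older formats:

    SegmentInfoFormat format = new Lucene70SegmentInfoFormat();
    SegmentInfo si = format.read(dir, "_0", segmentID, IOContext.READ);
    Version minVersion = si.getMinVersion();  // non-null for segments written by Lucene70SegmentInfoFormat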

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
index f3bdfb0..c7ad0f4 100644
--- a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
+++ b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
@@ -697,7 +697,7 @@ public final class CheckIndex implements Closeable {
         long startOpenReaderNS = System.nanoTime();
         if (infoStream != null)
           infoStream.print("    test: open reader.........");
-        reader = new SegmentReader(info, IOContext.DEFAULT);
+        reader = new SegmentReader(info, sis.getIndexCreatedVersionMajor(), IOContext.DEFAULT);
         msg(infoStream, String.format(Locale.ROOT, "OK [took %.3f sec]", nsToSec(System.nanoTime()-startOpenReaderNS)));
 
         segInfoStat.openReaderPassed = true;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java
index 48901e5..ed50650 100644
--- a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java
+++ b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java
@@ -178,7 +178,7 @@ class DocumentsWriterPerThread {
     assert numDocsInRAM == 0 : "num docs " + numDocsInRAM;
     deleteSlice = deleteQueue.newSlice();
    
-    segmentInfo = new SegmentInfo(directoryOrig, Version.LATEST, segmentName, -1, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), indexWriterConfig.getIndexSort());
+    segmentInfo = new SegmentInfo(directoryOrig, Version.LATEST, Version.LATEST, segmentName, -1, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), indexWriterConfig.getIndexSort());
     assert numDocsInRAM == 0;
     if (INFO_VERBOSE && infoStream.isEnabled("DWPT")) {
       infoStream.message("DWPT", Thread.currentThread().getName() + " init seg=" + segmentName + " delQueue=" + deleteQueue);  

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/java/org/apache/lucene/index/FilterCodecReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/FilterCodecReader.java b/lucene/core/src/java/org/apache/lucene/index/FilterCodecReader.java
index 5949fca..fd36ecb 100644
--- a/lucene/core/src/java/org/apache/lucene/index/FilterCodecReader.java
+++ b/lucene/core/src/java/org/apache/lucene/index/FilterCodecReader.java
@@ -27,7 +27,6 @@ import org.apache.lucene.codecs.NormsProducer;
 import org.apache.lucene.codecs.PointsReader;
 import org.apache.lucene.codecs.StoredFieldsReader;
 import org.apache.lucene.codecs.TermVectorsReader;
-import org.apache.lucene.search.Sort;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.Bits;
 
@@ -104,8 +103,8 @@ public abstract class FilterCodecReader extends CodecReader {
   }
 
   @Override
-  public Sort getIndexSort() {
-    return in.getIndexSort();
+  public LeafMetaData getMetaData() {
+    return in.getMetaData();
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/java/org/apache/lucene/index/FilterLeafReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/FilterLeafReader.java b/lucene/core/src/java/org/apache/lucene/index/FilterLeafReader.java
index 0a3ec7f..f3d8112 100644
--- a/lucene/core/src/java/org/apache/lucene/index/FilterLeafReader.java
+++ b/lucene/core/src/java/org/apache/lucene/index/FilterLeafReader.java
@@ -20,7 +20,6 @@ package org.apache.lucene.index;
 import java.io.IOException;
 import java.util.Iterator;
 
-import org.apache.lucene.search.Sort;
 import org.apache.lucene.util.AttributeSource;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
@@ -398,9 +397,9 @@ public abstract class FilterLeafReader extends LeafReader {
   }
 
   @Override
-  public Sort getIndexSort() {
+  public LeafMetaData getMetaData() {
     ensureOpen();
-    return in.getIndexSort();
+    return in.getMetaData();
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
index 899643a..9a29150 100644
--- a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
@@ -30,7 +30,6 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map.Entry;
-import java.util.Objects;
 import java.util.Map;
 import java.util.Queue;
 import java.util.Set;
@@ -855,7 +854,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
         // against an index that's currently open for
         // searching.  In this case we write the next
         // segments_N file with no segments:
-        final SegmentInfos sis = new SegmentInfos(Version.LATEST);
+        final SegmentInfos sis = new SegmentInfos(Version.LATEST.major);
         try {
           final SegmentInfos previous = SegmentInfos.readLatestCommit(directory);
           sis.updateGenerationVersionAndCounter(previous);
@@ -2654,12 +2653,12 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
           infoStream.message("IW", "addIndexes: process directory " + dir);
         }
         SegmentInfos sis = SegmentInfos.readLatestCommit(dir); // read infos from dir
-        if (Objects.equals(segmentInfos.getIndexCreatedVersion(), sis.getIndexCreatedVersion()) == false) {
+        if (segmentInfos.getIndexCreatedVersionMajor() != sis.getIndexCreatedVersionMajor()) {
           throw new IllegalArgumentException("Cannot use addIndexes(Directory) with indexes that have been created "
-              + "by a different Lucene version. The current index was generated by "
-              + segmentInfos.getIndexCreatedVersion()
-              + " while one of the directories contains an index that was generated with "
-              + sis.getIndexCreatedVersion());
+              + "by a different Lucene version. The current index was generated by Lucene "
+              + segmentInfos.getIndexCreatedVersionMajor()
+              + " while one of the directories contains an index that was generated with Lucene "
+              + sis.getIndexCreatedVersionMajor());
         }
         totalMaxDoc += sis.totalMaxDoc();
         commits.add(sis);
@@ -2747,7 +2746,26 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
 
     return seqNo;
   }
-  
+
+  private void validateMergeReader(CodecReader leaf) {
+    LeafMetaData segmentMeta = leaf.getMetaData();
+    if (segmentInfos.getIndexCreatedVersionMajor() != segmentMeta.getCreatedVersionMajor()) {
+      throw new IllegalArgumentException("Cannot merge a segment that has been created with major version "
+          + segmentMeta.getCreatedVersionMajor() + " into this index which has been created by major version "
+          + segmentInfos.getIndexCreatedVersionMajor());
+    }
+
+    if (segmentInfos.getIndexCreatedVersionMajor() >= 7 && segmentMeta.getMinVersion() == null) {
+      throw new IllegalStateException("Indexes created on or after Lucene 7 must record the created version major, but " + leaf + " hides it");
+    }
+
+    Sort leafIndexSort = segmentMeta.getSort();
+    if (config.getIndexSort() != null && leafIndexSort != null
+        && config.getIndexSort().equals(leafIndexSort) == false) {
+      throw new IllegalArgumentException("cannot change index sort from " + leafIndexSort + " to " + config.getIndexSort());
+    }
+  }
+
   /**
    * Merges the provided indexes into this index.
    * 
@@ -2801,12 +2819,10 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
       flush(false, true);
 
       String mergedName = newSegmentName();
+
       for (CodecReader leaf : readers) {
         numDocs += leaf.numDocs();
-        Sort leafIndexSort = leaf.getIndexSort();
-        if (indexSort != null && leafIndexSort != null && indexSort.equals(leafIndexSort) == false) {
-          throw new IllegalArgumentException("cannot change index sort from " + leafIndexSort + " to " + indexSort);
-        }
+        validateMergeReader(leaf);
       }
       
       // Best-effort up front check:
@@ -2818,7 +2834,8 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
       // abortable so that IW.close(false) is able to stop it
       TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(directory);
 
-      SegmentInfo info = new SegmentInfo(directoryOrig, Version.LATEST, mergedName, -1,
+      // We set the min version to null for now, it will be set later by SegmentMerger
+      SegmentInfo info = new SegmentInfo(directoryOrig, Version.LATEST, null, mergedName, -1,
                                          false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), config.getIndexSort());
 
       SegmentMerger merger = new SegmentMerger(Arrays.asList(readers), info, infoStream, trackingDir,
@@ -2907,7 +2924,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
     
     //System.out.println("copy seg=" + info.info.name + " version=" + info.info.getVersion());
     // Same SI as before but we change directory and name
-    SegmentInfo newInfo = new SegmentInfo(directoryOrig, info.info.getVersion(), segName, info.info.maxDoc(),
+    SegmentInfo newInfo = new SegmentInfo(directoryOrig, info.info.getVersion(), info.info.getMinVersion(), segName, info.info.maxDoc(),
                                           info.info.getUseCompoundFile(), info.info.getCodec(), 
                                           info.info.getDiagnostics(), info.info.getId(), info.info.getAttributes(), info.info.getIndexSort());
     SegmentCommitInfo newInfoPerCommit = new SegmentCommitInfo(newInfo, info.getDelCount(), info.getDelGen(), 
@@ -4117,7 +4134,8 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
     // ConcurrentMergePolicy we keep deterministic segment
     // names.
     final String mergeSegmentName = newSegmentName();
-    SegmentInfo si = new SegmentInfo(directoryOrig, Version.LATEST, mergeSegmentName, -1, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), config.getIndexSort());
+    // We set the min version to null for now, it will be set later by SegmentMerger
+    SegmentInfo si = new SegmentInfo(directoryOrig, Version.LATEST, null, mergeSegmentName, -1, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), config.getIndexSort());
     Map<String,String> details = new HashMap<>();
     details.put("mergeMaxNumSegments", "" + merge.maxNumSegments);
     details.put("mergeFactor", Integer.toString(merge.segments.size()));
@@ -4322,7 +4340,9 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
       // Let the merge wrap readers
       List<CodecReader> mergeReaders = new ArrayList<>();
       for (SegmentReader reader : merge.readers) {
-        mergeReaders.add(merge.wrapForMerge(reader));
+        CodecReader wrappedReader = merge.wrapForMerge(reader);
+        validateMergeReader(wrappedReader);
+        mergeReaders.add(wrappedReader);
       }
       final SegmentMerger merger = new SegmentMerger(mergeReaders,
                                                      merge.info.info, infoStream, dirWrapper,
@@ -4608,7 +4628,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
 
   // For infoStream output
   synchronized SegmentInfos toLiveInfos(SegmentInfos sis) {
-    final SegmentInfos newSIS = new SegmentInfos(sis.getIndexCreatedVersion());
+    final SegmentInfos newSIS = new SegmentInfos(sis.getIndexCreatedVersionMajor());
     final Map<SegmentCommitInfo,SegmentCommitInfo> liveSIS = new HashMap<>();
     for(SegmentCommitInfo info : segmentInfos) {
       liveSIS.put(info, info);
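
One practical consequence of the validateMergeReader/addIndexes changes above, shown as a caller-side sketch (hypothetical writer and directory variables): mixing indexes created by different major versions now fails fast instead of producing a merged segment.

    try {
      writer.addIndexes(oldFormatDir);  // assumed: an index created by a different Lucene major version
    } catch (IllegalArgumentException e) {
      // thrown when getIndexCreatedVersionMajor() differs between the current index and the added one
    }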

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/java/org/apache/lucene/index/LeafMetaData.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/LeafMetaData.java b/lucene/core/src/java/org/apache/lucene/index/LeafMetaData.java
new file mode 100644
index 0000000..567d43e
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/index/LeafMetaData.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.index;
+
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.util.Version;
+
+/**
+ * Provides read-only metadata about a leaf.
+ * @lucene.experimental
+ */
+public final class LeafMetaData {
+
+  private final int createdVersionMajor;
+  private final Version minVersion;
+  private final Sort sort;
+
+  /** Expert: Sole constructor. Public for use by custom {@link LeafReader} impls. */
+  public LeafMetaData(int createdVersionMajor, Version minVersion, Sort sort) {
+    this.createdVersionMajor = createdVersionMajor;
+    if (createdVersionMajor > Version.LATEST.major) {
+      throw new IllegalArgumentException("createdVersionMajor is in the future: " + createdVersionMajor);
+    }
+    if (createdVersionMajor < 6) {
+      throw new IllegalArgumentException("createdVersionMajor must be >= 6, got: " + createdVersionMajor);
+    }
+    if (minVersion != null && minVersion.onOrAfter(Version.LUCENE_7_0_0) == false) {
+      throw new IllegalArgumentException("minVersion must be >= 7.0.0: " + minVersion);
+    }
+    if (createdVersionMajor >= 7 && minVersion == null) {
+      throw new IllegalArgumentException("minVersion must be set when createdVersionMajor is >= 7");
+    }
+    this.minVersion = minVersion;
+    this.sort = sort;
+  }
+
+  /** Get the Lucene version that created this index. This can be used to implement
+   *  backward compatibility on top of the codec API. A return value of {@code 6}
+   *  indicates that the created version is unknown. */
+  public int getCreatedVersionMajor() {
+    return createdVersionMajor;
+  }
+
+  /**
+   * Return the minimum Lucene version that contributed documents to this index,
+   * or {@code null} if this information is not available.
+   */
+  public Version getMinVersion() {
+    return minVersion;
+  }
+
+  /**
+   * Return the order in which documents from this index are sorted, or
+   * {@code null} if documents are in no particular order.
+   */
+  public Sort getSort() {
+    return sort;
+  }
+
+}
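
A consumer-side sketch of the new accessor (the leafReader variable is assumed): code that previously called LeafReader.getIndexSort() now reads the same information, plus the version data, from LeafMetaData.

    LeafMetaData meta = leafReader.getMetaData();
    Sort indexSort = meta.getSort();             // null if the leaf is unsorted
    Version minVersion = meta.getMinVersion();   // null if unknown (pre-7.0 segments)
    int createdMajor = meta.getCreatedVersionMajor();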

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/java/org/apache/lucene/index/LeafReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/LeafReader.java b/lucene/core/src/java/org/apache/lucene/index/LeafReader.java
index 13c8646..c738bc5 100644
--- a/lucene/core/src/java/org/apache/lucene/index/LeafReader.java
+++ b/lucene/core/src/java/org/apache/lucene/index/LeafReader.java
@@ -19,7 +19,6 @@ package org.apache.lucene.index;
 import java.io.IOException;
 
 import org.apache.lucene.index.IndexReader.CacheHelper;
-import org.apache.lucene.search.Sort;
 import org.apache.lucene.util.Bits;
 
 /** {@code LeafReader} is an abstract class, providing an interface for accessing an
@@ -246,6 +245,8 @@ public abstract class LeafReader extends IndexReader {
    */
   public abstract void checkIntegrity() throws IOException;
 
-  /** Returns null if this leaf is unsorted, or the {@link Sort} that it was sorted by */
-  public abstract Sort getIndexSort();
+  /**
+   * Return metadata about this leaf.
+   * @lucene.experimental */
+  public abstract LeafMetaData getMetaData();
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/java/org/apache/lucene/index/MergeReaderWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/MergeReaderWrapper.java b/lucene/core/src/java/org/apache/lucene/index/MergeReaderWrapper.java
index fffb693..3a3573a 100644
--- a/lucene/core/src/java/org/apache/lucene/index/MergeReaderWrapper.java
+++ b/lucene/core/src/java/org/apache/lucene/index/MergeReaderWrapper.java
@@ -24,7 +24,6 @@ import org.apache.lucene.codecs.FieldsProducer;
 import org.apache.lucene.codecs.NormsProducer;
 import org.apache.lucene.codecs.StoredFieldsReader;
 import org.apache.lucene.codecs.TermVectorsReader;
-import org.apache.lucene.search.Sort;
 import org.apache.lucene.util.Bits;
 
 /** This is a hack to make index sorting fast, with a {@link LeafReader} that always returns merge instances when you ask for the codec readers. */
@@ -235,7 +234,7 @@ class MergeReaderWrapper extends LeafReader {
   }
 
   @Override
-  public Sort getIndexSort() {
-    return in.getIndexSort();
+  public LeafMetaData getMetaData() {
+    return in.getMetaData();
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/java/org/apache/lucene/index/MergeState.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/MergeState.java b/lucene/core/src/java/org/apache/lucene/index/MergeState.java
index a7c8307..9ad69f6 100644
--- a/lucene/core/src/java/org/apache/lucene/index/MergeState.java
+++ b/lucene/core/src/java/org/apache/lucene/index/MergeState.java
@@ -231,7 +231,7 @@ public class MergeState {
     List<CodecReader> readers = new ArrayList<>(originalReaders.size());
 
     for (CodecReader leaf : originalReaders) {
-      Sort segmentSort = leaf.getIndexSort();
+      Sort segmentSort = leaf.getMetaData().getSort();
 
       if (segmentSort == null) {
         // This segment was written by flush, so documents are not yet sorted, so we sort them now:


[04/14] lucene-solr:jira/solr-9959: SOLR-8906: Make transient core cache pluggable

Posted by ab...@apache.org.
SOLR-8906: Make transient core cache pluggable


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/52632cfc
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/52632cfc
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/52632cfc

Branch: refs/heads/jira/solr-9959
Commit: 52632cfc0c0c945cff2e769e6c2dc4dc9a5da400
Parents: 2e545d7
Author: Erick Erickson <er...@apache.org>
Authored: Mon Apr 3 13:27:12 2017 -0700
Committer: Erick Erickson <er...@apache.org>
Committed: Mon Apr 3 13:27:12 2017 -0700

----------------------------------------------------------------------
 solr/CHANGES.txt                                |   2 +
 .../org/apache/solr/core/CoreContainer.java     |  25 ++-
 .../java/org/apache/solr/core/NodeConfig.java   |  25 ++-
 .../java/org/apache/solr/core/SolrCores.java    | 166 ++++++++++------
 .../org/apache/solr/core/SolrXmlConfig.java     |   5 +
 .../solr/core/TransientSolrCoreCache.java       | 127 ++++++++++++
 .../core/TransientSolrCoreCacheDefault.java     | 198 +++++++++++++++++++
 .../core/TransientSolrCoreCacheFactory.java     |  85 ++++++++
 .../TransientSolrCoreCacheFactoryDefault.java   |  31 +++
 solr/core/src/test-files/solr/solr.xml          |   5 +
 .../org/apache/solr/cloud/ZkControllerTest.java |   9 +-
 .../org/apache/solr/core/TestCoreDiscovery.java |   7 +-
 .../org/apache/solr/core/TestLazyCores.java     |  53 ++++-
 .../java/org/apache/solr/SolrTestCaseJ4.java    |  13 +-
 14 files changed, 662 insertions(+), 89 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/52632cfc/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index cd4f7f5..6fe4cc0 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -188,6 +188,8 @@ Other Changes
 
 * SOLR-9601: Redone DataImportHandler 'tika' example, removing all unused and irrelevant definitions (Alexandre Rafalovitch)
 
+* SOLR-8906: Make transient core cache pluggable (Erick Erickson)
+
 ==================  6.5.1 ==================
 
 Bug Fixes

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/52632cfc/solr/core/src/java/org/apache/solr/core/CoreContainer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index 5ec34ba..1ef036a 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -130,6 +130,7 @@ public class CoreContainer {
 
   protected CoreAdminHandler coreAdminHandler = null;
   protected CollectionsHandler collectionsHandler = null;
+  protected TransientSolrCoreCache transientSolrCoreCache = null;
   private InfoHandler infoHandler;
   protected ConfigSetsHandler configSetsHandler = null;
 
@@ -144,6 +145,8 @@ public class CoreContainer {
 
   private UpdateShardHandler updateShardHandler;
 
+  private TransientSolrCoreCacheFactory transientCoreCache;
+  
   private ExecutorService coreContainerWorkExecutor = ExecutorUtil.newMDCAwareCachedThreadPool(
       new DefaultSolrThreadFactory("coreContainerWorkExecutor") );
 
@@ -492,7 +495,7 @@ public class CoreContainer {
     updateShardHandler = new UpdateShardHandler(cfg.getUpdateShardHandlerConfig());
     updateShardHandler.initializeMetrics(metricManager, SolrInfoMBean.Group.node.toString(), "updateShardHandler");
 
-    solrCores.allocateLazyCores(cfg.getTransientCacheSize(), loader);
+    transientCoreCache = TransientSolrCoreCacheFactory.newInstance(loader, this);
 
     logging = LogWatcher.newRegisteredLogWatcher(cfg.getLogWatcherConfig(), loader);
 
@@ -535,9 +538,9 @@ public class CoreContainer {
     String registryName = SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node);
     metricManager.registerGauge(registryName, () -> solrCores.getCores().size(),
         true, "loaded", SolrInfoMBean.Category.CONTAINER.toString(), "cores");
-    metricManager.registerGauge(registryName, () -> solrCores.getCoreNames().size() - solrCores.getCores().size(),
+    metricManager.registerGauge(registryName, () -> solrCores.getLoadedCoreNames().size() - solrCores.getCores().size(),
         true, "lazy",SolrInfoMBean.Category.CONTAINER.toString(), "cores");
-    metricManager.registerGauge(registryName, () -> solrCores.getAllCoreNames().size() - solrCores.getCoreNames().size(),
+    metricManager.registerGauge(registryName, () -> solrCores.getAllCoreNames().size() - solrCores.getLoadedCoreNames().size(),
         true, "unloaded",SolrInfoMBean.Category.CONTAINER.toString(), "cores");
     metricManager.registerGauge(registryName, () -> cfg.getCoreRootDirectory().toFile().getTotalSpace(),
         true, "totalSpace", SolrInfoMBean.Category.CONTAINER.toString(), "fs");
@@ -629,6 +632,16 @@ public class CoreContainer {
     }
   }
 
+  public TransientSolrCoreCache getTransientCacheHandler() {
+
+    if (transientCoreCache == null) {
+      log.error("No transient handler has been defined. Check solr.xml to see if an attempt to provide a custom " +
+          "TransientSolrCoreCacheFactory was done incorrectly since the default should have been used otherwise.");
+      return null;
+    }
+    return transientCoreCache.getTransientSolrCoreCache();
+  }
+  
   public void securityNodeChanged() {
     log.info("Security node changed, reloading security.json");
     reloadSecurityProperties();
@@ -1076,10 +1089,10 @@ public class CoreContainer {
   }
 
   /**
-   * @return a Collection of the names that cores are mapped to
+   * @return a Collection of the names that loaded cores are mapped to
    */
   public Collection<String> getCoreNames() {
-    return solrCores.getCoreNames();
+    return solrCores.getLoadedCoreNames();
   }
 
   /** This method is currently experimental.
@@ -1092,6 +1105,8 @@ public class CoreContainer {
   /**
    * get a list of all the cores that are currently loaded
   * @return a list of all the available core names in either permanent or transient core lists.
+   * 
+   * Note: this implies that the core is loaded
    */
   public Collection<String> getAllCoreNames() {
     return solrCores.getAllCoreNames();
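
A minimal caller sketch for the new accessor (hypothetical coreContainer variable): the pluggable cache is reached through CoreContainer and may be null when a custom TransientSolrCoreCacheFactory could not be set up, so callers should guard against that.

    TransientSolrCoreCache transientCache = coreContainer.getTransientCacheHandler();
    if (transientCache != null) {
      // operate on transient cores through the pluggable cache
    }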

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/52632cfc/solr/core/src/java/org/apache/solr/core/NodeConfig.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/NodeConfig.java b/solr/core/src/java/org/apache/solr/core/NodeConfig.java
index 258fd14..de2dcea 100644
--- a/solr/core/src/java/org/apache/solr/core/NodeConfig.java
+++ b/solr/core/src/java/org/apache/solr/core/NodeConfig.java
@@ -52,6 +52,8 @@ public class NodeConfig {
 
   private final Integer coreLoadThreads;
 
+  @Deprecated
+  // This should be part of the transientCacheConfig, remove in 7.0
   private final int transientCacheSize;
 
   private final boolean useSchemaCache;
@@ -62,6 +64,8 @@ public class NodeConfig {
 
   private final PluginInfo[] metricReporterPlugins;
 
+  private final PluginInfo transientCacheConfig;
+
   private NodeConfig(String nodeName, Path coreRootDirectory, Path configSetBaseDirectory, String sharedLibDirectory,
                      PluginInfo shardHandlerFactoryConfig, UpdateShardHandlerConfig updateShardHandlerConfig,
                      String coreAdminHandlerClass, String collectionsAdminHandlerClass,
@@ -69,7 +73,7 @@ public class NodeConfig {
                      LogWatcherConfig logWatcherConfig, CloudConfig cloudConfig, Integer coreLoadThreads,
                      int transientCacheSize, boolean useSchemaCache, String managementPath, SolrResourceLoader loader,
                      Properties solrProperties, PluginInfo[] backupRepositoryPlugins,
-                     PluginInfo[] metricReporterPlugins) {
+                     PluginInfo[] metricReporterPlugins, PluginInfo transientCacheConfig) {
     this.nodeName = nodeName;
     this.coreRootDirectory = coreRootDirectory;
     this.configSetBaseDirectory = configSetBaseDirectory;
@@ -90,6 +94,7 @@ public class NodeConfig {
     this.solrProperties = solrProperties;
     this.backupRepositoryPlugins = backupRepositoryPlugins;
     this.metricReporterPlugins = metricReporterPlugins;
+    this.transientCacheConfig = transientCacheConfig;
 
     if (this.cloudConfig != null && this.getCoreLoadThreadCount(false) < 2) {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
@@ -182,6 +187,8 @@ public class NodeConfig {
     return metricReporterPlugins;
   }
 
+  public PluginInfo getTransientCachePluginInfo() { return transientCacheConfig; }
+
   public static class NodeConfigBuilder {
 
     private Path coreRootDirectory;
@@ -195,13 +202,16 @@ public class NodeConfig {
     private String configSetsHandlerClass = DEFAULT_CONFIGSETSHANDLERCLASS;
     private LogWatcherConfig logWatcherConfig = new LogWatcherConfig(true, null, null, 50);
     private CloudConfig cloudConfig;
-    private Integer coreLoadThreads;
+    private int coreLoadThreads = DEFAULT_CORE_LOAD_THREADS;
+    @Deprecated
+    //Remove in 7.0 and put it all in the transientCache element in solrconfig.xml
     private int transientCacheSize = DEFAULT_TRANSIENT_CACHE_SIZE;
     private boolean useSchemaCache = false;
     private String managementPath;
     private Properties solrProperties = new Properties();
     private PluginInfo[] backupRepositoryPlugins;
     private PluginInfo[] metricReporterPlugins;
+    private PluginInfo transientCacheConfig;
 
     private final SolrResourceLoader loader;
     private final String nodeName;
@@ -210,7 +220,7 @@ public class NodeConfig {
     //No:of core load threads in cloud mode is set to a default of 8
     public static final int DEFAULT_CORE_LOAD_THREADS_IN_CLOUD = 8;
 
-    private static final int DEFAULT_TRANSIENT_CACHE_SIZE = Integer.MAX_VALUE;
+    public static final int DEFAULT_TRANSIENT_CACHE_SIZE = Integer.MAX_VALUE;
 
     private static final String DEFAULT_ADMINHANDLERCLASS = "org.apache.solr.handler.admin.CoreAdminHandler";
     private static final String DEFAULT_INFOHANDLERCLASS = "org.apache.solr.handler.admin.InfoHandler";
@@ -284,6 +294,8 @@ public class NodeConfig {
       return this;
     }
 
+    // Remove in Solr 7.0
+    @Deprecated
     public NodeConfigBuilder setTransientCacheSize(int transientCacheSize) {
       this.transientCacheSize = transientCacheSize;
       return this;
@@ -313,12 +325,17 @@ public class NodeConfig {
       this.metricReporterPlugins = metricReporterPlugins;
       return this;
     }
+    
+    public NodeConfigBuilder setSolrCoreCacheFactoryConfig(PluginInfo transientCacheConfig) {
+      this.transientCacheConfig = transientCacheConfig;
+      return this;
+    }
 
     public NodeConfig build() {
       return new NodeConfig(nodeName, coreRootDirectory, configSetBaseDirectory, sharedLibDirectory, shardHandlerFactoryConfig,
                             updateShardHandlerConfig, coreAdminHandlerClass, collectionsAdminHandlerClass, infoHandlerClass, configSetsHandlerClass,
                             logWatcherConfig, cloudConfig, coreLoadThreads, transientCacheSize, useSchemaCache, managementPath, loader, solrProperties,
-                            backupRepositoryPlugins, metricReporterPlugins);
+                            backupRepositoryPlugins, metricReporterPlugins, transientCacheConfig);
     }
   }
 }
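
A hypothetical sketch of wiring the new plugin info into the builder by hand (normally SolrXmlConfig does this while
parsing solr.xml; loader is assumed to be an existing SolrResourceLoader):

    PluginInfo info = new PluginInfo("transientCoreCacheFactory",
        ImmutableMap.of("class", "TransientSolrCoreCacheFactoryDefault",
            "name", "transientCoreCacheFactory"),
        null, Collections.<PluginInfo>emptyList());
    NodeConfig cfg = new NodeConfig.NodeConfigBuilder("testNode", loader)
        .setSolrCoreCacheFactoryConfig(info)
        .build();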

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/52632cfc/solr/core/src/java/org/apache/solr/core/SolrCores.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCores.java b/solr/core/src/java/org/apache/solr/core/SolrCores.java
index b25e9bb..40d5115 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCores.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCores.java
@@ -17,6 +17,7 @@
 package org.apache.solr.core;
 
 import com.google.common.collect.Lists;
+import org.apache.http.annotation.Experimental;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.logging.MDCLoggingContext;
@@ -32,6 +33,8 @@ import java.util.HashSet;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Observable;
+import java.util.Observer;
 import java.util.Set;
 import java.util.TreeSet;
 import java.util.concurrent.ConcurrentHashMap;
@@ -39,15 +42,12 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.TimeUnit;
 
 
-class SolrCores {
+class SolrCores implements Observer {
 
   private static Object modifyLock = new Object(); // for locking around manipulating any of the core maps.
   private final Map<String, SolrCore> cores = new LinkedHashMap<>(); // For "permanent" cores
 
-  //WARNING! The _only_ place you put anything into the list of transient cores is with the putTransientCore method!
-  private Map<String, SolrCore> transientCores = new LinkedHashMap<>(); // For "lazily loaded" cores
-
-  private final Map<String, CoreDescriptor> dynamicDescriptors = new LinkedHashMap<>();
+  private final Map<String, CoreDescriptor> lazyDescriptors = new LinkedHashMap<>();
 
   private final CoreContainer container;
   
@@ -66,33 +66,19 @@ class SolrCores {
   SolrCores(CoreContainer container) {
     this.container = container;
   }
-
-  // Trivial helper method for load, note it implements LRU on transient cores. Also note, if
-  // there is no setting for max size, nothing is done and all cores go in the regular "cores" list
-  protected void allocateLazyCores(final int cacheSize, final SolrResourceLoader loader) {
-    if (cacheSize != Integer.MAX_VALUE) {
-      log.info("Allocating transient cache for {} transient cores", cacheSize);
-      transientCores = new LinkedHashMap<String, SolrCore>(cacheSize, 0.75f, true) {
-        @Override
-        protected boolean removeEldestEntry(Map.Entry<String, SolrCore> eldest) {
-          if (size() > cacheSize) {
-            synchronized (modifyLock) {
-              SolrCore coreToClose = eldest.getValue();
-              log.info("Closing transient core [{}]", coreToClose.getName());
-              pendingCloses.add(coreToClose); // Essentially just queue this core up for closing.
-              modifyLock.notifyAll(); // Wakes up closer thread too
-            }
-            return true;
-          }
-          return false;
-        }
-      };
-    }
-  }
-
-  protected void putDynamicDescriptor(String rawName, CoreDescriptor p) {
+  
+  protected void putDynamicDescriptor(String rawName, CoreDescriptor cd) {
     synchronized (modifyLock) {
-      dynamicDescriptors.put(rawName, p);
+      if (cd.isTransient()) {
+        if (container.getTransientCacheHandler() != null) {
+          container.getTransientCacheHandler().addTransientDescriptor(rawName, cd);
+        } else {
+          log.error("Tried to add transient core to transient handler, but no transient core handler has been found. "
+              + " Descriptor: " + cd.toString());
+        }
+      } else {
+        lazyDescriptors.put(rawName, cd);
+      }
     }
   }
 
@@ -101,19 +87,25 @@ class SolrCores {
   protected void close() {
     Collection<SolrCore> coreList = new ArrayList<>();
 
+    
+    TransientSolrCoreCache transientSolrCoreCache = container.getTransientCacheHandler();
+    // Release observer
+    if (transientSolrCoreCache != null) {
+      transientSolrCoreCache.close();
+    }
+
     // It might be possible for one of the cores to move from one list to another while we're closing them. So
     // loop through the lists until they're all empty. In particular, the core could have moved from the transient
     // list to the pendingCloses list.
-
     do {
       coreList.clear();
       synchronized (modifyLock) {
         // make a copy of the cores then clear the map so the core isn't handed out to a request again
         coreList.addAll(cores.values());
         cores.clear();
-
-        coreList.addAll(transientCores.values());
-        transientCores.clear();
+        if (transientSolrCoreCache != null) {
+          coreList.addAll(transientSolrCoreCache.prepareForShutdown());
+        }
 
         coreList.addAll(pendingCloses);
         pendingCloses.clear();
@@ -147,10 +139,12 @@ class SolrCores {
 
   //WARNING! This should be the _only_ place you put anything into the list of transient cores!
   protected SolrCore putTransientCore(NodeConfig cfg, String name, SolrCore core, SolrResourceLoader loader) {
-    SolrCore retCore;
+    SolrCore retCore = null;
     log.info("Opening transient core {}", name);
     synchronized (modifyLock) {
-      retCore = transientCores.put(name, core);
+      if (container.getTransientCacheHandler() != null) {
+        retCore = container.getTransientCacheHandler().addCore(name, core);
+      }
     }
     return retCore;
   }
@@ -161,6 +155,17 @@ class SolrCores {
     }
   }
 
+  /**
+   * @return A list of "permanent" cores, i.e. cores that may not be swapped out and are currently loaded.
+   *
+   * A core may be non-transient but still lazily loaded. If it is "permanent" and lazily loaded _and_
+   * not yet loaded, it will _not_ be returned by this call.
+   *
+   * Note: This is one of the places where SolrCloud is incompatible with transient cores. This call is used in
+   * cancelRecoveries; transient cores don't participate.
+   */
+
   List<SolrCore> getCores() {
     List<SolrCore> lst = new ArrayList<>();
 
@@ -170,16 +175,34 @@ class SolrCores {
     }
   }
 
-  Set<String> getCoreNames() {
+  /**
+   * Gets the cores that are currently loaded, i.e. cores that have
+   * 1> loadOnStartup=true and are either not transient or, if transient, have been loaded and not yet swapped out, or
+   * 2> loadOnStartup=false and have been loaded and are either not transient or have not been swapped out.
+   *
+   * Put another way, this will not return the name of any core that is lazily loaded but has not been requested yet,
+   * or that is transient and either not loaded or already swapped out.
+   *
+   * @return the names of the currently loaded cores.
+   */
+  Set<String> getLoadedCoreNames() {
     Set<String> set = new TreeSet<>();
 
     synchronized (modifyLock) {
       set.addAll(cores.keySet());
-      set.addAll(transientCores.keySet());
+      if (container.getTransientCacheHandler() != null) {
+        set.addAll(container.getTransientCacheHandler().getLoadedCoreNames());
+      }
     }
     return set;
   }
 
+  /** This method is currently experimental.
+   * @return a Collection of the names that a specific core is mapped to.
+   * 
+   * Note: this implies that the core is loaded
+   */
+  @Experimental
   List<String> getCoreNames(SolrCore core) {
     List<String> lst = new ArrayList<>();
 
@@ -189,26 +212,26 @@ class SolrCores {
           lst.add(entry.getKey());
         }
       }
-      for (Map.Entry<String, SolrCore> entry : transientCores.entrySet()) {
-        if (core == entry.getValue()) {
-          lst.add(entry.getKey());
-        }
+      if (container.getTransientCacheHandler() != null) {
+        lst.addAll(container.getTransientCacheHandler().getNamesForCore(core));
       }
     }
     return lst;
   }
 
   /**
-   * Gets a list of all cores, loaded and unloaded (dynamic)
+   * Gets a list of all cores, loaded and unloaded 
    *
-   * @return all cores names, whether loaded or unloaded.
+   * @return all core names, whether loaded or unloaded, transient or permanent.
    */
   public Collection<String> getAllCoreNames() {
     Set<String> set = new TreeSet<>();
     synchronized (modifyLock) {
       set.addAll(cores.keySet());
-      set.addAll(transientCores.keySet());
-      set.addAll(dynamicDescriptors.keySet());
+      if (container.getTransientCacheHandler() != null) {
+        set.addAll(container.getTransientCacheHandler().getAllCoreNames());
+      }
+      set.addAll(lazyDescriptors.keySet());
     }
     return set;
   }
@@ -251,14 +274,15 @@ class SolrCores {
   protected SolrCore remove(String name) {
 
     synchronized (modifyLock) {
-      SolrCore tmp = cores.remove(name);
-      SolrCore ret = null;
-      ret = (ret == null) ? tmp : ret;
+      SolrCore ret = cores.remove(name);
       // It could have been a newly-created core. It could have been a transient core. The newly-created cores
       // in particular should be checked. It could have been a dynamic core.
-      tmp = transientCores.remove(name);
-      ret = (ret == null) ? tmp : ret;
-      dynamicDescriptors.remove(name);
+      TransientSolrCoreCache transientHandler = container.getTransientCacheHandler(); 
+      if (ret == null && transientHandler != null) {
+        ret = transientHandler.removeCore(name);
+        transientHandler.removeTransientDescriptor(name);
+      }
+      lazyDescriptors.remove(name);
       return ret;
     }
   }
@@ -268,8 +292,8 @@ class SolrCores {
     synchronized (modifyLock) {
       SolrCore core = cores.get(name);
 
-      if (core == null) {
-        core = transientCores.get(name);
+      if (core == null && container.getTransientCacheHandler() != null) {
+        core = container.getTransientCacheHandler().getCore(name);
       }
 
       if (core != null && incRefCount) {
@@ -282,7 +306,9 @@ class SolrCores {
 
   protected CoreDescriptor getDynamicDescriptor(String name) {
     synchronized (modifyLock) {
-      return dynamicDescriptors.get(name);
+      CoreDescriptor cd = lazyDescriptors.get(name);
+      if (cd != null || container.getTransientCacheHandler() == null) return cd;
+      return container.getTransientCacheHandler().getTransientDescriptor(name);
     }
   }
 
@@ -295,7 +321,7 @@ class SolrCores {
       if (cores.containsKey(name)) {
         return true;
       }
-      if (transientCores.containsKey(name)) {
+      if (container.getTransientCacheHandler() != null && container.getTransientCacheHandler().containsCore(name)) {
         // Check pending
         for (SolrCore core : pendingCloses) {
           if (core.getName().equals(name)) {
@@ -314,7 +340,7 @@ class SolrCores {
       if (cores.containsKey(name)) {
         return true;
       }
-      if (transientCores.containsKey(name)) {
+      if (container.getTransientCacheHandler() != null && container.getTransientCacheHandler().containsCore(name)) {
         return true;
       }
     }
@@ -324,13 +350,16 @@ class SolrCores {
 
   protected CoreDescriptor getUnloadedCoreDescriptor(String cname) {
     synchronized (modifyLock) {
-      CoreDescriptor desc = dynamicDescriptors.get(cname);
+      CoreDescriptor desc = lazyDescriptors.get(cname);
       if (desc == null) {
-        return null;
+        if (container.getTransientCacheHandler() == null) return null;
+        desc = container.getTransientCacheHandler().getTransientDescriptor(cname);
+        if (desc == null) {
+          return null;
+        }
       }
       return new CoreDescriptor(cname, desc);
     }
-
   }
 
   // Wait here until any pending operations (load, unload or reload) are completed on this core.
@@ -412,9 +441,9 @@ class SolrCores {
     synchronized (modifyLock) {
       if (cores.containsKey(coreName))
         return cores.get(coreName).getCoreDescriptor();
-      if (dynamicDescriptors.containsKey(coreName))
-        return dynamicDescriptors.get(coreName);
-      return null;
+      if (lazyDescriptors.containsKey(coreName) || container.getTransientCacheHandler() == null)
+        return lazyDescriptors.get(coreName);
+      return container.getTransientCacheHandler().getTransientDescriptor(coreName);
     }
   }
 
@@ -494,4 +523,13 @@ class SolrCores {
     }
     return false;
   }
+
+  // Let the transient cache implementation tell us when it ages out a core.
+  @Override
+  public void update(Observable o, Object arg) {
+    synchronized (modifyLock) {
+      pendingCloses.add((SolrCore) arg); // Essentially just queue this core up for closing.
+      modifyLock.notifyAll(); // Wakes up closer thread too
+    }
+  }
 }
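
To make the renamed accessors concrete, a small illustrative snippet (cc is assumed to be an existing CoreContainer):

    // getCoreNames() delegates to getLoadedCoreNames(): only cores that are open right now.
    // getAllCoreNames(): every known name, including unloaded lazy and transient descriptors.
    Collection<String> loaded = cc.getCoreNames();
    Collection<String> all = cc.getAllCoreNames();
    assert all.containsAll(loaded);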

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/52632cfc/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java b/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
index 951d8d5..b37bd52 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
@@ -91,6 +91,7 @@ public class SolrXmlConfig {
     NodeConfig.NodeConfigBuilder configBuilder = new NodeConfig.NodeConfigBuilder(nodeName, config.getResourceLoader());
     configBuilder.setUpdateShardHandlerConfig(updateConfig);
     configBuilder.setShardHandlerFactoryConfig(getShardHandlerFactoryPluginInfo(config));
+    configBuilder.setSolrCoreCacheFactoryConfig(getTransientCoreCacheFactoryPluginInfo(config));
     configBuilder.setLogWatcherConfig(loadLogWatcherConfig(config, "solr/logging/*[@name]", "solr/logging/watcher/*[@name]"));
     configBuilder.setSolrProperties(loadProperties(config));
     if (cloudConfig != null)
@@ -456,5 +457,9 @@ public class SolrXmlConfig {
     }
     return configs;
   }
+  private static PluginInfo getTransientCoreCacheFactoryPluginInfo(Config config) {
+    Node node = config.getNode("solr/transientCoreCacheFactory", false);
+    return (node == null) ? null : new PluginInfo(node, "transientCoreCacheFactory", false, true);
+  }
 }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/52632cfc/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCache.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCache.java b/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCache.java
new file mode 100644
index 0000000..63df02b
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCache.java
@@ -0,0 +1,127 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.core;
+
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Observable;
+import java.util.Set;
+
+import org.apache.http.annotation.Experimental;
+
+/**
+ * The base class for custom transient core maintenance. Any custom plugin that wants to take control of transient
+ * caches (i.e. any core defined with transient=true) should override this class.
+ *
+ * Register your plugin in solr.xml similarly to:
+ *
+ *   &lt;transientCoreCacheFactory name="transientCoreCacheFactory" class="TransientSolrCoreCacheFactoryDefault"&gt;
+ *        &lt;int name="transientCacheSize"&gt;4&lt;/int&gt;
+ *   &lt;/transientCoreCacheFactory&gt;
+ *
+ *
+ * WARNING: There is quite a bit of higher-level locking done by the CoreContainer to avoid various race conditions
+ *          etc. You should _only_ manipulate them within the method calls designed to change them. E.g.
+ *          only add to the transient core descriptors in addTransientDescriptor etc.
+ *          
+ *          Trust the higher-level code (mainly SolrCores and CoreContainer) to call the appropriate operations when
+ *          necessary and to coordinate shutting down cores, manipulating the internal structures and the like.
+ *          
+ *          The only real action you should _initiate_ is to close a core for whatever reason, and do that by
+ *          calling notifyObservers(coreToClose). The observer will call back to removeCore(name) at the appropriate
+ *          time. There is no need to directly remove the core _at that time_ from the transientCores list; a call
+ *          will come back to this class when CoreContainer is closing this core.
+ *          
+ *          CoreDescriptors are read-once. During "core discovery" all valid descriptors are enumerated and added to
+ *          the appropriate list. Thereafter, they are NOT re-read from disk. In those situations where you want
+ *          to re-define the coreDescriptor, maintain a "side list" of changed core descriptors. Then override
+ *          getTransientDescriptor to return your new core descriptor. NOTE: assuming you've already closed the
+ *          core, the _next_ time that core is required getTransientDescriptor will be called and if you return the
+ *          new core descriptor your re-definition should be honored. You'll have to maintain this list for the
+ *          duration of this Solr instance running. If you persist the coreDescriptor, then next time Solr starts
+ *          up the new definition will be read.
+ *          
+ *
+ *  If you need to manipulate the return value, for instance to block a core from being loaded for some period of time,
+ *  override, say, getTransientDescriptor and return null.
+ *  
+ *  In particular, DO NOT reach into the transientCores structure from a method called to manipulate core descriptors
+ *  or vice-versa.
+ */
+public abstract class TransientSolrCoreCache extends Observable {
+
+  // Gets the core container that encloses this cache.
+  public abstract CoreContainer getContainer();
+
+  // Add the newly-opened core to the list of open cores.
+  public abstract SolrCore addCore(String name, SolrCore core);
+
+  // Return the names of all possible cores, whether they are currently loaded or not.
+  public abstract Set<String> getAllCoreNames();
+  
+  // Return the names of all currently loaded cores
+  public abstract Set<String> getLoadedCoreNames();
+
+  // Remove a core from the internal structures, presumably because it is
+  // being closed. If the core is re-opened, it will be re-added by CoreContainer.
+  public abstract SolrCore removeCore(String name);
+
+  // Get the core associated with the name. Return null if you don't want this core to be used.
+  public abstract SolrCore getCore(String name);
+
+  // Return true if the cache contains the named core.
+  public abstract boolean containsCore(String name);
+  
+  // This method will be called when the container is to be shut down. It should return all
+  // transient solr cores and clear any internal structures that hold them.
+  public abstract Collection<SolrCore> prepareForShutdown();
+
+  // These methods allow the implementation to maintain control over the core descriptors.
+  
+  // This method will only be called during core discovery at startup.
+  public abstract void addTransientDescriptor(String rawName, CoreDescriptor cd);
+  
+  // This method is used when opening cores and the like. If you want to change a core's descriptor, override this
+  // method and return the current core descriptor.
+  public abstract CoreDescriptor getTransientDescriptor(String name);
+
+
+  // Remove the core descriptor from your list of transient descriptors.
+  public abstract CoreDescriptor removeTransientDescriptor(String name);
+
+  // Find all the names a specific core is mapped to. Should not return null; return an empty list instead.
+  @Experimental
+  public List<String> getNamesForCore(SolrCore core) {
+    return Collections.emptyList();
+  }
+  
+  /**
+   * Must be called in order to free resources!
+   */
+  public abstract void close();
+
+
+  // These two methods allow custom implementations to communicate arbitrary information as necessary.
+  public abstract int getStatus(String coreName);
+  public abstract void setStatus(String coreName, int status);
+}
+
+
+  
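
As the class comment suggests, a custom implementation initiates a close purely through the Observable mechanism; a
hypothetical eviction hook in a subclass might look like:

    // Illustrative only: ask the container to close a core this cache has decided to evict.
    // SolrCores.update() receives the notification and queues the core on pendingCloses;
    // removeCore(name) will be called back later, so the core is not removed here.
    protected void evict(SolrCore coreToClose) {
      setChanged();                  // mark this Observable as changed
      notifyObservers(coreToClose);  // wakes the closer thread via SolrCores.update()
    }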

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/52632cfc/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheDefault.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheDefault.java b/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheDefault.java
new file mode 100644
index 0000000..e1fd748
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheDefault.java
@@ -0,0 +1,198 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.core;
+
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Observer;
+import java.util.Set;
+
+import org.apache.solr.common.util.NamedList;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class TransientSolrCoreCacheDefault extends TransientSolrCoreCache {
+
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  private int cacheSize = NodeConfig.NodeConfigBuilder.DEFAULT_TRANSIENT_CACHE_SIZE;
+
+  protected Observer observer;
+  protected CoreContainer coreContainer;
+
+  protected final Map<String, CoreDescriptor> transientDescriptors = new LinkedHashMap<>();
+
+  //WARNING! The _only_ place you put anything into the list of transient cores is with the addCore method (called from SolrCores.putTransientCore)!
+  protected Map<String, SolrCore> transientCores = new LinkedHashMap<>(); // For "lazily loaded" cores
+
+  /**
+   * @param container The enclosing CoreContainer. It allows us to access everything we need.
+   */
+  public TransientSolrCoreCacheDefault(final CoreContainer container) {
+    this.coreContainer = container;
+    this.observer= coreContainer.solrCores;
+    
+    NodeConfig cfg = container.getNodeConfig();
+    if (cfg.getTransientCachePluginInfo() == null) {
+      // Still handle just having transientCacheSize defined in the body of solr.xml  not in a transient handler clause.
+      // deprecate this for 7.0?
+      this.cacheSize = cfg.getTransientCacheSize();
+    } else {
+      NamedList args = cfg.getTransientCachePluginInfo().initArgs;
+      Object obj = args.get("transientCacheSize");
+      if (obj != null) {
+        this.cacheSize = (int) obj;
+      }
+    }
+    doInit();
+  }
+  // This just moves the cache initialization out of the constructor.
+  private void doInit() {
+    NodeConfig cfg = coreContainer.getNodeConfig();
+    if (cfg.getTransientCachePluginInfo() == null) {
+      // Still handle just having transientCacheSize defined in the body of solr.xml not in a transient handler clause.
+      this.cacheSize = cfg.getTransientCacheSize();
+    } else {
+      NamedList args = cfg.getTransientCachePluginInfo().initArgs;
+      Object obj = args.get("transientCacheSize");
+      if (obj != null) {
+        this.cacheSize = (int) obj;
+      }
+    }
+
+    log.info("Allocating transient cache for {} transient cores", cacheSize);
+    addObserver(this.observer);
+    // It's possible for the configured cache size to still be the old -1 "unlimited" marker value.
+    if (cacheSize < 0) { // Trap old flag
+      cacheSize = Integer.MAX_VALUE;
+    }
+    // Now don't allow ridiculous allocations here, if the size is > 1,000, we'll just deal with
+    // adding cores as they're opened. This blows up with the marker value of -1.
+    transientCores = new LinkedHashMap<String, SolrCore>(Math.min(cacheSize, 1000), 0.75f, true) {
+      @Override
+      protected boolean removeEldestEntry(Map.Entry<String, SolrCore> eldest) {
+        if (size() > cacheSize) {
+          SolrCore coreToClose = eldest.getValue();
+          setChanged();
+          notifyObservers(coreToClose);
+          log.info("Closing transient core [{}]", coreToClose.getName());
+          return true;
+        }
+        return false;
+      }
+    };
+  }
+
+  
+  @Override
+  public Collection<SolrCore> prepareForShutdown() {
+    // Return a copy of the values
+    List<SolrCore> ret = new ArrayList<>(transientCores.values());
+    transientCores.clear();
+    return ret;
+  }
+
+  @Override
+  public CoreContainer getContainer() { return this.coreContainer; }
+
+  @Override
+  public SolrCore addCore(String name, SolrCore core) {
+    return transientCores.put(name, core);
+  }
+
+  @Override
+  public Set<String> getAllCoreNames() {
+    return transientDescriptors.keySet();
+  }
+  
+  @Override
+  public Set<String> getLoadedCoreNames() {
+    return transientCores.keySet();
+  }
+
+  // Remove a core from the internal structures, presumably because it is
+  // being closed. If the core is re-opened, it will be re-added by CoreContainer.
+  @Override
+  public SolrCore removeCore(String name) {
+    return transientCores.remove(name);
+  }
+
+  // Get the core associated with the name. Return null if you don't want this core to be used.
+  @Override
+  public SolrCore getCore(String name) {
+    return transientCores.get(name);
+  }
+
+  @Override
+  public boolean containsCore(String name) {
+    return transientCores.containsKey(name);
+  }
+
+  // These methods allow the implementation to maintain control over the core descriptors.
+
+
+  // This method will only be called during core discovery at startup.
+  @Override
+  public void addTransientDescriptor(String rawName, CoreDescriptor cd) {
+    transientDescriptors.put(rawName, cd);
+  }
+
+  // This method is used when opening cores and the like. If you want to change a core's descriptor, override this
+  // method and return the current core descriptor.
+  @Override
+  public CoreDescriptor getTransientDescriptor(String name) {
+    return transientDescriptors.get(name);
+  }
+
+  @Override
+  public CoreDescriptor removeTransientDescriptor(String name) {
+    return transientDescriptors.remove(name);
+  }
+
+  @Override
+  public List<String> getNamesForCore(SolrCore core) {
+    List<String> ret = new ArrayList<>();
+    for (Map.Entry<String, SolrCore> entry : transientCores.entrySet()) {
+      if (core == entry.getValue()) {
+        ret.add(entry.getKey());
+      }
+    }
+    return ret;
+  }
+
+  /**
+   * Must be called in order to free resources!
+   */
+  @Override
+  public void close() {
+    deleteObserver(this.observer);
+  }
+
+
+  // For custom implementations to communicate arbitrary information as necessary.
+  @Override
+  public int getStatus(String coreName) { return 0; } //no_op for default handler.
+
+  @Override
+  public void setStatus(String coreName, int status) {} //no_op for default handler.
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/52632cfc/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheFactory.java b/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheFactory.java
new file mode 100644
index 0000000..b3b8cf0
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheFactory.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.core;
+
+import java.lang.invoke.MethodHandles;
+import java.util.Collections;
+import java.util.Locale;
+
+import com.google.common.collect.ImmutableMap;
+import org.apache.solr.util.plugin.PluginInfoInitialized;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * An abstract factory that allows custom transient core cache implementations to be plugged in.
+ */
+public abstract class TransientSolrCoreCacheFactory {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  private CoreContainer coreContainer = null;
+
+  public abstract TransientSolrCoreCache getTransientSolrCoreCache();
+  /**
+   * Create a new TransientSolrCoreCacheFactory instance
+   *
+   * @param loader a SolrResourceLoader used to find the TransientSolrCoreCacheFactory classes
+   * @param coreContainer CoreContainer that encloses all the Solr cores.
+   * @return a new, initialized TransientSolrCoreCacheFactory instance
+   */
+
+  public static TransientSolrCoreCacheFactory newInstance(SolrResourceLoader loader, CoreContainer coreContainer) {
+    PluginInfo info = coreContainer.getConfig().getTransientCachePluginInfo();
+    if (info == null) { // definition not in our solr.xml file, use default
+      info = DEFAULT_TRANSIENT_SOLR_CACHE_INFO;
+    }
+
+    try {
+      // According to the docs, this returns a TransientSolrCoreCacheFactory with the default c'tor
+      TransientSolrCoreCacheFactory tccf = loader.findClass(info.className, TransientSolrCoreCacheFactory.class).newInstance(); 
+      
+      // OK, now we call its init method.
+      if (PluginInfoInitialized.class.isAssignableFrom(tccf.getClass()))
+        PluginInfoInitialized.class.cast(tccf).init(info);
+      tccf.setCoreContainer(coreContainer);
+      return tccf;
+    } catch (Exception e) {
+      // Many things could cause this: a bad configuration, a mis-typed class name, whatever. However, this should not
+      // keep the enclosing coreContainer from instantiating, so log an error and continue.
+      log.error(String.format(Locale.ROOT, "Error instantiating TransientSolrCoreCacheFactory class [%s]: %s",
+          info.className, e.getMessage()));
+      return null;
+    }
+
+  }
+  public static final PluginInfo DEFAULT_TRANSIENT_SOLR_CACHE_INFO =
+      new PluginInfo("transientSolrCoreCacheFactory",
+          ImmutableMap.of("class", TransientSolrCoreCacheFactoryDefault.class.getName(), 
+              "name", TransientSolrCoreCacheFactory.class.getName()),
+          null, Collections.<PluginInfo>emptyList());
+
+
+  // Need this because the plugin framework only passes a PluginInfo to the init method and there is no other way to
+  // pass additional parameters; we need the CoreContainer when we create the transient core cache, so it's _really_ important.
+  public void setCoreContainer(CoreContainer coreContainer) {
+    this.coreContainer = coreContainer;
+  }
+
+  public CoreContainer getCoreContainer() {
+    return coreContainer;
+  }
+}
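
A hypothetical custom factory (MyTransientSolrCoreCacheFactory and the "mySetting" parameter are illustrative only)
that also implements PluginInfoInitialized so it can read its own init args from the transientCoreCacheFactory element:

    public class MyTransientSolrCoreCacheFactory extends TransientSolrCoreCacheFactory
        implements PluginInfoInitialized {

      private TransientSolrCoreCache cache;

      @Override
      public void init(PluginInfo info) {
        // Pick up custom parameters here, e.g. <str name="mySetting">...</str>
        Object v = (info.initArgs == null) ? null : info.initArgs.get("mySetting");
      }

      @Override
      public TransientSolrCoreCache getTransientSolrCoreCache() {
        if (cache == null) {
          cache = new TransientSolrCoreCacheDefault(getCoreContainer());
        }
        return cache;
      }
    }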

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/52632cfc/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheFactoryDefault.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheFactoryDefault.java b/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheFactoryDefault.java
new file mode 100644
index 0000000..722ab9c
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheFactoryDefault.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.core;
+
+public class TransientSolrCoreCacheFactoryDefault extends TransientSolrCoreCacheFactory {
+
+  TransientSolrCoreCache transientSolrCoreCache = null;
+
+  @Override
+  public TransientSolrCoreCache getTransientSolrCoreCache() {
+    if (transientSolrCoreCache == null) {
+      transientSolrCoreCache = new TransientSolrCoreCacheDefault(getCoreContainer());
+    }
+
+    return transientSolrCoreCache;
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/52632cfc/solr/core/src/test-files/solr/solr.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/solr.xml b/solr/core/src/test-files/solr/solr.xml
index f381475..526dffa 100644
--- a/solr/core/src/test-files/solr/solr.xml
+++ b/solr/core/src/test-files/solr/solr.xml
@@ -31,6 +31,11 @@
     <int name="connTimeout">${connTimeout:15000}</int>
   </shardHandlerFactory>
 
+  <transientCoreCacheFactory name="transientCoreCacheFactory" class="TransientSolrCoreCacheFactoryDefault">
+    <int name="transientCacheSize">4</int>
+  </transientCoreCacheFactory>
+
+
   <solrcloud>
     <str name="host">127.0.0.1</str>
     <int name="hostPort">${hostPort:8983}</int>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/52632cfc/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java b/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java
index d925774..d05cec9 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java
@@ -29,6 +29,8 @@ import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.CloudConfig;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.CoreDescriptor;
+import org.apache.solr.core.SolrXmlConfig;
+import org.apache.solr.core.TransientSolrCoreCache;
 import org.apache.solr.handler.admin.CoreAdminHandler;
 import org.apache.solr.handler.component.HttpShardHandlerFactory;
 import org.apache.solr.update.UpdateShardHandler;
@@ -327,7 +329,7 @@ public class ZkControllerTest extends SolrTestCaseJ4 {
   private static class MockCoreContainer extends CoreContainer {
     UpdateShardHandler updateShardHandler = new UpdateShardHandler(UpdateShardHandlerConfig.DEFAULT);
     public MockCoreContainer() {
-      super((Object)null);
+      super(SolrXmlConfig.fromString(null, "<solr/>"));
       this.shardHandlerFactory = new HttpShardHandlerFactory();
       this.coreAdminHandler = new CoreAdminHandler();
     }
@@ -345,6 +347,11 @@ public class ZkControllerTest extends SolrTestCaseJ4 {
       updateShardHandler.close();
       super.shutdown();
     }
+    
+    @Override
+    public TransientSolrCoreCache getTransientCacheHandler() {
+      return transientSolrCoreCache;
+    }
 
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/52632cfc/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java b/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java
index 65d459a..22020ba 100644
--- a/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java
+++ b/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java
@@ -60,12 +60,12 @@ public class TestCoreDiscovery extends SolrTestCaseJ4 {
     setMeUp(null);
   }
 
-  private Properties makeCorePropFile(String name, boolean isLazy, boolean loadOnStartup, String... extraProps) {
+  private Properties makeCorePropFile(String name, boolean isTransient, boolean loadOnStartup, String... extraProps) {
     Properties props = new Properties();
     props.put(CoreDescriptor.CORE_NAME, name);
     props.put(CoreDescriptor.CORE_SCHEMA, "schema-tiny.xml");
     props.put(CoreDescriptor.CORE_CONFIG, "solrconfig-minimal.xml");
-    props.put(CoreDescriptor.CORE_TRANSIENT, Boolean.toString(isLazy));
+    props.put(CoreDescriptor.CORE_TRANSIENT, Boolean.toString(isTransient));
     props.put(CoreDescriptor.CORE_LOADONSTARTUP, Boolean.toString(loadOnStartup));
     props.put(CoreDescriptor.CORE_DATADIR, "${core.dataDir:stuffandnonsense}");
 
@@ -140,7 +140,7 @@ public class TestCoreDiscovery extends SolrTestCaseJ4 {
     try {
 
       TestLazyCores.checkInCores(cc, "core1");
-      TestLazyCores.checkNotInCores(cc, "lazy1", "core2", "collection1");
+      TestLazyCores.checkNotInCores(cc, "lazy1", "core2");
 
       // force loading of core2 and lazy1 by getting them from the CoreContainer
       try (SolrCore core1 = cc.getCore("core1");
@@ -463,4 +463,5 @@ public class TestCoreDiscovery extends SolrTestCaseJ4 {
     NodeConfig absConfig = SolrXmlConfig.fromString(loader, "<solr><str name=\"coreRootDirectory\">/absolute</str></solr>");
     assertThat(absConfig.getCoreRootDirectory().toString(), not(containsString(solrHomeDirectory.getAbsolutePath())));
   }
+  
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/52632cfc/solr/core/src/test/org/apache/solr/core/TestLazyCores.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestLazyCores.java b/solr/core/src/test/org/apache/solr/core/TestLazyCores.java
index 0c0845b..8690e27 100644
--- a/solr/core/src/test/org/apache/solr/core/TestLazyCores.java
+++ b/solr/core/src/test/org/apache/solr/core/TestLazyCores.java
@@ -38,6 +38,7 @@ import org.apache.solr.handler.admin.CoreAdminHandler;
 import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.servlet.SolrDispatchFilter;
 import org.apache.solr.update.AddUpdateCommand;
 import org.apache.solr.update.CommitUpdateCommand;
 import org.apache.solr.update.UpdateHandler;
@@ -83,13 +84,13 @@ public class TestLazyCores extends SolrTestCaseJ4 {
   private CoreContainer init() throws Exception {
     solrHomeDirectory = createTempDir().toFile();
     
+    copyXmlToHome(solrHomeDirectory.getAbsoluteFile(), "solr.xml");
     for (int idx = 1; idx < 10; ++idx) {
       copyMinConf(new File(solrHomeDirectory, "collection" + idx));
     }
 
-    SolrResourceLoader loader = new SolrResourceLoader(solrHomeDirectory.toPath());
-    NodeConfig config = new NodeConfig.NodeConfigBuilder("testNode", loader).setTransientCacheSize(4).build();
-    return createCoreContainer(config, testCores);
+    NodeConfig cfg = SolrDispatchFilter.loadNodeConfig(solrHomeDirectory.toPath(), null);
+    return createCoreContainer(cfg, testCores);
   }
   
   @Test
@@ -188,7 +189,7 @@ public class TestLazyCores extends SolrTestCaseJ4 {
           , "//result[@numFound='0']"
       );
 
-      checkInCores(cc, "collection4");
+      checkInCores(cc, "collection1", "collection2", "collection4", "collection5");
 
       core4.close();
       collection1.close();
@@ -454,11 +455,14 @@ public class TestLazyCores extends SolrTestCaseJ4 {
   // 1> produce errors as appropriate when the config or schema files are foo'd
   // 2> "self heal". That is, if the problem is corrected can the core be reloaded and used?
   // 3> that OK cores can be searched even when some cores failed to load.
+  // 4> that having no solr.xml entry for the transient cache handler correctly falls back to the default.
   @Test
   public void testBadConfigsGenerateErrors() throws Exception {
     final CoreContainer cc = initGoodAndBad(Arrays.asList("core1", "core2"),
         Arrays.asList("badSchema1", "badSchema2"),
         Arrays.asList("badConfig1", "badConfig2"));
+    
+    
     try {
       // first, did the two good cores load successfully?
       checkInCores(cc, "core1", "core2");
@@ -491,8 +495,9 @@ public class TestLazyCores extends SolrTestCaseJ4 {
       copyGoodConf("badSchema1", "schema-tiny.xml", "schema.xml");
       copyGoodConf("badSchema2", "schema-tiny.xml", "schema.xml");
 
+      
       // This should force a reload of the cores.
-      SolrCore bc1 = cc.getCore("badConfig1");
+      SolrCore bc1 = cc.getCore("badConfig1");
       SolrCore bc2 = cc.getCore("badConfig2");
       SolrCore bs1 = cc.getCore("badSchema1");
       SolrCore bs2 = cc.getCore("badSchema2");
@@ -635,16 +640,46 @@ public class TestLazyCores extends SolrTestCaseJ4 {
   }
 
   public static void checkNotInCores(CoreContainer cc, String... nameCheck) {
-    Collection<String> names = cc.getCoreNames();
+    Collection<String> loadedNames = cc.getCoreNames();
     for (String name : nameCheck) {
-      assertFalse("core " + name + " was found in the list of cores", names.contains(name));
+      assertFalse("core " + name + " was found in the list of cores", loadedNames.contains(name));
+    }
+    
+    // There was a problem at one point exacerbated by the poor naming conventions. So, parallel to the loaded cores,
+    // there should be the ability to get the core _names_ that are loaded as well as all the core names _possible_.
+    //
+    // loadedNames above should only contain loaded core names. Every loaded name should be in allNames, but none of
+    // the names in nameCheck should be loaded and thus none of them should appear in loadedNames.
+    
+    Collection<String> allNames = cc.getAllCoreNames();
+    // Every core, loaded or not, should be in the accumulated core descriptors:
+    List<CoreDescriptor> descriptors = cc.getCoreDescriptors();
+
+    assertEquals("There should be as many coreDescriptors as coreNames", allNames.size(), descriptors.size());
+    for (CoreDescriptor desc : descriptors) {
+      assertTrue("Name should have a corresponding descriptor", allNames.contains(desc.getName()));
+    }
+    
+    // First check that all loaded cores are in allNames.
+    for (String name : loadedNames) {                                                                                        
+      assertTrue("Loaded core " + name + " should have been found in the list of all possible core names",
+          allNames.contains(name));
+    }
+
+    for (String name : nameCheck) {
+      assertTrue("Not-currently-loaded core " + name + " should have been found in the list of all possible core names",
+          allNames.contains(name));
     }
   }
 
   public static void checkInCores(CoreContainer cc, String... nameCheck) {
-    Collection<String> names = cc.getCoreNames();
+    Collection<String> loadedNames = cc.getCoreNames();
+    
+    assertEquals("There whould be exactly as many loaded cores as loaded names returned. ", 
+        loadedNames.size(), nameCheck.length);
+    
     for (String name : nameCheck) {
-      assertTrue("core " + name + " was not found in the list of cores", names.contains(name));
+      assertTrue("core " + name + " was not found in the list of cores", loadedNames.contains(name));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/52632cfc/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
index be8e96d..faf6707 100644
--- a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
+++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
@@ -2002,8 +2002,7 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
     FileUtils.copyFile(new File(top, "solrconfig.snippet.randomindexconfig.xml"), new File(subHome, "solrconfig.snippet.randomindexconfig.xml"));
   }
 
-  // Creates minimal full setup, including the old solr.xml file that used to be hard coded in ConfigSolrXmlOld
-  // TODO: remove for 5.0
+  // Creates minimal full setup, including solr.xml
   public static void copyMinFullSetup(File dstRoot) throws IOException {
     if (! dstRoot.exists()) {
       assertTrue("Failed to make subdirectory ", dstRoot.mkdirs());
@@ -2013,6 +2012,15 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
     copyMinConf(dstRoot);
   }
 
+  // Just copies the file indicated to the tmp home directory naming it "solr.xml"
+  public static void copyXmlToHome(File dstRoot, String fromFile) throws IOException {
+    if (! dstRoot.exists()) {
+      assertTrue("Failed to make subdirectory ", dstRoot.mkdirs());
+    }
+    File xmlF = new File(SolrTestCaseJ4.TEST_HOME(), fromFile);
+    FileUtils.copyFile(xmlF, new File(dstRoot, "solr.xml"));
+    
+  }
   // Creates a consistent configuration, _including_ solr.xml at dstRoot. Creates collection1/conf and copies
   // the stock files in there.
 
@@ -2020,7 +2028,6 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
     if (!dstRoot.exists()) {
       assertTrue("Failed to make subdirectory ", dstRoot.mkdirs());
     }
-
     FileUtils.copyFile(new File(SolrTestCaseJ4.TEST_HOME(), "solr.xml"), new File(dstRoot, "solr.xml"));
 
     File subHome = new File(dstRoot, collection + File.separator + "conf");


[11/14] lucene-solr:jira/solr-9959: SOLR-9745: check exit code only if process has finished

Posted by ab...@apache.org.
SOLR-9745: check exit code only if process has finished


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/4c737b8d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/4c737b8d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/4c737b8d

Branch: refs/heads/jira/solr-9959
Commit: 4c737b8df9b130cf530d17271576730e21d5b4cc
Parents: 23b002a
Author: Mikhail Khludnev <mk...@apache.org>
Authored: Tue Apr 4 11:01:19 2017 +0300
Committer: Mikhail Khludnev <mk...@apache.org>
Committed: Tue Apr 4 11:02:59 2017 +0300

----------------------------------------------------------------------
 solr/core/src/java/org/apache/solr/util/SolrCLI.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4c737b8d/solr/core/src/java/org/apache/solr/util/SolrCLI.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/SolrCLI.java b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
index 3572ea4..7be76af 100644
--- a/solr/core/src/java/org/apache/solr/util/SolrCLI.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
@@ -2939,7 +2939,7 @@ public class SolrCLI {
           // safe to ignore ...
           Thread.interrupted();
         }
-        if (handler.getExitValue() != 0) {
+        if (handler.hasResult() && handler.getExitValue() != 0) {
           throw new Exception("Failed to start Solr using command: "+startCmd+" Exception : "+handler.getException());
         }
       } else {
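
For context, a sketch of the guarded pattern (assuming org.apache.commons.exec's DefaultExecutor,
DefaultExecuteResultHandler and CommandLine; the exact handler type used by SolrCLI may differ, and getExitValue()
is only meaningful once the watched process has actually finished):

    DefaultExecutor executor = new DefaultExecutor();
    DefaultExecuteResultHandler handler = new DefaultExecuteResultHandler();
    executor.execute(new CommandLine("bin/solr"), handler);  // non-blocking start
    // ... some time later, possibly before the child process has exited:
    if (handler.hasResult() && handler.getExitValue() != 0) {
      // only inspect the exit code once a result is actually available
    }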


[12/14] lucene-solr:jira/solr-9959: SOLR-10416: The JSON output of /admin/metrics is fixed to write the container as a map (SimpleOrderedMap) instead of an array (NamedList)

Posted by ab...@apache.org.
SOLR-10416: The JSON output of /admin/metrics is fixed to write the container as a map (SimpleOrderedMap) instead of an array (NamedList)


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/ee98cdc7
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/ee98cdc7
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/ee98cdc7

Branch: refs/heads/jira/solr-9959
Commit: ee98cdc79014af0bd309ab4298fdbaeb38ee402b
Parents: 4c737b8
Author: Shalin Shekhar Mangar <sh...@apache.org>
Authored: Tue Apr 4 14:20:31 2017 +0530
Committer: Shalin Shekhar Mangar <sh...@apache.org>
Committed: Tue Apr 4 14:20:31 2017 +0530

----------------------------------------------------------------------
 solr/CHANGES.txt                                              | 3 +++
 .../java/org/apache/solr/handler/admin/MetricsHandler.java    | 3 ++-
 .../org/apache/solr/handler/admin/MetricsHandlerTest.java     | 7 ++++---
 3 files changed, 9 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ee98cdc7/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 8a5c3e8..83f1440 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -198,6 +198,9 @@ Bug Fixes
 * SOLR-10383: Fix debug related NullPointerException in solr/contrib/ltr OriginalScoreFeature class.
   (Vitezslav Zak, Christine Poerschke)
 
+* SOLR-10416: The JSON output of /admin/metrics is fixed to write the container as a
+  map (SimpleOrderedMap) instead of an array (NamedList). (shalin)
+
 ==================  6.5.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ee98cdc7/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
index 25f317c..4dc86d9 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
@@ -33,6 +33,7 @@ import com.codahale.metrics.MetricRegistry;
 import com.codahale.metrics.Timer;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.handler.RequestHandlerBase;
@@ -79,7 +80,7 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
     List<MetricFilter> metricFilters = metricTypes.stream().map(MetricType::asMetricFilter).collect(Collectors.toList());
     Set<String> requestedRegistries = parseRegistries(req);
 
-    NamedList response = new NamedList();
+    NamedList response = new SimpleOrderedMap();
     for (String registryName : requestedRegistries) {
       MetricRegistry registry = metricManager.registry(registryName);
       response.add(registryName, MetricUtils.toNamedList(registry, metricFilters, mustMatchFilter, false,
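
A brief illustration of why the container type matters to JSON clients (SimpleOrderedMap is a NamedList subclass that
is always written as a JSON object, while a plain NamedList may be serialized as a flat array of alternating keys and
values; registryMetrics below is illustrative):

    NamedList<Object> response = new SimpleOrderedMap<>();   // serializes as {"solr.node": {...}}
    // versus: new NamedList<>()                              // may serialize as ["solr.node", {...}]
    response.add("solr.node", registryMetrics);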

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ee98cdc7/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
index 81e14d9..2f84997 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
@@ -22,6 +22,7 @@ import java.util.Map;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.response.SolrQueryResponse;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -147,9 +148,9 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
     handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", "group", "node", "type", "timer", "prefix", "CONTAINER.cores"), resp);
     values = resp.getValues();
     assertNotNull(values.get("metrics"));
-    values = (NamedList) values.get("metrics");
-    assertEquals(1, values.size());
-    assertEquals(0, ((NamedList)values.get("solr.node")).size());
+    SimpleOrderedMap map = (SimpleOrderedMap) values.get("metrics");
+    assertEquals(1, map.size());
+    assertEquals(0, ((NamedList)map.get("solr.node")).size());
   }
 
   @Test
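
To illustrate the SOLR-10416 behaviour change in isolation (a minimal sketch, not part of the patch; the class name is hypothetical): Solr's default JSON response writer renders a plain NamedList as a flat ["name", value, ...] array, while SimpleOrderedMap, a NamedList subclass, is rendered as a JSON object, so switching the top-level metrics container to SimpleOrderedMap is what turns the /admin/metrics output into a map.

import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;

public class MetricsContainerSketch {
  public static void main(String[] args) {
    // Written by the default JSON writer as ["solr.node", {...}]: a flat array.
    NamedList<Object> asArray = new NamedList<>();
    asArray.add("solr.node", new SimpleOrderedMap<>());

    // Written as {"solr.node": {...}}: a JSON object, which is what this fix
    // switches the top-level /admin/metrics container to.
    SimpleOrderedMap<Object> asMap = new SimpleOrderedMap<>();
    asMap.add("solr.node", new SimpleOrderedMap<>());

    // toString() looks the same for both; the difference only shows up in the
    // wire format produced by Solr's response writers.
    System.out.println(asArray);
    System.out.println(asMap);
  }
}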


[07/14] lucene-solr:jira/solr-9959: SOLR-10338: Revert configure SecureRandom non blocking for tests. (reverted from commit 0445f8200e0630e1bb8b7117f200529ed1259747)

Posted by ab...@apache.org.
SOLR-10338: Revert configure SecureRandom non blocking for tests. (reverted from commit 0445f8200e0630e1bb8b7117f200529ed1259747)


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/0bf7a5ff
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/0bf7a5ff
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/0bf7a5ff

Branch: refs/heads/jira/solr-9959
Commit: 0bf7a5ff335d2825a014862f5d04e70a7f5c016e
Parents: e872dc7
Author: Mark Miller <ma...@apache.org>
Authored: Mon Apr 3 22:00:08 2017 -0300
Committer: Mark Miller <ma...@apache.org>
Committed: Mon Apr 3 22:00:08 2017 -0300

----------------------------------------------------------------------
 dev-tools/idea/.idea/workspace.xml              |  2 +-
 dev-tools/maven/pom.xml.template                |  1 -
 lucene/common-build.xml                         |  3 --
 solr/CHANGES.txt                                |  2 -
 .../test/SecureRandomAlgorithmTesterApp.java    | 41 --------------------
 .../java/org/apache/solr/SolrTestCaseJ4.java    |  9 -----
 6 files changed, 1 insertion(+), 57 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0bf7a5ff/dev-tools/idea/.idea/workspace.xml
----------------------------------------------------------------------
diff --git a/dev-tools/idea/.idea/workspace.xml b/dev-tools/idea/.idea/workspace.xml
index e22108f..0ca7f0c 100644
--- a/dev-tools/idea/.idea/workspace.xml
+++ b/dev-tools/idea/.idea/workspace.xml
@@ -2,7 +2,7 @@
 <project version="4">
   <component name="RunManager" selected="JUnit.Lucene core">
     <configuration default="true" type="JUnit" factoryName="JUnit">
-      <option name="VM_PARAMETERS" value="-ea -Djava.security.egd=file:/dev/./urandom" />
+      <option name="VM_PARAMETERS" value="-ea" />
     </configuration>
     <configuration default="false" name="Lucene core" type="JUnit" factoryName="JUnit">
       <module name="lucene-core-tests" />

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0bf7a5ff/dev-tools/maven/pom.xml.template
----------------------------------------------------------------------
diff --git a/dev-tools/maven/pom.xml.template b/dev-tools/maven/pom.xml.template
index 6b7f915..cd8d6b8 100644
--- a/dev-tools/maven/pom.xml.template
+++ b/dev-tools/maven/pom.xml.template
@@ -277,7 +277,6 @@
               <tests.postingsformat>${tests.postingsformat}</tests.postingsformat>
               <tests.timezone>${tests.timezone}</tests.timezone>
               <tests.verbose>${tests.verbose}</tests.verbose>
-              <java.security.egd>file:/dev/./urandom</java.security.egd>
             </systemPropertyVariables>
           </configuration>
         </plugin>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0bf7a5ff/lucene/common-build.xml
----------------------------------------------------------------------
diff --git a/lucene/common-build.xml b/lucene/common-build.xml
index aee7899..327a01d 100644
--- a/lucene/common-build.xml
+++ b/lucene/common-build.xml
@@ -1062,9 +1062,6 @@
 
             <sysproperty key="tests.src.home" value="${user.dir}" />
 
-            <!-- replaces default random source to the nonblocking variant -->
-            <sysproperty key="java.security.egd" value="file:/dev/./urandom"/>
-
             <!-- Only pass these to the test JVMs if defined in ANT. -->
             <syspropertyset>
                 <propertyref prefix="tests.maxfailures" />

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0bf7a5ff/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index c1a7503..8a5c3e8 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -180,8 +180,6 @@ Other Changes
 
 * SOLR-10147: Admin UI -> Cloud -> Graph: Impossible to see shard state (Amrit Sarkar, janhoy)
 
-* SOLR-10338: Configure SecureRandom non blocking for tests. (Mihaly Toth, hossman, Ishan Chattopadhyaya, via Mark Miller)
-
 * SOLR-10399: Generalize some internal facet logic to simplify points/non-points field handling (Adrien Grand, hossman)
 
 * SOLR-7383: New DataImportHandler 'atom' example, replacing broken 'rss' example (Alexandre Rafalovitch)

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0bf7a5ff/solr/core/src/test/SecureRandomAlgorithmTesterApp.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/SecureRandomAlgorithmTesterApp.java b/solr/core/src/test/SecureRandomAlgorithmTesterApp.java
deleted file mode 100644
index 44f79e9..0000000
--- a/solr/core/src/test/SecureRandomAlgorithmTesterApp.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.security.NoSuchAlgorithmException;
-import java.security.SecureRandom;
-
-public class SecureRandomAlgorithmTesterApp {
-  public static void main(String[] args) throws NoSuchAlgorithmException {
-    String algorithm = args[0];
-    String method = args[1];
-    int amount = Integer.valueOf(args[2]);
-    SecureRandom secureRandom;
-    if(algorithm.equals("default"))
-      secureRandom = new SecureRandom();
-    else 
-      secureRandom = SecureRandom.getInstance(algorithm);
-    System.out.println("Algorithm:" + secureRandom.getAlgorithm());
-    switch(method) {
-      case "seed": secureRandom.generateSeed(amount); break;
-      case "bytes": secureRandom.nextBytes(new byte[amount]); break;
-      case "long": secureRandom.nextLong(); break;
-      case "int": secureRandom.nextInt(); break;
-      default: throw new IllegalArgumentException("Not supported random function: " + method);
-    }
-    System.out.println("SecureRandom function invoked");
-  }
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0bf7a5ff/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
index faf6707..0d4cedd 100644
--- a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
+++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
@@ -39,7 +39,6 @@ import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import java.security.SecureRandom;
 import java.time.Instant;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -2439,14 +2438,6 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
     }
   }
 
-  @BeforeClass
-  public static void assertNonBlockingRandomGeneratorAvailable() {
-    if(Boolean.parseBoolean(System.getProperty("test.solr.allow.any.securerandom","false")))
-      return;
-    // Use -Djava.security.egd=file:/dev/./urandom VM option if you hit this 
-    assertEquals("SHA1PRNG", new SecureRandom().getAlgorithm());
-  }
-  
   @AfterClass
   public static void unchooseMPForMP() {
     System.clearProperty(SYSTEM_PROPERTY_SOLR_TESTS_USEMERGEPOLICYFACTORY);
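
For context on what the reverted hook checked (a minimal sketch, not part of this commit; the class name is illustrative): with the test JVMs started with -Djava.security.egd=file:/dev/./urandom, the removed @BeforeClass assertion expected the JVM's default SecureRandom to report SHA1PRNG. The same check can be run by hand:

import java.security.SecureRandom;

public class PrintDefaultSecureRandom {
  public static void main(String[] args) {
    // Prints the algorithm the JVM selected for new SecureRandom(),
    // e.g. "SHA1PRNG" or "NativePRNG", depending on java.security.egd and the OS.
    System.out.println(new SecureRandom().getAlgorithm());
  }
}

Running it with and without -Djava.security.egd=file:/dev/./urandom shows the effect the reverted configuration relied on.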


[09/14] lucene-solr:jira/solr-9959: LUCENE-7756: Only record the major Lucene version that created the index, and record the minimum Lucene version that contributed to segments.

Posted by ab...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/java/org/apache/lucene/index/ParallelLeafReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/ParallelLeafReader.java b/lucene/core/src/java/org/apache/lucene/index/ParallelLeafReader.java
index eee0c16..5fb28b0 100644
--- a/lucene/core/src/java/org/apache/lucene/index/ParallelLeafReader.java
+++ b/lucene/core/src/java/org/apache/lucene/index/ParallelLeafReader.java
@@ -28,6 +28,7 @@ import java.util.TreeMap;
 
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.Version;
 
 /** An {@link LeafReader} which reads multiple, parallel indexes.  Each index
  * added must have the same number of documents, but typically each contains
@@ -56,7 +57,7 @@ public class ParallelLeafReader extends LeafReader {
   private final boolean closeSubReaders;
   private final int maxDoc, numDocs;
   private final boolean hasDeletions;
-  private final Sort indexSort;
+  private final LeafMetaData metaData;
   private final SortedMap<String,LeafReader> fieldToReader = new TreeMap<>();
   private final SortedMap<String,LeafReader> tvFieldToReader = new TreeMap<>();
   
@@ -104,16 +105,25 @@ public class ParallelLeafReader extends LeafReader {
     FieldInfos.Builder builder = new FieldInfos.Builder();
 
     Sort indexSort = null;
+    int createdVersionMajor = -1;
 
     // build FieldInfos and fieldToReader map:
     for (final LeafReader reader : this.parallelReaders) {
-      Sort leafIndexSort = reader.getIndexSort();
+      LeafMetaData leafMetaData = reader.getMetaData();
+      
+      Sort leafIndexSort = leafMetaData.getSort();
       if (indexSort == null) {
         indexSort = leafIndexSort;
       } else if (leafIndexSort != null && indexSort.equals(leafIndexSort) == false) {
         throw new IllegalArgumentException("cannot combine LeafReaders that have different index sorts: saw both sort=" + indexSort + " and " + leafIndexSort);
       }
 
+      if (createdVersionMajor == -1) {
+        createdVersionMajor = leafMetaData.getCreatedVersionMajor();
+      } else if (createdVersionMajor != leafMetaData.getCreatedVersionMajor()) {
+        throw new IllegalArgumentException("cannot combine LeafReaders that have different creation versions: saw both version=" + createdVersionMajor + " and " + leafMetaData.getCreatedVersionMajor());
+      }
+
       final FieldInfos readerFieldInfos = reader.getFieldInfos();
       for (FieldInfo fieldInfo : readerFieldInfos) {
         // NOTE: first reader having a given field "wins":
@@ -126,8 +136,24 @@ public class ParallelLeafReader extends LeafReader {
         }
       }
     }
+    if (createdVersionMajor == -1) {
+      // empty reader
+      createdVersionMajor = Version.LATEST.major;
+    }
+
+    Version minVersion = Version.LATEST;
+    for (final LeafReader reader : this.parallelReaders) {
+      Version leafVersion = reader.getMetaData().getMinVersion();
+      if (leafVersion == null) {
+        minVersion = null;
+        break;
+      } else if (minVersion.onOrAfter(leafVersion)) {
+        minVersion = leafVersion;
+      }
+    }
+
     fieldInfos = builder.finish();
-    this.indexSort = indexSort;
+    this.metaData = new LeafMetaData(createdVersionMajor, minVersion, indexSort);
     
     // build Fields instance
     for (final LeafReader reader : this.parallelReaders) {
@@ -358,8 +384,8 @@ public class ParallelLeafReader extends LeafReader {
   }
 
   @Override
-  public Sort getIndexSort() {
-    return indexSort;
+  public LeafMetaData getMetaData() {
+    return metaData;
   }
 
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java b/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java
index 3cd465c..d4dd4a4 100644
--- a/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java
+++ b/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java
@@ -140,7 +140,7 @@ class ReadersAndUpdates {
   public SegmentReader getReader(IOContext context) throws IOException {
     if (reader == null) {
       // We steal returned ref:
-      reader = new SegmentReader(info, context);
+      reader = new SegmentReader(info, writer.segmentInfos.getIndexCreatedVersionMajor(), context);
       if (liveDocs == null) {
         liveDocs = reader.getLiveDocs();
       }
@@ -552,7 +552,7 @@ class ReadersAndUpdates {
 
       // reader could be null e.g. for a just merged segment (from
       // IndexWriter.commitMergedDeletes).
-      final SegmentReader reader = this.reader == null ? new SegmentReader(info, IOContext.READONCE) : this.reader;
+      final SegmentReader reader = this.reader == null ? new SegmentReader(info, writer.segmentInfos.getIndexCreatedVersionMajor(), IOContext.READONCE) : this.reader;
       try {
         // clone FieldInfos so that we can update their dvGen separately from
         // the reader's infos and write them to a new fieldInfos_gen file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/java/org/apache/lucene/index/SegmentInfo.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentInfo.java b/lucene/core/src/java/org/apache/lucene/index/SegmentInfo.java
index ec12365..1c02441 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SegmentInfo.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SegmentInfo.java
@@ -77,7 +77,13 @@ public final class SegmentInfo {
   // The format expected is "x.y" - "2.x" for pre-3.0 indexes (or null), and
   // specific versions afterwards ("3.0.0", "3.1.0" etc.).
   // see o.a.l.util.Version.
-  private Version version;
+  private final Version version;
+
+  // Tracks the minimum version that contributed documents to a segment. For
+  // flush segments, that is the version that wrote it. For merged segments,
+  // this is the minimum minVersion of all the segments that have been merged
+  // into this segment
+  Version minVersion;
 
   void setDiagnostics(Map<String, String> diagnostics) {
     this.diagnostics = Objects.requireNonNull(diagnostics);
@@ -94,12 +100,13 @@ public final class SegmentInfo {
    * <p>Note: this is public only to allow access from
    * the codecs package.</p>
    */
-  public SegmentInfo(Directory dir, Version version, String name, int maxDoc,
+  public SegmentInfo(Directory dir, Version version, Version minVersion, String name, int maxDoc,
                      boolean isCompoundFile, Codec codec, Map<String,String> diagnostics,
                      byte[] id, Map<String,String> attributes, Sort indexSort) {
     assert !(dir instanceof TrackingDirectoryWrapper);
     this.dir = Objects.requireNonNull(dir);
     this.version = Objects.requireNonNull(version);
+    this.minVersion = minVersion;
     this.name = Objects.requireNonNull(name);
     this.maxDoc = maxDoc;
     this.isCompoundFile = isCompoundFile;
@@ -233,6 +240,14 @@ public final class SegmentInfo {
     return version;
   }
 
+  /**
+   * Return the minimum Lucene version that contributed documents to this
+   * segment, or {@code null} if it is unknown.
+   */
+  public Version getMinVersion() {
+    return minVersion;
+  }
+
   /** Return the id that uniquely identifies this segment. */
   public byte[] getId() {
     return id.clone();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java b/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java
index 12305d0..e463259 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java
@@ -161,16 +161,19 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
   /** Version of the oldest segment in the index, or null if there are no segments. */
   private Version minSegmentLuceneVersion;
 
-  /** The Lucene version that was used to create the index. */
-  private final Version indexCreatedVersion;
+  /** The Lucene version major that was used to create the index. */
+  private final int indexCreatedVersionMajor;
 
   /** Sole constructor.
-   *  @param indexCreatedVersion the Lucene version at index creation time, or {@code null} if the index was created before 7.0 */
-  public SegmentInfos(Version indexCreatedVersion) {
-    if (indexCreatedVersion != null && indexCreatedVersion.onOrAfter(Version.LUCENE_7_0_0) == false) {
-      throw new IllegalArgumentException("indexCreatedVersion may only be non-null if the index was created on or after 7.0, got " + indexCreatedVersion);
+   *  @param indexCreatedVersionMajor the Lucene version major at index creation time, or 6 if the index was created before 7.0 */
+  public SegmentInfos(int indexCreatedVersionMajor) {
+    if (indexCreatedVersionMajor > Version.LATEST.major) {
+      throw new IllegalArgumentException("indexCreatedVersionMajor is in the future: " + indexCreatedVersionMajor);
     }
-    this.indexCreatedVersion = indexCreatedVersion;
+    if (indexCreatedVersionMajor < 6) {
+      throw new IllegalArgumentException("indexCreatedVersionMajor must be >= 6, got: " + indexCreatedVersionMajor);
+    }
+    this.indexCreatedVersionMajor = indexCreatedVersionMajor;
   }
 
   /** Returns {@link SegmentCommitInfo} at the provided
@@ -314,24 +317,9 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
       throw new IndexFormatTooOldException(input, "this index is too old (version: " + luceneVersion + ")");
     }
 
-    Version indexCreatedVersion;
+    int indexCreatedVersion = 6;
     if (format >= VERSION_70) {
-      byte b = input.readByte();
-      switch (b) {
-        case 0:
-          // version is not known: pre-7.0 index that has been modified since the 7.0 upgrade
-          indexCreatedVersion = null;
-          break;
-        case 1:
-          // version is known: index has been created on or after 7.0
-          indexCreatedVersion = Version.fromBits(input.readVInt(), input.readVInt(), input.readVInt());
-          break;
-        default:
-          throw new CorruptIndexException("Illegal byte value for a boolean: " + b + ", expected 0 or 1", input);
-      }
-    } else {
-      // pre-7.0 index that has not been modified since the 7.0 upgrade
-      indexCreatedVersion = null;
+      indexCreatedVersion = input.readVInt();
     }
 
     SegmentInfos infos = new SegmentInfos(indexCreatedVersion);
@@ -399,6 +387,14 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
       if (segmentVersion.onOrAfter(infos.minSegmentLuceneVersion) == false) {
         throw new CorruptIndexException("segments file recorded minSegmentLuceneVersion=" + infos.minSegmentLuceneVersion + " but segment=" + info + " has older version=" + segmentVersion, input);
       }
+
+      if (infos.indexCreatedVersionMajor >= 7 && segmentVersion.major < infos.indexCreatedVersionMajor) {
+        throw new CorruptIndexException("segments file recorded indexCreatedVersionMajor=" + infos.indexCreatedVersionMajor + " but segment=" + info + " has older version=" + segmentVersion, input);
+      }
+
+      if (infos.indexCreatedVersionMajor >= 7 && info.getMinVersion() == null) {
+        throw new CorruptIndexException("segments infos must record minVersion with indexCreatedVersionMajor=" + infos.indexCreatedVersionMajor, input);
+      }
     }
 
     infos.userData = input.readMapOfStrings();
@@ -495,16 +491,7 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
     out.writeVInt(Version.LATEST.bugfix);
     //System.out.println(Thread.currentThread().getName() + ": now write " + out.getName() + " with version=" + version);
 
-    if (indexCreatedVersion != null) {
-      // 7.0+ index
-      out.writeByte((byte) 1);
-      out.writeVInt(indexCreatedVersion.major);
-      out.writeVInt(indexCreatedVersion.minor);
-      out.writeVInt(indexCreatedVersion.bugfix);
-    } else {
-      // pre-7.0 index
-      out.writeByte((byte) 0);
-    }
+    out.writeVInt(indexCreatedVersionMajor);
 
     out.writeLong(version); 
     out.writeInt(counter); // write counter
@@ -531,6 +518,9 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
     // write infos
     for (SegmentCommitInfo siPerCommit : this) {
       SegmentInfo si = siPerCommit.info;
+      if (indexCreatedVersionMajor >= 7 && si.minVersion == null) {
+        throw new IllegalStateException("Segments must record minVersion if they have been created on or after Lucene 7: " + si);
+      }
       out.writeString(si.name);
       byte segmentID[] = si.getId();
       // TODO: remove this in lucene 6, we don't need to include 4.x segments in commits anymore
@@ -917,6 +907,10 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
 
   /** applies all changes caused by committing a merge to this SegmentInfos */
   void applyMergeChanges(MergePolicy.OneMerge merge, boolean dropSegment) {
+    if (indexCreatedVersionMajor >= 7 && merge.info.info.minVersion == null) {
+      throw new IllegalArgumentException("All segments must record the minVersion for indices created on or after Lucene 7");
+    }
+
     final Set<SegmentCommitInfo> mergedAway = new HashSet<>(merge.segments);
     boolean inserted = false;
     int newSegIdx = 0;
@@ -981,6 +975,10 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
 
   /** Appends the provided {@link SegmentCommitInfo}. */
   public void add(SegmentCommitInfo si) {
+    if (indexCreatedVersionMajor >= 7 && si.info.minVersion == null) {
+      throw new IllegalArgumentException("All segments must record the minVersion for indices created on or after Lucene 7");
+    }
+    
     segments.add(si);
   }
   
@@ -1038,10 +1036,11 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
     return minSegmentLuceneVersion;
   }
 
-  /** Return the version that was used to initially create the index. This
-   *  version is set when the index is first created and then never changes.
-   *  This returns {@code null} if the index was created before 7.0. */
-  public Version getIndexCreatedVersion() {
-    return indexCreatedVersion;
+  /** Return the version major that was used to initially create the index.
+   *  This version is set when the index is first created and then never
+   *  changes. This information was added as of version 7.0 so older
+   *  indices report 6 as a creation version. */
+  public int getIndexCreatedVersionMajor() {
+    return indexCreatedVersionMajor;
   }
 }
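
A minimal sketch of reading the new metadata back from an existing index (not part of the patch; assumes a post-LUCENE-7756 classpath, takes the index directory as args[0], and uses an illustrative class name):

import java.nio.file.Paths;

import org.apache.lucene.index.SegmentCommitInfo;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class ShowIndexVersions {
  public static void main(String[] args) throws Exception {
    try (Directory dir = FSDirectory.open(Paths.get(args[0]))) {
      SegmentInfos infos = SegmentInfos.readLatestCommit(dir);
      // Major version that first created the index; pre-7.0 indexes report 6.
      System.out.println("indexCreatedVersionMajor=" + infos.getIndexCreatedVersionMajor());
      // Version of the oldest segment in the commit, or null if there are none.
      System.out.println("minSegmentLuceneVersion=" + infos.getMinSegmentLuceneVersion());
      for (SegmentCommitInfo sci : infos) {
        // Minimum version that contributed documents to this segment,
        // or null if unknown (pre-7.0 segments).
        System.out.println(sci.info.name + " minVersion=" + sci.info.getMinVersion());
      }
    }
  }
}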

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java b/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java
index d23f010..c67b92d 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java
@@ -30,6 +30,7 @@ import org.apache.lucene.codecs.TermVectorsWriter;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.util.InfoStream;
+import org.apache.lucene.util.Version;
 
 /**
  * The SegmentMerger class combines two or more Segments, represented by an
@@ -59,6 +60,19 @@ final class SegmentMerger {
     this.codec = segmentInfo.getCodec();
     this.context = context;
     this.fieldInfosBuilder = new FieldInfos.Builder(fieldNumbers);
+    Version minVersion = Version.LATEST;
+    for (CodecReader reader : readers) {
+      Version leafMinVersion = reader.getMetaData().getMinVersion();
+      if (leafMinVersion == null) {
+        minVersion = null;
+        break;
+      }
+      if (minVersion.onOrAfter(leafMinVersion)) {
+        minVersion = leafMinVersion;
+      }
+    }
+    assert segmentInfo.minVersion == null : "The min version should be set by SegmentMerger for merged segments";
+    segmentInfo.minVersion = minVersion;
     if (mergeState.infoStream.isEnabled("SM")) {
       if (segmentInfo.getIndexSort() != null) {
         mergeState.infoStream.message("SM", "index sort during merge: " + segmentInfo.getIndexSort());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java b/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java
index 5dbc492..ccbcdf9 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java
@@ -30,7 +30,6 @@ import org.apache.lucene.codecs.NormsProducer;
 import org.apache.lucene.codecs.PointsReader;
 import org.apache.lucene.codecs.StoredFieldsReader;
 import org.apache.lucene.codecs.TermVectorsReader;
-import org.apache.lucene.search.Sort;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.util.Bits;
@@ -46,6 +45,7 @@ import org.apache.lucene.util.IOUtils;
 public final class SegmentReader extends CodecReader {
        
   private final SegmentCommitInfo si;
+  private final LeafMetaData metaData;
   private final Bits liveDocs;
 
   // Normally set to si.maxDoc - si.delDocCount, unless we
@@ -68,8 +68,9 @@ public final class SegmentReader extends CodecReader {
    * @throws IOException if there is a low-level IO error
    */
   // TODO: why is this public?
-  public SegmentReader(SegmentCommitInfo si, IOContext context) throws IOException {
+  public SegmentReader(SegmentCommitInfo si, int createdVersionMajor, IOContext context) throws IOException {
     this.si = si;
+    this.metaData = new LeafMetaData(createdVersionMajor, si.info.getMinVersion(), si.info.getIndexSort());
 
     // We pull liveDocs/DV updates from disk:
     this.isNRT = false;
@@ -133,6 +134,7 @@ public final class SegmentReader extends CodecReader {
       throw new IllegalArgumentException("maxDoc=" + si.info.maxDoc() + " but liveDocs.size()=" + liveDocs.length());
     }
     this.si = si;
+    this.metaData = sr.getMetaData();
     this.liveDocs = liveDocs;
     this.isNRT = isNRT;
     this.numDocs = numDocs;
@@ -330,7 +332,7 @@ public final class SegmentReader extends CodecReader {
   }
 
   @Override
-  public Sort getIndexSort() {
-    return si.info.getIndexSort();
+  public LeafMetaData getMetaData() {
+    return metaData;
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/java/org/apache/lucene/index/SlowCodecReaderWrapper.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SlowCodecReaderWrapper.java b/lucene/core/src/java/org/apache/lucene/index/SlowCodecReaderWrapper.java
index 99f35bc..b115a85 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SlowCodecReaderWrapper.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SlowCodecReaderWrapper.java
@@ -26,7 +26,6 @@ import org.apache.lucene.codecs.NormsProducer;
 import org.apache.lucene.codecs.PointsReader;
 import org.apache.lucene.codecs.StoredFieldsReader;
 import org.apache.lucene.codecs.TermVectorsReader;
-import org.apache.lucene.search.Sort;
 import org.apache.lucene.util.Bits;
 
 /**
@@ -128,8 +127,8 @@ public final class SlowCodecReaderWrapper {
         }
 
         @Override
-        public Sort getIndexSort() {
-          return reader.getIndexSort();
+        public LeafMetaData getMetaData() {
+          return reader.getMetaData();
         }
       };
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java b/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java
index 46f81af..f0e7e98 100644
--- a/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java
+++ b/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java
@@ -61,7 +61,7 @@ public final class StandardDirectoryReader extends DirectoryReader {
         boolean success = false;
         try {
           for (int i = sis.size()-1; i >= 0; i--) {
-            readers[i] = new SegmentReader(sis.info(i), IOContext.READ);
+            readers[i] = new SegmentReader(sis.info(i), sis.getIndexCreatedVersionMajor(), IOContext.READ);
           }
 
           // This may throw CorruptIndexException if there are too many docs, so
@@ -181,7 +181,7 @@ public final class StandardDirectoryReader extends DirectoryReader {
         if (oldReader == null || commitInfo.info.getUseCompoundFile() != oldReader.getSegmentInfo().info.getUseCompoundFile()) {
 
           // this is a new reader; in case we hit an exception we can decRef it safely
-          newReader = new SegmentReader(commitInfo, IOContext.READ);
+          newReader = new SegmentReader(commitInfo, infos.getIndexCreatedVersionMajor(), IOContext.READ);
           newReaders[i] = newReader;
         } else {
           if (oldReader.isNRT) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/java/org/apache/lucene/search/EarlyTerminatingSortingCollector.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/EarlyTerminatingSortingCollector.java b/lucene/core/src/java/org/apache/lucene/search/EarlyTerminatingSortingCollector.java
index 8ddd82f..623df65 100644
--- a/lucene/core/src/java/org/apache/lucene/search/EarlyTerminatingSortingCollector.java
+++ b/lucene/core/src/java/org/apache/lucene/search/EarlyTerminatingSortingCollector.java
@@ -98,7 +98,7 @@ public class EarlyTerminatingSortingCollector extends FilterCollector {
 
   @Override
   public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
-    Sort segmentSort = context.reader().getIndexSort();
+    Sort segmentSort = context.reader().getMetaData().getSort();
     if (segmentSort != null && canEarlyTerminate(sort, segmentSort) == false) {
       throw new IllegalStateException("Cannot early terminate with sort order " + sort + " if segments are sorted with " + segmentSort);
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/test/org/apache/lucene/codecs/lucene62/TestLucene62SegmentInfoFormat.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene62/TestLucene62SegmentInfoFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene62/TestLucene62SegmentInfoFormat.java
deleted file mode 100644
index 54110f7..0000000
--- a/lucene/core/src/test/org/apache/lucene/codecs/lucene62/TestLucene62SegmentInfoFormat.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.lucene.codecs.lucene62;
-
-import org.apache.lucene.codecs.Codec;
-import org.apache.lucene.index.BaseSegmentInfoFormatTestCase;
-import org.apache.lucene.util.TestUtil;
-import org.apache.lucene.util.Version;
-
-/**
- * Tests Lucene62SegmentInfoFormat
- */
-public class TestLucene62SegmentInfoFormat extends BaseSegmentInfoFormatTestCase {
-
-  @Override
-  protected Version[] getVersions() {
-    return new Version[] { Version.LATEST };
-  }
-
-  @Override
-  protected Codec getCodec() {
-    return TestUtil.getDefaultCodec();
-  }
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70SegmentInfoFormat.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70SegmentInfoFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70SegmentInfoFormat.java
new file mode 100644
index 0000000..3bf6a18
--- /dev/null
+++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70SegmentInfoFormat.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.codecs.lucene70;
+
+import org.apache.lucene.codecs.Codec;
+import org.apache.lucene.index.BaseSegmentInfoFormatTestCase;
+import org.apache.lucene.util.TestUtil;
+import org.apache.lucene.util.Version;
+
+public class TestLucene70SegmentInfoFormat extends BaseSegmentInfoFormatTestCase {
+
+  @Override
+  protected Version[] getVersions() {
+    return new Version[] { Version.LATEST };
+  }
+
+  @Override
+  protected Codec getCodec() {
+    return TestUtil.getDefaultCodec();
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java b/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java
index c842606..4625f73 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java
@@ -217,7 +217,7 @@ public class TestCodecs extends LuceneTestCase {
     final FieldInfos fieldInfos = builder.finish();
     final Directory dir = newDirectory();
     Codec codec = Codec.getDefault();
-    final SegmentInfo si = new SegmentInfo(dir, Version.LATEST, SEGMENT, 10000, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
+    final SegmentInfo si = new SegmentInfo(dir, Version.LATEST, Version.LATEST, SEGMENT, 10000, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
     
     this.write(si, fieldInfos, dir, fields);
     final FieldsProducer reader = codec.postingsFormat().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, newIOContext(random())));
@@ -274,7 +274,7 @@ public class TestCodecs extends LuceneTestCase {
     }
 
     Codec codec = Codec.getDefault();
-    final SegmentInfo si = new SegmentInfo(dir, Version.LATEST, SEGMENT, 10000, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
+    final SegmentInfo si = new SegmentInfo(dir, Version.LATEST, Version.LATEST, SEGMENT, 10000, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
     this.write(si, fieldInfos, dir, fields);
 
     if (VERBOSE) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java b/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java
index 34bde51..7b7ec99 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java
@@ -55,6 +55,7 @@ import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.StringHelper;
 import org.apache.lucene.util.TestUtil;
+import org.apache.lucene.util.Version;
 
 // TODO:
 //   - old parallel indices are only pruned on commit/close; can we do it on refresh?
@@ -414,7 +415,7 @@ public class TestDemoParallelLeafReader extends LuceneTestCase {
 
             SegmentInfos infos = SegmentInfos.readLatestCommit(dir);
             assert infos.size() == 1;
-            final LeafReader parLeafReader = new SegmentReader(infos.info(0), IOContext.DEFAULT);
+            final LeafReader parLeafReader = new SegmentReader(infos.info(0), Version.LATEST.major, IOContext.DEFAULT);
 
             //checkParallelReader(leaf, parLeafReader, schemaGen);
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/test/org/apache/lucene/index/TestDoc.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDoc.java b/lucene/core/src/test/org/apache/lucene/index/TestDoc.java
index 8b24b4d..a20dece 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestDoc.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestDoc.java
@@ -213,12 +213,12 @@ public class TestDoc extends LuceneTestCase {
   private SegmentCommitInfo merge(Directory dir, SegmentCommitInfo si1, SegmentCommitInfo si2, String merged, boolean useCompoundFile)
     throws Exception {
     IOContext context = newIOContext(random(), new IOContext(new MergeInfo(-1, -1, false, -1)));
-    SegmentReader r1 = new SegmentReader(si1, context);
-    SegmentReader r2 = new SegmentReader(si2, context);
+    SegmentReader r1 = new SegmentReader(si1, Version.LATEST.major, context);
+    SegmentReader r2 = new SegmentReader(si2, Version.LATEST.major, context);
 
     final Codec codec = Codec.getDefault();
     TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(si1.info.dir);
-    final SegmentInfo si = new SegmentInfo(si1.info.dir, Version.LATEST, merged, -1, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
+    final SegmentInfo si = new SegmentInfo(si1.info.dir, Version.LATEST, null, merged, -1, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
 
     SegmentMerger merger = new SegmentMerger(Arrays.<CodecReader>asList(r1, r2),
                                              si, InfoStream.getDefault(), trackingDir,
@@ -244,7 +244,7 @@ public class TestDoc extends LuceneTestCase {
 
   private void printSegment(PrintWriter out, SegmentCommitInfo si)
     throws Exception {
-    SegmentReader reader = new SegmentReader(si, newIOContext(random()));
+    SegmentReader reader = new SegmentReader(si, Version.LATEST.major, newIOContext(random()));
 
     for (int i = 0; i < reader.numDocs(); i++)
       out.println(reader.document(i));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java b/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java
index a814c4c..da96f98 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java
@@ -33,6 +33,7 @@ import org.apache.lucene.util.AttributeSource;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
+import org.apache.lucene.util.Version;
 
 public class TestDocumentWriter extends LuceneTestCase {
   private Directory dir;
@@ -62,7 +63,7 @@ public class TestDocumentWriter extends LuceneTestCase {
     SegmentCommitInfo info = writer.newestSegment();
     writer.close();
     //After adding the document, we should be able to read it back in
-    SegmentReader reader = new SegmentReader(info, newIOContext(random()));
+    SegmentReader reader = new SegmentReader(info, Version.LATEST.major, newIOContext(random()));
     assertTrue(reader != null);
     Document doc = reader.document(0);
     assertTrue(doc != null);
@@ -123,7 +124,7 @@ public class TestDocumentWriter extends LuceneTestCase {
     writer.commit();
     SegmentCommitInfo info = writer.newestSegment();
     writer.close();
-    SegmentReader reader = new SegmentReader(info, newIOContext(random()));
+    SegmentReader reader = new SegmentReader(info, Version.LATEST.major, newIOContext(random()));
 
     PostingsEnum termPositions = MultiFields.getTermPositionsEnum(reader, "repeated", new BytesRef("repeated"));
     assertTrue(termPositions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@@ -194,7 +195,7 @@ public class TestDocumentWriter extends LuceneTestCase {
     writer.commit();
     SegmentCommitInfo info = writer.newestSegment();
     writer.close();
-    SegmentReader reader = new SegmentReader(info, newIOContext(random()));
+    SegmentReader reader = new SegmentReader(info, Version.LATEST.major, newIOContext(random()));
 
     PostingsEnum termPositions = MultiFields.getTermPositionsEnum(reader, "f1", new BytesRef("a"));
     assertTrue(termPositions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@@ -236,7 +237,7 @@ public class TestDocumentWriter extends LuceneTestCase {
     writer.commit();
     SegmentCommitInfo info = writer.newestSegment();
     writer.close();
-    SegmentReader reader = new SegmentReader(info, newIOContext(random()));
+    SegmentReader reader = new SegmentReader(info, Version.LATEST.major, newIOContext(random()));
 
     PostingsEnum termPositions = reader.postings(new Term("preanalyzed", "term1"), PostingsEnum.ALL);
     assertTrue(termPositions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
index 4275056..4ef580b 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
@@ -2094,7 +2094,7 @@ public class TestIndexSorting extends LuceneTestCase {
     if (VERBOSE) {
       System.out.println("TEST: now compare r1=" + r1 + " r2=" + r2);
     }
-    assertEquals(sort, getOnlyLeafReader(r2).getIndexSort());
+    assertEquals(sort, getOnlyLeafReader(r2).getMetaData().getSort());
     assertReaderEquals("left: sorted by hand; right: sorted by Lucene", r1, r2);
     IOUtils.close(w1, w2, r1, r2, dir1, dir2);
   }
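
Outside the tests, the same migration applies to any code that inspected a segment's index sort (a minimal sketch, not part of the patch; illustrative class name, index directory passed as args[0]): the sort now lives on LeafMetaData, so reader.getIndexSort() becomes reader.getMetaData().getSort().

import java.nio.file.Paths;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Sort;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class ShowSegmentSorts {
  public static void main(String[] args) throws Exception {
    try (Directory dir = FSDirectory.open(Paths.get(args[0]));
         DirectoryReader reader = DirectoryReader.open(dir)) {
      for (LeafReaderContext ctx : reader.leaves()) {
        // Previously: ctx.reader().getIndexSort()
        Sort segmentSort = ctx.reader().getMetaData().getSort();
        System.out.println(ctx.reader() + " sort=" + segmentSort);
      }
    }
  }
}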

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
index 660280b..67add26 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
@@ -2805,7 +2805,7 @@ public class TestIndexWriter extends LuceneTestCase {
     IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
     w.commit();
     w.close();
-    assertEquals(Version.LATEST, SegmentInfos.readLatestCommit(dir).getIndexCreatedVersion());
+    assertEquals(Version.LATEST.major, SegmentInfos.readLatestCommit(dir).getIndexCreatedVersionMajor());
     dir.close();
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterThreadsToSegments.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterThreadsToSegments.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterThreadsToSegments.java
index 4e884df..359e7d0 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterThreadsToSegments.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterThreadsToSegments.java
@@ -39,6 +39,7 @@ import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.StringHelper;
 import org.apache.lucene.util.TestUtil;
+import org.apache.lucene.util.Version;
 
 public class TestIndexWriterThreadsToSegments extends LuceneTestCase {
 
@@ -331,7 +332,7 @@ public class TestIndexWriterThreadsToSegments extends LuceneTestCase {
               SegmentInfo si = TestUtil.getDefaultCodec().segmentInfoFormat().read(dir, segName, id, IOContext.DEFAULT);
               si.setCodec(codec);
               SegmentCommitInfo sci = new SegmentCommitInfo(si, 0, -1, -1, -1);
-              SegmentReader sr = new SegmentReader(sci, IOContext.DEFAULT);
+              SegmentReader sr = new SegmentReader(sci, Version.LATEST.major, IOContext.DEFAULT);
               try {
                 thread0Count += sr.docFreq(new Term("field", "threadID0"));
                 thread1Count += sr.docFreq(new Term("field", "threadID1"));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/test/org/apache/lucene/index/TestOneMergeWrappingMergePolicy.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestOneMergeWrappingMergePolicy.java b/lucene/core/src/test/org/apache/lucene/index/TestOneMergeWrappingMergePolicy.java
index f27437f..c85646c 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestOneMergeWrappingMergePolicy.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestOneMergeWrappingMergePolicy.java
@@ -124,6 +124,7 @@ public class TestOneMergeWrappingMergePolicy extends LuceneTestCase {
         final SegmentInfo si = new SegmentInfo(
             dir, // dir
             Version.LATEST, // version
+            Version.LATEST, // min version
             TestUtil.randomSimpleString(random()), // name
             random().nextInt(), // maxDoc
             random().nextBoolean(), // isCompoundFile

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java b/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java
index 7552786..128601e 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java
@@ -30,13 +30,15 @@ import java.util.Collections;
 public class TestSegmentInfos extends LuceneTestCase {
 
   public void testIllegalCreatedVersion() {
-    IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new SegmentInfos(Version.LUCENE_6_5_0));
-    assertEquals("indexCreatedVersion may only be non-null if the index was created on or after 7.0, got 6.5.0", e.getMessage());
+    IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new SegmentInfos(5));
+    assertEquals("indexCreatedVersionMajor must be >= 6, got: 5", e.getMessage());
+    e = expectThrows(IllegalArgumentException.class, () -> new SegmentInfos(8));
+    assertEquals("indexCreatedVersionMajor is in the future: 8", e.getMessage());
   }
 
   // LUCENE-5954
   public void testVersionsNoSegments() throws IOException {
-    SegmentInfos sis = new SegmentInfos(Version.LATEST);
+    SegmentInfos sis = new SegmentInfos(Version.LATEST.major);
     BaseDirectoryWrapper dir = newDirectory();
     dir.setCheckIndexOnClose(false);
     sis.commit(dir);
@@ -53,8 +55,8 @@ public class TestSegmentInfos extends LuceneTestCase {
     byte id[] = StringHelper.randomId();
     Codec codec = Codec.getDefault();
 
-    SegmentInfos sis = new SegmentInfos(Version.LATEST);
-    SegmentInfo info = new SegmentInfo(dir, Version.LUCENE_6_0_0, "_0", 1, false, Codec.getDefault(), 
+    SegmentInfos sis = new SegmentInfos(Version.LATEST.major);
+    SegmentInfo info = new SegmentInfo(dir, Version.LUCENE_7_0_0, Version.LUCENE_7_0_0, "_0", 1, false, Codec.getDefault(), 
                                        Collections.<String,String>emptyMap(), id, Collections.<String,String>emptyMap(), null);
     info.setFiles(Collections.<String>emptySet());
     codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
@@ -63,7 +65,7 @@ public class TestSegmentInfos extends LuceneTestCase {
     sis.add(commitInfo);
     sis.commit(dir);
     sis = SegmentInfos.readLatestCommit(dir);
-    assertEquals(Version.LUCENE_6_0_0, sis.getMinSegmentLuceneVersion());
+    assertEquals(Version.LUCENE_7_0_0, sis.getMinSegmentLuceneVersion());
     assertEquals(Version.LATEST, sis.getCommitLuceneVersion());
     dir.close();
   }
@@ -75,15 +77,15 @@ public class TestSegmentInfos extends LuceneTestCase {
     byte id[] = StringHelper.randomId();
     Codec codec = Codec.getDefault();
 
-    SegmentInfos sis = new SegmentInfos(Version.LATEST);
-    SegmentInfo info = new SegmentInfo(dir, Version.LUCENE_6_0_0, "_0", 1, false, Codec.getDefault(), 
+    SegmentInfos sis = new SegmentInfos(Version.LATEST.major);
+    SegmentInfo info = new SegmentInfo(dir, Version.LUCENE_7_0_0, Version.LUCENE_7_0_0, "_0", 1, false, Codec.getDefault(), 
                                        Collections.<String,String>emptyMap(), id, Collections.<String,String>emptyMap(), null);
     info.setFiles(Collections.<String>emptySet());
     codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
     SegmentCommitInfo commitInfo = new SegmentCommitInfo(info, 0, -1, -1, -1);
     sis.add(commitInfo);
 
-    info = new SegmentInfo(dir, Version.LUCENE_6_0_0, "_1", 1, false, Codec.getDefault(), 
+    info = new SegmentInfo(dir, Version.LUCENE_7_0_0, Version.LUCENE_7_0_0, "_1", 1, false, Codec.getDefault(), 
                            Collections.<String,String>emptyMap(), id, Collections.<String,String>emptyMap(), null);
     info.setFiles(Collections.<String>emptySet());
     codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
@@ -92,7 +94,7 @@ public class TestSegmentInfos extends LuceneTestCase {
 
     sis.commit(dir);
     sis = SegmentInfos.readLatestCommit(dir);
-    assertEquals(Version.LUCENE_6_0_0, sis.getMinSegmentLuceneVersion());
+    assertEquals(Version.LUCENE_7_0_0, sis.getMinSegmentLuceneVersion());
     assertEquals(Version.LATEST, sis.getCommitLuceneVersion());
     dir.close();
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java b/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java
index 1ef37c0..6d0e04b 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java
@@ -60,8 +60,8 @@ public class TestSegmentMerger extends LuceneTestCase {
     SegmentCommitInfo info1 = DocHelper.writeDoc(random(), merge1Dir, doc1);
     DocHelper.setupDoc(doc2);
     SegmentCommitInfo info2 = DocHelper.writeDoc(random(), merge2Dir, doc2);
-    reader1 = new SegmentReader(info1, newIOContext(random()));
-    reader2 = new SegmentReader(info2, newIOContext(random()));
+    reader1 = new SegmentReader(info1, Version.LATEST.major, newIOContext(random()));
+    reader2 = new SegmentReader(info2, Version.LATEST.major, newIOContext(random()));
   }
 
   @Override
@@ -84,7 +84,7 @@ public class TestSegmentMerger extends LuceneTestCase {
 
   public void testMerge() throws IOException {
     final Codec codec = Codec.getDefault();
-    final SegmentInfo si = new SegmentInfo(mergedDir, Version.LATEST, mergedSegment, -1, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
+    final SegmentInfo si = new SegmentInfo(mergedDir, Version.LATEST, null, mergedSegment, -1, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
 
     SegmentMerger merger = new SegmentMerger(Arrays.<CodecReader>asList(reader1, reader2),
                                              si, InfoStream.getDefault(), mergedDir,
@@ -97,6 +97,7 @@ public class TestSegmentMerger extends LuceneTestCase {
     SegmentReader mergedReader = new SegmentReader(new SegmentCommitInfo(
                                                          mergeState.segmentInfo,
                                                          0, -1L, -1L, -1L),
+                                                   Version.LATEST.major,
                                                    newIOContext(random()));
     assertTrue(mergedReader != null);
     assertTrue(mergedReader.numDocs() == 2);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/test/org/apache/lucene/index/TestSegmentReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSegmentReader.java b/lucene/core/src/test/org/apache/lucene/index/TestSegmentReader.java
index f008afe..5434956 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestSegmentReader.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSegmentReader.java
@@ -29,6 +29,7 @@ import org.apache.lucene.store.IOContext;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
+import org.apache.lucene.util.Version;
 
 public class TestSegmentReader extends LuceneTestCase {
   private Directory dir;
@@ -42,7 +43,7 @@ public class TestSegmentReader extends LuceneTestCase {
     dir = newDirectory();
     DocHelper.setupDoc(testDoc);
     SegmentCommitInfo info = DocHelper.writeDoc(random(), dir, testDoc);
-    reader = new SegmentReader(info, IOContext.READ);
+    reader = new SegmentReader(info, Version.LATEST.major, IOContext.READ);
   }
   
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermDocs.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermDocs.java b/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermDocs.java
index 7acf3e4..f1271b8 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermDocs.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermDocs.java
@@ -27,6 +27,7 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
+import org.apache.lucene.util.Version;
 
 public class TestSegmentTermDocs extends LuceneTestCase {
   private Document testDoc = new Document();
@@ -53,7 +54,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
 
   public void testTermDocs() throws IOException {
     //After adding the document, we should be able to read it back in
-    SegmentReader reader = new SegmentReader(info, newIOContext(random()));
+    SegmentReader reader = new SegmentReader(info, Version.LATEST.major, newIOContext(random()));
     assertTrue(reader != null);
 
     TermsEnum terms = reader.fields().terms(DocHelper.TEXT_FIELD_2_KEY).iterator();
@@ -71,7 +72,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
   public void testBadSeek() throws IOException {
     {
       //After adding the document, we should be able to read it back in
-      SegmentReader reader = new SegmentReader(info, newIOContext(random()));
+      SegmentReader reader = new SegmentReader(info, Version.LATEST.major, newIOContext(random()));
       assertTrue(reader != null);
       PostingsEnum termDocs = TestUtil.docs(random(), reader,
           "textField2",
@@ -84,7 +85,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
     }
     {
       //After adding the document, we should be able to read it back in
-      SegmentReader reader = new SegmentReader(info, newIOContext(random()));
+      SegmentReader reader = new SegmentReader(info, Version.LATEST.major, newIOContext(random()));
       assertTrue(reader != null);
       PostingsEnum termDocs = TestUtil.docs(random(), reader,
           "junk",

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/highlighter/src/java/org/apache/lucene/search/highlight/TermVectorLeafReader.java
----------------------------------------------------------------------
diff --git a/lucene/highlighter/src/java/org/apache/lucene/search/highlight/TermVectorLeafReader.java b/lucene/highlighter/src/java/org/apache/lucene/search/highlight/TermVectorLeafReader.java
index 2e3cdab..3a723cf 100644
--- a/lucene/highlighter/src/java/org/apache/lucene/search/highlight/TermVectorLeafReader.java
+++ b/lucene/highlighter/src/java/org/apache/lucene/search/highlight/TermVectorLeafReader.java
@@ -25,6 +25,7 @@ import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.Fields;
+import org.apache.lucene.index.LeafMetaData;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.NumericDocValues;
@@ -34,8 +35,8 @@ import org.apache.lucene.index.SortedNumericDocValues;
 import org.apache.lucene.index.SortedSetDocValues;
 import org.apache.lucene.index.StoredFieldVisitor;
 import org.apache.lucene.index.Terms;
-import org.apache.lucene.search.Sort;
 import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.Version;
 
 /**
  * Wraps a Terms with a {@link org.apache.lucene.index.LeafReader}, typically from term vectors.
@@ -165,8 +166,8 @@ public class TermVectorLeafReader extends LeafReader {
   }
 
   @Override
-  public Sort getIndexSort() {
-    return null;
+  public LeafMetaData getMetaData() {
+    return new LeafMetaData(Version.LATEST.major, null, null);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
----------------------------------------------------------------------
diff --git a/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java b/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
index 0c8ea6d..4bd72e9 100644
--- a/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
+++ b/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
@@ -40,7 +40,6 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.SimpleCollector;
-import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.similarities.Similarity;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.ArrayUtil;
@@ -58,6 +57,7 @@ import org.apache.lucene.util.IntBlockPool.SliceWriter;
 import org.apache.lucene.util.RecyclingByteBlockAllocator;
 import org.apache.lucene.util.RecyclingIntBlockAllocator;
 import org.apache.lucene.util.StringHelper;
+import org.apache.lucene.util.Version;
 
 /**
  * High-performance single-document main memory Apache Lucene fulltext search index. 
@@ -1625,8 +1625,8 @@ public class MemoryIndex {
     }
 
     @Override
-    public Sort getIndexSort() {
-      return null;
+    public LeafMetaData getMetaData() {
+      return new LeafMetaData(Version.LATEST.major, Version.LATEST, null);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/misc/src/java/org/apache/lucene/index/IndexSplitter.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/java/org/apache/lucene/index/IndexSplitter.java b/lucene/misc/src/java/org/apache/lucene/index/IndexSplitter.java
index a3d720d..8925648 100644
--- a/lucene/misc/src/java/org/apache/lucene/index/IndexSplitter.java
+++ b/lucene/misc/src/java/org/apache/lucene/index/IndexSplitter.java
@@ -133,13 +133,13 @@ public class IndexSplitter {
   public void split(Path destDir, String[] segs) throws IOException {
     Files.createDirectories(destDir);
     FSDirectory destFSDir = FSDirectory.open(destDir);
-    SegmentInfos destInfos = new SegmentInfos(infos.getIndexCreatedVersion());
+    SegmentInfos destInfos = new SegmentInfos(infos.getIndexCreatedVersionMajor());
     destInfos.counter = infos.counter;
     for (String n : segs) {
       SegmentCommitInfo infoPerCommit = getInfo(n);
       SegmentInfo info = infoPerCommit.info;
       // Same info just changing the dir:
-      SegmentInfo newInfo = new SegmentInfo(destFSDir, info.getVersion(), info.name, info.maxDoc(),
+      SegmentInfo newInfo = new SegmentInfo(destFSDir, info.getVersion(), info.getMinVersion(), info.name, info.maxDoc(),
                                             info.getUseCompoundFile(), info.getCodec(), info.getDiagnostics(), info.getId(), new HashMap<>(), null);
       destInfos.add(new SegmentCommitInfo(newInfo, infoPerCommit.getDelCount(),
           infoPerCommit.getDelGen(), infoPerCommit.getFieldInfosGen(),

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/ReplicaNode.java
----------------------------------------------------------------------
diff --git a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/ReplicaNode.java b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/ReplicaNode.java
index 5319956..ff82d18 100644
--- a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/ReplicaNode.java
+++ b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/ReplicaNode.java
@@ -139,7 +139,7 @@ public abstract class ReplicaNode extends Node {
       SegmentInfos infos;
       if (segmentsFileName == null) {
         // No index here yet:
-        infos = new SegmentInfos(Version.LATEST);
+        infos = new SegmentInfos(Version.LATEST.major);
         message("top: init: no segments in index");
       } else {
         message("top: init: read existing segments commit " + segmentsFileName);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/test-framework/src/java/org/apache/lucene/index/BaseCompoundFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseCompoundFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseCompoundFormatTestCase.java
index 30788e8..07a3f06 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseCompoundFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseCompoundFormatTestCase.java
@@ -638,7 +638,8 @@ public abstract class BaseCompoundFormatTestCase extends BaseIndexFileFormatTest
   
   /** Returns a new fake segment */
   protected static SegmentInfo newSegmentInfo(Directory dir, String name) {
-    return new SegmentInfo(dir, Version.LATEST, name, 10000, false, Codec.getDefault(), Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
+    Version minVersion = random().nextBoolean() ? null : Version.LATEST;
+    return new SegmentInfo(dir, Version.LATEST, minVersion,  name, 10000, false, Codec.getDefault(), Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
   }
   
   /** Creates a file of the specified size with random data. */

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/test-framework/src/java/org/apache/lucene/index/BaseFieldInfoFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseFieldInfoFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseFieldInfoFormatTestCase.java
index 528e92a..9363ce6 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseFieldInfoFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseFieldInfoFormatTestCase.java
@@ -347,7 +347,8 @@ public abstract class BaseFieldInfoFormatTestCase extends BaseIndexFileFormatTes
   
   /** Returns a new fake segment */
   protected static SegmentInfo newSegmentInfo(Directory dir, String name) {
-    return new SegmentInfo(dir, Version.LATEST, name, 10000, false, Codec.getDefault(), Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
+    Version minVersion = random().nextBoolean() ? null : Version.LATEST;
+    return new SegmentInfo(dir, Version.LATEST, minVersion, name, 10000, false, Codec.getDefault(), Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
   }
   
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
index 3097cdf..e450114 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
@@ -147,6 +147,22 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
   /** Returns the codec to run tests against */
   protected abstract Codec getCodec();
 
+  /** Returns the major version that this codec is compatible with. */
+  protected int getCreatedVersionMajor() {
+    return Version.LATEST.major;
+  }
+
+  /** Set the created version of the given {@link Directory} and return it. */
+  protected final <D extends Directory> D applyCreatedVersionMajor(D d) throws IOException {
+    if (SegmentInfos.getLastCommitGeneration(d) != -1) {
+      throw new IllegalArgumentException("Cannot set the created version on a Directory that already has segments");
+    }
+    if (getCreatedVersionMajor() != Version.LATEST.major || random().nextBoolean()) {
+      new SegmentInfos(getCreatedVersionMajor()).commit(d);
+    }
+    return d;
+  }
+
   private Codec savedCodec;
 
   public void setUp() throws Exception {
@@ -195,7 +211,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
   /** The purpose of this test is to make sure that bulk merge doesn't accumulate useless data over runs. */
   public void testMergeStability() throws Exception {
     assumeTrue("merge is not stable", mergeIsStable());
-    Directory dir = newDirectory();
+    Directory dir = applyCreatedVersionMajor(newDirectory());
 
     // do not use newMergePolicy that might return a MockMergePolicy that ignores the no-CFS ratio
     // do not use RIW which will change things up!
@@ -214,7 +230,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
     w.close();
     DirectoryReader reader = DirectoryReader.open(dir);
 
-    Directory dir2 = newDirectory();
+    Directory dir2 = applyCreatedVersionMajor(newDirectory());
     mp = newTieredMergePolicy();
     mp.setNoCFSRatio(0);
     cfg = new IndexWriterConfig(new MockAnalyzer(random())).setUseCompoundFile(false).setMergePolicy(mp);
@@ -245,7 +261,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
       avoidCodecs.add(new MockRandomPostingsFormat().getName());
       Codec.setDefault(new RandomCodec(random(), avoidCodecs));
     }
-    Directory dir = newDirectory();
+    Directory dir = applyCreatedVersionMajor(newDirectory());
     IndexWriterConfig cfg = newIndexWriterConfig(new MockAnalyzer(random()));
     IndexWriter w = new IndexWriter(dir, cfg);
     // we need to index enough documents so that constant overhead doesn't dominate
@@ -286,7 +302,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
   /** Calls close multiple times on closeable codec apis */
   public void testMultiClose() throws IOException {
     // first make a one doc index
-    Directory oneDocIndex = newDirectory();
+    Directory oneDocIndex = applyCreatedVersionMajor(newDirectory());
     IndexWriter iw = new IndexWriter(oneDocIndex, new IndexWriterConfig(new MockAnalyzer(random())));
     Document oneDoc = new Document();
     FieldType customType = new FieldType(TextField.TYPE_STORED);
@@ -303,7 +319,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
     Directory dir = newFSDirectory(createTempDir("justSoYouGetSomeChannelErrors"));
     Codec codec = getCodec();
     
-    SegmentInfo segmentInfo = new SegmentInfo(dir, Version.LATEST, "_0", 1, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
+    SegmentInfo segmentInfo = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "_0", 1, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
     FieldInfo proto = oneDocReader.getFieldInfos().fieldInfo("field");
     FieldInfo field = new FieldInfo(proto.name, proto.number, proto.hasVectors(), proto.omitsNorms(), proto.hasPayloads(), 
                                     proto.getIndexOptions(), proto.getDocValuesType(), proto.getDocValuesGen(), new HashMap<>(),
@@ -499,7 +515,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
   // codec does not corrupt the index or leak file handles.
   public void testRandomExceptions() throws Exception {
     // disable slow things: we don't rely upon sleeps here.
-    MockDirectoryWrapper dir = newMockDirectory();
+    MockDirectoryWrapper dir = applyCreatedVersionMajor(newMockDirectory());
     dir.setThrottling(MockDirectoryWrapper.Throttling.NEVER);
     dir.setUseSlowOpenClosers(false);
     dir.setRandomIOExceptionRate(0.001); // more rare

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/test-framework/src/java/org/apache/lucene/index/BaseNormsFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseNormsFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseNormsFormatTestCase.java
index cd62218..370d009 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseNormsFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseNormsFormatTestCase.java
@@ -449,7 +449,7 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
       norms[i] = longs.getAsLong();
     }
     
-    Directory dir = newDirectory();
+    Directory dir = applyCreatedVersionMajor(newDirectory());
     Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.KEYWORD, false);
     IndexWriterConfig conf = newIndexWriterConfig(analyzer);conf.setMergePolicy(NoMergePolicy.INSTANCE);
     conf.setSimilarity(new CannedNormSimilarity(norms));
@@ -585,7 +585,7 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
    *
    */
   public void testUndeadNorms() throws Exception {
-    Directory dir = newDirectory();
+    Directory dir = applyCreatedVersionMajor(newDirectory());
     RandomIndexWriter w = new RandomIndexWriter(random(), dir);
     int numDocs = atLeast(500);
     List<Integer> toDelete = new ArrayList<>();
@@ -646,7 +646,7 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
       norms[i] = random().nextLong();
     }
 
-    Directory dir = newDirectory();
+    Directory dir = applyCreatedVersionMajor(newDirectory());
     Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.KEYWORD, false);
     IndexWriterConfig conf = newIndexWriterConfig(analyzer);conf.setMergePolicy(NoMergePolicy.INSTANCE);
     conf.setSimilarity(new CannedNormSimilarity(norms));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/test-framework/src/java/org/apache/lucene/index/BaseSegmentInfoFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseSegmentInfoFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseSegmentInfoFormatTestCase.java
index ae5416f..29f31d6 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseSegmentInfoFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseSegmentInfoFormatTestCase.java
@@ -48,13 +48,18 @@ import org.apache.lucene.util.Version;
  * if there is some bug in a given si Format that this
  * test fails to catch then this test needs to be improved! */
 public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatTestCase {
-  
+
+  /** Whether this format records min versions. */
+  protected boolean supportsMinVersion() {
+    return true;
+  }
+
   /** Test files map */
   public void testFiles() throws Exception {
     Directory dir = newDirectory();
     Codec codec = getCodec();
     byte id[] = StringHelper.randomId();
-    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec, 
+    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], getVersions()[0], "_123", 1, false, codec, 
                                        Collections.<String,String>emptyMap(), id, new HashMap<>(), null);
     info.setFiles(Collections.<String>emptySet());
     codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
@@ -68,7 +73,7 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatT
     Directory dir = newDirectory();
     Codec codec = getCodec();
     byte id[] = StringHelper.randomId();
-    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec, 
+    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], getVersions()[0], "_123", 1, false, codec, 
                                        Collections.<String,String>emptyMap(), id, new HashMap<>(), null);
     Set<String> originalFiles = Collections.singleton("_123.a");
     info.setFiles(originalFiles);
@@ -97,7 +102,7 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatT
     Map<String,String> diagnostics = new HashMap<>();
     diagnostics.put("key1", "value1");
     diagnostics.put("key2", "value2");
-    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec, 
+    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], getVersions()[0], "_123", 1, false, codec, 
                                        diagnostics, id, new HashMap<>(), null);
     info.setFiles(Collections.<String>emptySet());
     codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
@@ -120,7 +125,7 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatT
     Map<String,String> attributes = new HashMap<>();
     attributes.put("key1", "value1");
     attributes.put("key2", "value2");
-    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec, 
+    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], getVersions()[0], "_123", 1, false, codec, 
                                        Collections.emptyMap(), id, attributes, null);
     info.setFiles(Collections.<String>emptySet());
     codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
@@ -140,7 +145,7 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatT
     Codec codec = getCodec();
     Directory dir = newDirectory();
     byte id[] = StringHelper.randomId();
-    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec, 
+    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], getVersions()[0], "_123", 1, false, codec, 
                                        Collections.<String,String>emptyMap(), id, new HashMap<>(), null);
     info.setFiles(Collections.<String>emptySet());
     codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
@@ -153,15 +158,22 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatT
   public void testVersions() throws Exception {
     Codec codec = getCodec();
     for (Version v : getVersions()) {
-      Directory dir = newDirectory();
-      byte id[] = StringHelper.randomId();
-      SegmentInfo info = new SegmentInfo(dir, v, "_123", 1, false, codec, 
-                                         Collections.<String,String>emptyMap(), id, new HashMap<>(), null);
-      info.setFiles(Collections.<String>emptySet());
-      codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
-      SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", id, IOContext.DEFAULT);
-      assertEquals(info2.getVersion(), v);
-      dir.close();
+      for (Version minV : new Version[] { v, null}) {
+        Directory dir = newDirectory();
+        byte id[] = StringHelper.randomId();
+        SegmentInfo info = new SegmentInfo(dir, v, minV, "_123", 1, false, codec, 
+                                           Collections.<String,String>emptyMap(), id, new HashMap<>(), null);
+        info.setFiles(Collections.<String>emptySet());
+        codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
+        SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", id, IOContext.DEFAULT);
+        assertEquals(info2.getVersion(), v);
+        if (supportsMinVersion()) {
+          assertEquals(info2.getMinVersion(), minV);
+        } else {
+          assertEquals(info2.getMinVersion(), null);
+        }
+        dir.close();
+      }
     }
   }
 
@@ -262,7 +274,7 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatT
       Directory dir = newDirectory();
       Codec codec = getCodec();
       byte id[] = StringHelper.randomId();
-      SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec, 
+      SegmentInfo info = new SegmentInfo(dir, getVersions()[0], getVersions()[0], "_123", 1, false, codec, 
           Collections.<String,String>emptyMap(), id, new HashMap<>(), sort);
       info.setFiles(Collections.<String>emptySet());
       codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
@@ -292,7 +304,7 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatT
     dir.failOn(fail);
     Codec codec = getCodec();
     byte id[] = StringHelper.randomId();
-    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec, 
+    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], getVersions()[0], "_123", 1, false, codec, 
                                        Collections.<String,String>emptyMap(), id, new HashMap<>(), null);
     info.setFiles(Collections.<String>emptySet());
     
@@ -325,7 +337,7 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatT
     dir.failOn(fail);
     Codec codec = getCodec();
     byte id[] = StringHelper.randomId();
-    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec, 
+    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], getVersions()[0], "_123", 1, false, codec, 
                                        Collections.<String,String>emptyMap(), id, new HashMap<>(), null);
     info.setFiles(Collections.<String>emptySet());
     
@@ -358,7 +370,7 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatT
     dir.failOn(fail);
     Codec codec = getCodec();
     byte id[] = StringHelper.randomId();
-    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec, 
+    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], getVersions()[0], "_123", 1, false, codec, 
                                        Collections.<String,String>emptyMap(), id, new HashMap<>(), null);
     info.setFiles(Collections.<String>emptySet());
     codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
@@ -392,7 +404,7 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatT
     dir.failOn(fail);
     Codec codec = getCodec();
     byte id[] = StringHelper.randomId();
-    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec, 
+    SegmentInfo info = new SegmentInfo(dir, getVersions()[0], getVersions()[0], "_123", 1, false, codec, 
                                        Collections.<String,String>emptyMap(), id, new HashMap<>(), null);
     info.setFiles(Collections.<String>emptySet());
     codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
@@ -442,7 +454,7 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatT
                        TestUtil.randomUnicodeString(random()));
       }
       
-      SegmentInfo info = new SegmentInfo(dir, version, name, docCount, isCompoundFile, codec, diagnostics, id, attributes, null);
+      SegmentInfo info = new SegmentInfo(dir, version, null, name, docCount, isCompoundFile, codec, diagnostics, id, attributes, null);
       info.setFiles(files);
       codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
       SegmentInfo info2 = codec.segmentInfoFormat().read(dir, name, id, IOContext.DEFAULT);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/test-framework/src/java/org/apache/lucene/index/RandomPostingsTester.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/RandomPostingsTester.java b/lucene/test-framework/src/java/org/apache/lucene/index/RandomPostingsTester.java
index d415927..d5eb105 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/RandomPostingsTester.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/RandomPostingsTester.java
@@ -611,7 +611,7 @@ public class RandomPostingsTester {
   // maxAllowed = the "highest" we can index, but we will still
   // randomly index at lower IndexOption
   public FieldsProducer buildIndex(Codec codec, Directory dir, IndexOptions maxAllowed, boolean allowPayloads, boolean alwaysTestMax) throws IOException {
-    SegmentInfo segmentInfo = new SegmentInfo(dir, Version.LATEST, "_0", maxDoc, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
+    SegmentInfo segmentInfo = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "_0", maxDoc, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
 
     int maxIndexOption = Arrays.asList(IndexOptions.values()).indexOf(maxAllowed);
     if (LuceneTestCase.VERBOSE) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java b/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java
index c40f875..bd23fd2 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java
@@ -26,6 +26,7 @@ import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.Fields;
+import org.apache.lucene.index.LeafMetaData;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
@@ -39,6 +40,7 @@ import org.apache.lucene.index.StoredFieldVisitor;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 
 import junit.framework.Assert;
 
@@ -260,8 +262,8 @@ public class QueryUtils {
       protected void doClose() throws IOException {}
 
       @Override
-      public Sort getIndexSort() {
-        return null;
+      public LeafMetaData getMetaData() {
+        return new LeafMetaData(Version.LATEST.major, Version.LATEST, null);
       }
 
       @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/solr/core/src/java/org/apache/solr/index/SlowCompositeReaderWrapper.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/index/SlowCompositeReaderWrapper.java b/solr/core/src/java/org/apache/solr/index/SlowCompositeReaderWrapper.java
index c445cdf..ab7313f 100644
--- a/solr/core/src/java/org/apache/solr/index/SlowCompositeReaderWrapper.java
+++ b/solr/core/src/java/org/apache/solr/index/SlowCompositeReaderWrapper.java
@@ -23,9 +23,8 @@ import java.util.Map;
 import org.apache.lucene.index.*;
 import org.apache.lucene.index.MultiDocValues.MultiSortedDocValues;
 import org.apache.lucene.index.MultiDocValues.OrdinalMap;
-import org.apache.lucene.index.NumericDocValues;
-import org.apache.lucene.search.Sort;
 import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.Version;
 
 /**
  * This class forces a composite reader (eg a {@link
@@ -47,6 +46,7 @@ public final class SlowCompositeReaderWrapper extends LeafReader {
 
   private final CompositeReader in;
   private final Fields fields;
+  private final LeafMetaData metaData;
   
   /** This method is sugar for getting an {@link LeafReader} from
    * an {@link IndexReader} of any kind. If the reader is already atomic,
@@ -66,6 +66,17 @@ public final class SlowCompositeReaderWrapper extends LeafReader {
     in = reader;
     fields = MultiFields.getFields(in);
     in.registerParentReader(this);
+    if (reader.leaves().isEmpty()) {
+      metaData = new LeafMetaData(Version.LATEST.major, Version.LATEST, null);
+    } else {
+      Version minVersion = reader.leaves().stream()
+          .map(LeafReaderContext::reader)
+          .map(LeafReader::getMetaData)
+          .map(LeafMetaData::getMinVersion)
+          .reduce((v1, v2) -> v1 == null ? null : v2 == null ? null : v2.onOrAfter(v1) ? v1 : v2)
+          .get();
+      metaData = new LeafMetaData(reader.leaves().get(0).reader().getMetaData().getCreatedVersionMajor(), minVersion, null);
+    }
   }
 
   @Override
@@ -263,7 +274,7 @@ public final class SlowCompositeReaderWrapper extends LeafReader {
   }
 
   @Override
-  public Sort getIndexSort() {
-    return null;
+  public LeafMetaData getMetaData() {
+    return metaData;
   }
 }
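
Editor's note: the SlowCompositeReaderWrapper hunk above derives a composite minimum version by reducing the per-leaf minimum versions with a null-propagating "take the earlier of the two" operator. A small standalone sketch of just that reduction follows; the version strings are illustrative and not tied to any real index, and Version.parse is used here only to avoid depending on specific constants.

    import java.util.Arrays;
    import java.util.List;
    import org.apache.lucene.util.Version;

    public class MinVersionReduceSketch {
      public static void main(String[] args) throws Exception {
        // Per-leaf minimum versions; a null entry would mean "unknown" and,
        // with the operator below, would make the whole result null.
        List<Version> leafMins = Arrays.asList(Version.parse("6.5.0"), Version.parse("7.0.0"));
        Version min = leafMins.stream()
            .reduce((v1, v2) -> v1 == null ? null
                : v2 == null ? null
                : v2.onOrAfter(v1) ? v1 : v2)   // keep the earlier of the two
            .get();
        System.out.println(min); // prints 6.5.0, the earliest version in the list
      }
    }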

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/23b002a0/solr/core/src/test/org/apache/solr/search/TestDocSet.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestDocSet.java b/solr/core/src/test/org/apache/solr/search/TestDocSet.java
index db6523e..e5cc6eb 100644
--- a/solr/core/src/test/org/apache/solr/search/TestDocSet.java
+++ b/solr/core/src/test/org/apache/solr/search/TestDocSet.java
@@ -25,6 +25,7 @@ import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.Fields;
+import org.apache.lucene.index.LeafMetaData;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexReaderContext;
 import org.apache.lucene.index.LeafReader;
@@ -38,11 +39,11 @@ import org.apache.lucene.index.SortedSetDocValues;
 import org.apache.lucene.index.StoredFieldVisitor;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.Sort;
 import org.apache.lucene.util.BitSetIterator;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.FixedBitSet;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 
 /**
  *
@@ -455,8 +456,8 @@ public class TestDocSet extends LuceneTestCase {
       }
 
       @Override
-      public Sort getIndexSort() {
-        return null;
+      public LeafMetaData getMetaData() {
+        return new LeafMetaData(Version.LATEST.major, Version.LATEST, null);
       }
 
       @Override


[13/14] lucene-solr:jira/solr-9959: SOLR-10394: Rename getSortWithinGroup to getWithinGroupSort in search.grouping.Command class. (Judith Silverman, Christine Poerschke)

Posted by ab...@apache.org.
SOLR-10394: Rename getSortWithinGroup to getWithinGroupSort in search.grouping.Command class.
(Judith Silverman, Christine Poerschke)


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/05749d06
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/05749d06
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/05749d06

Branch: refs/heads/jira/solr-9959
Commit: 05749d0694ee6dce44764cae965c09857dc8975a
Parents: ee98cdc
Author: Christine Poerschke <cp...@apache.org>
Authored: Tue Apr 4 12:52:09 2017 +0100
Committer: Christine Poerschke <cp...@apache.org>
Committed: Tue Apr 4 12:52:09 2017 +0100

----------------------------------------------------------------------
 solr/CHANGES.txt                                                 | 3 +++
 solr/core/src/java/org/apache/solr/search/grouping/Command.java  | 2 +-
 .../src/java/org/apache/solr/search/grouping/CommandHandler.java | 4 ++--
 .../solr/search/grouping/distributed/command/QueryCommand.java   | 2 +-
 .../grouping/distributed/command/SearchGroupsFieldCommand.java   | 2 +-
 .../grouping/distributed/command/TopGroupsFieldCommand.java      | 2 +-
 6 files changed, 9 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/05749d06/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 83f1440..6b0658c 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -190,6 +190,9 @@ Other Changes
 
 * SOLR-9745: print errors from solr.cmd (Gopikannan Venugopalsamy via Mikhail Khludnev)
 
+* SOLR-10394: Rename getSortWithinGroup to getWithinGroupSort in search.grouping.Command class.
+  (Judith Silverman, Christine Poerschke)
+
 ==================  6.5.1 ==================
 
 Bug Fixes

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/05749d06/solr/core/src/java/org/apache/solr/search/grouping/Command.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/Command.java b/solr/core/src/java/org/apache/solr/search/grouping/Command.java
index 55e2d96..7391df6 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/Command.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/Command.java
@@ -60,6 +60,6 @@ public interface Command<T> {
   /**
    * @return The sort inside a group
    */
-  Sort getSortWithinGroup();
+  Sort getWithinGroupSort();
 
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/05749d06/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java b/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java
index 2dd2291..8ba0a6a 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java
@@ -174,9 +174,9 @@ public class CommandHandler {
     final AllGroupHeadsCollector allGroupHeadsCollector;
     if (fieldType.getNumberType() != null) {
       ValueSource vs = fieldType.getValueSource(sf, null);
-      allGroupHeadsCollector = new FunctionAllGroupHeadsCollector(vs, new HashMap(), firstCommand.getSortWithinGroup());
+      allGroupHeadsCollector = new FunctionAllGroupHeadsCollector(vs, new HashMap(), firstCommand.getWithinGroupSort());
     } else {
-      allGroupHeadsCollector = TermAllGroupHeadsCollector.create(firstCommand.getKey(), firstCommand.getSortWithinGroup());
+      allGroupHeadsCollector = TermAllGroupHeadsCollector.create(firstCommand.getKey(), firstCommand.getWithinGroupSort());
     }
     if (collectors.isEmpty()) {
       searchWithTimeLimiter(query, filter, allGroupHeadsCollector);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/05749d06/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/QueryCommand.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/QueryCommand.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/QueryCommand.java
index afb8ba7..1615237 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/QueryCommand.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/QueryCommand.java
@@ -149,7 +149,7 @@ public class QueryCommand implements Command<QueryCommandResult> {
   }
 
   @Override
-  public Sort getSortWithinGroup() {
+  public Sort getWithinGroupSort() {
     return null;
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/05749d06/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/SearchGroupsFieldCommand.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/SearchGroupsFieldCommand.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/SearchGroupsFieldCommand.java
index d5f9f9d..7f92382 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/SearchGroupsFieldCommand.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/SearchGroupsFieldCommand.java
@@ -138,7 +138,7 @@ public class SearchGroupsFieldCommand implements Command<SearchGroupsFieldComman
   }
 
   @Override
-  public Sort getSortWithinGroup() {
+  public Sort getWithinGroupSort() {
     return null;
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/05749d06/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/TopGroupsFieldCommand.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/TopGroupsFieldCommand.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/TopGroupsFieldCommand.java
index 2c6c401..e34e278 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/TopGroupsFieldCommand.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/TopGroupsFieldCommand.java
@@ -173,7 +173,7 @@ public class TopGroupsFieldCommand implements Command<TopGroups<BytesRef>> {
   }
 
   @Override
-  public Sort getSortWithinGroup() {
+  public Sort getWithinGroupSort() {
     return sortWithinGroup;
   }
 }


[08/14] lucene-solr:jira/solr-9959: SOLR-9745: bring back timeout value to fix tests

Posted by ab...@apache.org.
SOLR-9745: bring back timeout value to fix tests


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/3f172a01
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/3f172a01
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/3f172a01

Branch: refs/heads/jira/solr-9959
Commit: 3f172a019b21c0dafaa7a18e0ccd0e99b1c5f3bd
Parents: 0bf7a5f
Author: Mikhail Khludnev <mk...@apache.org>
Authored: Tue Apr 4 08:42:31 2017 +0300
Committer: Mikhail Khludnev <mk...@apache.org>
Committed: Tue Apr 4 08:42:31 2017 +0300

----------------------------------------------------------------------
 solr/core/src/java/org/apache/solr/util/SolrCLI.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3f172a01/solr/core/src/java/org/apache/solr/util/SolrCLI.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/SolrCLI.java b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
index da7e63e..3572ea4 100644
--- a/solr/core/src/java/org/apache/solr/util/SolrCLI.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
@@ -2934,7 +2934,7 @@ public class SolrCLI {
 
         // wait for execution.
         try {
-          handler.waitFor();
+          handler.waitFor(3000);
         } catch (InterruptedException ie) {
           // safe to ignore ...
           Thread.interrupted();
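
Editor's note: for readers unfamiliar with Apache Commons Exec, here is a minimal, self-contained sketch of the bounded-wait pattern the hunk above adopts. The command string and the 3000 ms timeout are illustrative only; the real code parses the Solr start command built by SolrCLI.

    import org.apache.commons.exec.CommandLine;
    import org.apache.commons.exec.DefaultExecuteResultHandler;
    import org.apache.commons.exec.DefaultExecutor;

    public class BoundedWaitSketch {
      public static void main(String[] args) throws Exception {
        // Illustrative command; any asynchronous invocation works the same way.
        CommandLine cmd = CommandLine.parse("echo hello");
        DefaultExecuteResultHandler handler = new DefaultExecuteResultHandler();
        new DefaultExecutor().execute(cmd, handler);

        // waitFor() with no argument blocks until the process exits; the commit
        // above switches to the bounded variant so tests cannot hang indefinitely.
        handler.waitFor(3000); // wait at most 3 seconds

        if (handler.hasResult() && handler.getExitValue() != 0) {
          throw new Exception("Command failed: " + handler.getException());
        }
      }
    }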


[06/14] lucene-solr:jira/solr-9959: SOLR-10351: Add try-with-resources clause around TokenStream

Posted by ab...@apache.org.
SOLR-10351: Add try-with-resources clause around TokenStream


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/e872dc79
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/e872dc79
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/e872dc79

Branch: refs/heads/jira/solr-9959
Commit: e872dc7913036c81b9ef48cf35c3456321b758b7
Parents: 65b4530
Author: Joel Bernstein <jb...@apache.org>
Authored: Mon Apr 3 20:39:37 2017 -0400
Committer: Joel Bernstein <jb...@apache.org>
Committed: Mon Apr 3 20:39:37 2017 -0400

----------------------------------------------------------------------
 .../org/apache/solr/handler/AnalyzeEvaluator.java  | 17 ++++++++---------
 1 file changed, 8 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e872dc79/solr/core/src/java/org/apache/solr/handler/AnalyzeEvaluator.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/AnalyzeEvaluator.java b/solr/core/src/java/org/apache/solr/handler/AnalyzeEvaluator.java
index 207f404..485f9c3 100644
--- a/solr/core/src/java/org/apache/solr/handler/AnalyzeEvaluator.java
+++ b/solr/core/src/java/org/apache/solr/handler/AnalyzeEvaluator.java
@@ -81,17 +81,16 @@ public class AnalyzeEvaluator extends SimpleEvaluator {
       return null;
     }
 
-    TokenStream tokenStream = analyzer.tokenStream(analyzerField, value);
-    CharTermAttribute termAtt = tokenStream.getAttribute(CharTermAttribute.class);
-    tokenStream.reset();
     List<String> tokens = new ArrayList();
-    while (tokenStream.incrementToken()) {
-      tokens.add(termAtt.toString());
-    }
-
-    tokenStream.end();
-    tokenStream.close();
 
+    try(TokenStream tokenStream = analyzer.tokenStream(analyzerField, value)) {
+      CharTermAttribute termAtt = tokenStream.getAttribute(CharTermAttribute.class);
+      tokenStream.reset();
+      while (tokenStream.incrementToken()) {
+        tokens.add(termAtt.toString());
+      }
+      tokenStream.end();
+    }
     return tokens;
   }
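
Editor's note: a minimal standalone illustration of the try-with-resources pattern adopted above. The analyzer, field name and text are placeholders chosen for the sketch, not values taken from AnalyzeEvaluator.

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

    public class TokenizeSketch {
      public static void main(String[] args) throws Exception {
        Analyzer analyzer = new StandardAnalyzer();
        List<String> tokens = new ArrayList<>();
        // try-with-resources closes the TokenStream even if incrementToken() throws,
        // which is what the SOLR-10351 change guarantees for AnalyzeEvaluator.
        try (TokenStream ts = analyzer.tokenStream("field", "Hello, Lucene world")) {
          CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
          ts.reset();                       // must be called before incrementToken()
          while (ts.incrementToken()) {
            tokens.add(termAtt.toString());
          }
          ts.end();                         // consume end-of-stream attributes
        }
        System.out.println(tokens);         // [hello, lucene, world]
        analyzer.close();
      }
    }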
 


[05/14] lucene-solr:jira/solr-9959: SOLR-9745: fix solr.cmd to print errors from invoked script

Posted by ab...@apache.org.
SOLR-9745: fix solr.cmd to print errors from invoked script


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/65b4530f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/65b4530f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/65b4530f

Branch: refs/heads/jira/solr-9959
Commit: 65b4530fb39842418eae8d2acb7c463182039083
Parents: 52632cf
Author: Mikhail Khludnev <mk...@apache.org>
Authored: Mon Apr 3 23:45:54 2017 +0300
Committer: Mikhail Khludnev <mk...@apache.org>
Committed: Mon Apr 3 23:53:54 2017 +0300

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  2 ++
 .../src/java/org/apache/solr/util/SolrCLI.java  | 18 +++++++---
 .../apache/solr/util/TestSolrCLIRunExample.java | 38 ++++++++++++++++++++
 3 files changed, 53 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/65b4530f/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 6fe4cc0..c1a7503 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -190,6 +190,8 @@ Other Changes
 
 * SOLR-8906: Make transient core cache pluggable (Erick Erickson)
 
+* SOLR-9745: print errors from solr.cmd (Gopikannan Venugopalsamy via Mikhail Khludnev)
+
 ==================  6.5.1 ==================
 
 Bug Fixes

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/65b4530f/solr/core/src/java/org/apache/solr/util/SolrCLI.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/SolrCLI.java b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
index 6a85422..da7e63e 100644
--- a/solr/core/src/java/org/apache/solr/util/SolrCLI.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
@@ -62,6 +62,7 @@ import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.exec.DefaultExecuteResultHandler;
 import org.apache.commons.exec.DefaultExecutor;
+import org.apache.commons.exec.ExecuteException;
 import org.apache.commons.exec.Executor;
 import org.apache.commons.exec.OS;
 import org.apache.commons.exec.environment.EnvironmentUtils;
@@ -2928,18 +2929,25 @@ public class SolrCLI {
             }
           }
         }
-        executor.execute(org.apache.commons.exec.CommandLine.parse(startCmd), startEnv, new DefaultExecuteResultHandler());
+        DefaultExecuteResultHandler handler = new DefaultExecuteResultHandler();
+        executor.execute(org.apache.commons.exec.CommandLine.parse(startCmd), startEnv, handler);
 
-        // brief wait before proceeding on Windows
+        // wait for execution.
         try {
-          Thread.sleep(3000);
+          handler.waitFor();
         } catch (InterruptedException ie) {
           // safe to ignore ...
           Thread.interrupted();
         }
-
+        if (handler.getExitValue() != 0) {
+          throw new Exception("Failed to start Solr using command: "+startCmd+" Exception : "+handler.getException());
+        }
       } else {
-        code = executor.execute(org.apache.commons.exec.CommandLine.parse(startCmd));
+        try {
+          code = executor.execute(org.apache.commons.exec.CommandLine.parse(startCmd));
+        } catch(ExecuteException e){
+          throw new Exception("Failed to start Solr using command: "+startCmd+" Exception : "+ e);
+        }
       }
       if (code != 0)
         throw new Exception("Failed to start Solr using command: "+startCmd);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/65b4530f/solr/core/src/test/org/apache/solr/util/TestSolrCLIRunExample.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/util/TestSolrCLIRunExample.java b/solr/core/src/test/org/apache/solr/util/TestSolrCLIRunExample.java
index 7980560..02d91b0 100644
--- a/solr/core/src/test/org/apache/solr/util/TestSolrCLIRunExample.java
+++ b/solr/core/src/test/org/apache/solr/util/TestSolrCLIRunExample.java
@@ -482,4 +482,42 @@ public class TestSolrCLIRunExample extends SolrTestCaseJ4 {
     // stop the test instance
     executor.execute(org.apache.commons.exec.CommandLine.parse("bin/solr stop -p "+bindPort));
   }
+  
+  @Test
+  public void testFailExecuteScript() throws Exception {
+    File solrHomeDir = new File(ExternalPaths.SERVER_HOME);
+    if (!solrHomeDir.isDirectory())
+      fail(solrHomeDir.getAbsolutePath()+" not found and is required to run this test!");
+   
+    Path tmpDir = createTempDir();
+    File solrExampleDir = tmpDir.toFile();
+    File solrServerDir = solrHomeDir.getParentFile();
+
+    // need a port to start the example server on
+    int bindPort = -1;
+    try (ServerSocket socket = new ServerSocket(0)) {
+      bindPort = socket.getLocalPort();
+    }
+
+    File toExecute = new File(tmpDir.toString(), "failExecuteScript");
+    assertTrue("Should have been able to create file '" + toExecute.getAbsolutePath() + "' ", toExecute.createNewFile());
+    
+    String[] toolArgs = new String[] {
+        "-e", "techproducts",
+        "-serverDir", solrServerDir.getAbsolutePath(),
+        "-exampleDir", solrExampleDir.getAbsolutePath(),
+        "-p", String.valueOf(bindPort),
+        "-script", toExecute.getAbsolutePath().toString()
+    };
+
+    // capture tool output to stdout
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    PrintStream stdoutSim = new PrintStream(baos, true, StandardCharsets.UTF_8.name());
+
+    DefaultExecutor executor = new DefaultExecutor();
+
+    SolrCLI.RunExampleTool tool = new SolrCLI.RunExampleTool(executor, System.in, stdoutSim);
+    int code = tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), toolArgs));
+    assertTrue("Execution should have failed with return code 1", code == 1);
+  }
 }
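
Editor's note: the synchronous branch of the SolrCLI change above wraps executor.execute(...) so a failing command surfaces as a readable error instead of a bare exit status. A hedged sketch of that pattern, with an illustrative command that exits non-zero on Unix-like systems:

    import org.apache.commons.exec.CommandLine;
    import org.apache.commons.exec.DefaultExecutor;
    import org.apache.commons.exec.ExecuteException;

    public class SyncExecSketch {
      public static void main(String[] args) throws Exception {
        String startCmd = "false"; // illustrative command only
        DefaultExecutor executor = new DefaultExecutor();
        int code;
        try {
          code = executor.execute(CommandLine.parse(startCmd));
        } catch (ExecuteException e) {
          // Re-wrapping keeps both the failing command and the underlying
          // exception in the message, which is what the commit adds to SolrCLI.
          throw new Exception("Failed to run command: " + startCmd + " Exception : " + e);
        }
        if (code != 0) {
          throw new Exception("Failed to run command: " + startCmd);
        }
      }
    }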


[14/14] lucene-solr:jira/solr-9959: Merge branch 'master' into jira/solr-9959

Posted by ab...@apache.org.
Merge branch 'master' into jira/solr-9959


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/b9b707cc
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/b9b707cc
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/b9b707cc

Branch: refs/heads/jira/solr-9959
Commit: b9b707cce66c27953bea03b35ba61795c8252d35
Parents: 4249c8a 05749d0
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Tue Apr 4 18:18:06 2017 +0200
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Tue Apr 4 18:18:06 2017 +0200

----------------------------------------------------------------------
 dev-tools/idea/.idea/workspace.xml              |   2 +-
 dev-tools/maven/pom.xml.template                |   1 -
 lucene/CHANGES.txt                              |  10 +-
 .../lucene50/Lucene50SegmentInfoFormat.java     |   2 +-
 .../lucene/codecs/lucene62/Lucene62Codec.java   |   2 +-
 .../apache/lucene/index/FixBrokenOffsets.java   |   3 +
 .../lucene50/Lucene50RWSegmentInfoFormat.java   |   2 +-
 .../lucene50/TestLucene50SegmentInfoFormat.java |  10 +
 .../lucene53/TestLucene53NormsFormat.java       |   6 +
 .../lucene/codecs/lucene62/Lucene62RWCodec.java |  12 +
 .../lucene62/Lucene62RWSegmentInfoFormat.java   | 193 ++++++++
 .../lucene62/TestLucene62SegmentInfoFormat.java |  48 ++
 .../index/TestBackwardsCompatibility.java       |  69 ++-
 .../lucene/index/TestFixBrokenOffsets.java      |  10 +-
 .../lucene/index/TestIndexWriterOnOldIndex.java |   6 +-
 .../simpletext/SimpleTextSegmentInfoFormat.java |  29 +-
 lucene/common-build.xml                         |   3 -
 .../lucene62/Lucene62SegmentInfoFormat.java     | 152 +------
 .../lucene/codecs/lucene70/Lucene70Codec.java   |   3 +-
 .../lucene70/Lucene70SegmentInfoFormat.java     | 439 +++++++++++++++++++
 .../org/apache/lucene/index/CheckIndex.java     |   2 +-
 .../lucene/index/DocumentsWriterPerThread.java  |   2 +-
 .../apache/lucene/index/FilterCodecReader.java  |   5 +-
 .../apache/lucene/index/FilterLeafReader.java   |   5 +-
 .../org/apache/lucene/index/IndexWriter.java    |  54 ++-
 .../org/apache/lucene/index/LeafMetaData.java   |  74 ++++
 .../org/apache/lucene/index/LeafReader.java     |   7 +-
 .../apache/lucene/index/MergeReaderWrapper.java |   5 +-
 .../org/apache/lucene/index/MergeState.java     |   2 +-
 .../apache/lucene/index/ParallelLeafReader.java |  36 +-
 .../apache/lucene/index/ReadersAndUpdates.java  |   4 +-
 .../org/apache/lucene/index/SegmentInfo.java    |  19 +-
 .../org/apache/lucene/index/SegmentInfos.java   |  77 ++--
 .../org/apache/lucene/index/SegmentMerger.java  |  14 +
 .../org/apache/lucene/index/SegmentReader.java  |  10 +-
 .../lucene/index/SlowCodecReaderWrapper.java    |   5 +-
 .../lucene/index/StandardDirectoryReader.java   |   4 +-
 .../EarlyTerminatingSortingCollector.java       |   2 +-
 .../org/apache/lucene/search/LRUQueryCache.java |  32 +-
 .../lucene62/TestLucene62SegmentInfoFormat.java |  39 --
 .../lucene70/TestLucene70SegmentInfoFormat.java |  35 ++
 .../org/apache/lucene/index/TestCodecs.java     |   4 +-
 .../index/TestDemoParallelLeafReader.java       |   3 +-
 .../test/org/apache/lucene/index/TestDoc.java   |   8 +-
 .../apache/lucene/index/TestDocumentWriter.java |   9 +-
 .../apache/lucene/index/TestIndexSorting.java   |   2 +-
 .../apache/lucene/index/TestIndexWriter.java    |   2 +-
 .../index/TestIndexWriterThreadsToSegments.java |   3 +-
 .../index/TestOneMergeWrappingMergePolicy.java  |   1 +
 .../apache/lucene/index/TestSegmentInfos.java   |  22 +-
 .../apache/lucene/index/TestSegmentMerger.java  |   7 +-
 .../apache/lucene/index/TestSegmentReader.java  |   3 +-
 .../lucene/index/TestSegmentTermDocs.java       |   7 +-
 .../apache/lucene/search/TestLRUQueryCache.java |  76 ++++
 .../search/highlight/TermVectorLeafReader.java  |   7 +-
 .../apache/lucene/index/memory/MemoryIndex.java |   6 +-
 .../org/apache/lucene/index/IndexSplitter.java  |   4 +-
 .../lucene/replicator/nrt/ReplicaNode.java      |   2 +-
 .../index/BaseCompoundFormatTestCase.java       |   3 +-
 .../index/BaseFieldInfoFormatTestCase.java      |   3 +-
 .../index/BaseIndexFileFormatTestCase.java      |  28 +-
 .../lucene/index/BaseNormsFormatTestCase.java   |   6 +-
 .../index/BaseSegmentInfoFormatTestCase.java    |  54 ++-
 .../lucene/index/RandomPostingsTester.java      |   2 +-
 .../org/apache/lucene/search/QueryUtils.java    |   6 +-
 solr/CHANGES.txt                                |  20 +-
 .../solr/ltr/feature/OriginalScoreFeature.java  |   2 +-
 .../ltr/feature/TestOriginalScoreFeature.java   | 120 ++---
 .../org/apache/solr/core/CoreContainer.java     |  25 +-
 .../java/org/apache/solr/core/NodeConfig.java   |  25 +-
 .../java/org/apache/solr/core/SolrCores.java    | 166 ++++---
 .../org/apache/solr/core/SolrXmlConfig.java     |   5 +
 .../solr/core/TransientSolrCoreCache.java       | 127 ++++++
 .../core/TransientSolrCoreCacheDefault.java     | 198 +++++++++
 .../core/TransientSolrCoreCacheFactory.java     |  85 ++++
 .../TransientSolrCoreCacheFactoryDefault.java   |  31 ++
 .../apache/solr/handler/AnalyzeEvaluator.java   |  17 +-
 .../solr/handler/admin/MetricsHandler.java      |   3 +-
 .../solr/index/SlowCompositeReaderWrapper.java  |  19 +-
 .../apache/solr/search/grouping/Command.java    |   2 +-
 .../solr/search/grouping/CommandHandler.java    |   4 +-
 .../distributed/command/QueryCommand.java       |   2 +-
 .../command/SearchGroupsFieldCommand.java       |   2 +-
 .../command/TopGroupsFieldCommand.java          |   2 +-
 .../src/java/org/apache/solr/util/SolrCLI.java  |  18 +-
 solr/core/src/test-files/solr/solr.xml          |   5 +
 .../test/SecureRandomAlgorithmTesterApp.java    |  41 --
 .../org/apache/solr/cloud/ZkControllerTest.java |   9 +-
 .../org/apache/solr/core/TestCoreDiscovery.java |   7 +-
 .../org/apache/solr/core/TestLazyCores.java     |  53 ++-
 .../solr/handler/admin/MetricsHandlerTest.java  |   7 +-
 .../test/org/apache/solr/search/TestDocSet.java |   7 +-
 .../apache/solr/util/TestSolrCLIRunExample.java |  38 ++
 .../java/org/apache/solr/SolrTestCaseJ4.java    |  22 +-
 94 files changed, 2127 insertions(+), 638 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b9b707cc/solr/core/src/java/org/apache/solr/core/CoreContainer.java
----------------------------------------------------------------------
diff --cc solr/core/src/java/org/apache/solr/core/CoreContainer.java
index ea792c5,1ef036a..f1e28dd
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@@ -491,9 -493,9 +494,9 @@@ public class CoreContainer 
      }
  
      updateShardHandler = new UpdateShardHandler(cfg.getUpdateShardHandlerConfig());
 -    updateShardHandler.initializeMetrics(metricManager, SolrInfoMBean.Group.node.toString(), "updateShardHandler");
 +    updateShardHandler.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), "updateShardHandler");
  
-     solrCores.allocateLazyCores(cfg.getTransientCacheSize(), loader);
+     transientCoreCache = TransientSolrCoreCacheFactory.newInstance(loader, this);
  
      logging = LogWatcher.newRegisteredLogWatcher(cfg.getLogWatcherConfig(), loader);
  
@@@ -533,25 -535,17 +536,25 @@@
  
      // initialize gauges for reporting the number of cores and disk total/free
  
 -    String registryName = SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node);
 -    metricManager.registerGauge(registryName, () -> solrCores.getCores().size(),
 -        true, "loaded", SolrInfoMBean.Category.CONTAINER.toString(), "cores");
 -    metricManager.registerGauge(registryName, () -> solrCores.getLoadedCoreNames().size() - solrCores.getCores().size(),
 -        true, "lazy",SolrInfoMBean.Category.CONTAINER.toString(), "cores");
 -    metricManager.registerGauge(registryName, () -> solrCores.getAllCoreNames().size() - solrCores.getLoadedCoreNames().size(),
 -        true, "unloaded",SolrInfoMBean.Category.CONTAINER.toString(), "cores");
 -    metricManager.registerGauge(registryName, () -> cfg.getCoreRootDirectory().toFile().getTotalSpace(),
 -        true, "totalSpace", SolrInfoMBean.Category.CONTAINER.toString(), "fs");
 -    metricManager.registerGauge(registryName, () -> cfg.getCoreRootDirectory().toFile().getUsableSpace(),
 -        true, "usableSpace", SolrInfoMBean.Category.CONTAINER.toString(), "fs");
 +    String registryName = SolrMetricManager.getRegistryName(SolrInfoBean.Group.node);
 +    metricManager.registerGauge(null, registryName, () -> solrCores.getCores().size(),
 +        true, "loaded", SolrInfoBean.Category.CONTAINER.toString(), "cores");
-     metricManager.registerGauge(null, registryName, () -> solrCores.getCoreNames().size() - solrCores.getCores().size(),
++    metricManager.registerGauge(null, registryName, () -> solrCores.getLoadedCoreNames().size() - solrCores.getCores().size(),
 +        true, "lazy", SolrInfoBean.Category.CONTAINER.toString(), "cores");
-     metricManager.registerGauge(null, registryName, () -> solrCores.getAllCoreNames().size() - solrCores.getCoreNames().size(),
++    metricManager.registerGauge(null, registryName, () -> solrCores.getAllCoreNames().size() - solrCores.getLoadedCoreNames().size(),
 +        true, "unloaded", SolrInfoBean.Category.CONTAINER.toString(), "cores");
 +    metricManager.registerGauge(null, registryName, () -> cfg.getCoreRootDirectory().toFile().getTotalSpace(),
 +        true, "totalSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs");
 +    metricManager.registerGauge(null, registryName, () -> cfg.getCoreRootDirectory().toFile().getUsableSpace(),
 +        true, "usableSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs");
 +    // add version information
 +    metricManager.registerGauge(null, registryName, () -> this.getClass().getPackage().getSpecificationVersion(),
 +        true, "specification", SolrInfoBean.Category.CONTAINER.toString(), "version");
 +    metricManager.registerGauge(null, registryName, () -> this.getClass().getPackage().getImplementationVersion(),
 +        true, "implementation", SolrInfoBean.Category.CONTAINER.toString(), "version");
 +
 +    SolrFieldCacheBean fieldCacheBean = new SolrFieldCacheBean();
 +    fieldCacheBean.initializeMetrics(metricManager, registryName, null);
  
      if (isZooKeeperAware()) {
        metricManager.loadClusterReporters(cfg.getMetricReporterPlugins(), this);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b9b707cc/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
----------------------------------------------------------------------
diff --cc solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
index 144dc1a,b37bd52..c6fe2ae
--- a/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
@@@ -453,30 -448,18 +454,34 @@@ public class SolrXmlConfig 
  
    private static PluginInfo[] getMetricReporterPluginInfos(Config config) {
      NodeList nodes = (NodeList) config.evaluate("solr/metrics/reporter", XPathConstants.NODESET);
 -    if (nodes == null || nodes.getLength() == 0)
 -      return new PluginInfo[0];
 -    PluginInfo[] configs = new PluginInfo[nodes.getLength()];
 -    for (int i = 0; i < nodes.getLength(); i++) {
 -      // we don't require class in order to support predefined replica and node reporter classes
 -      configs[i] = new PluginInfo(nodes.item(i), "SolrMetricReporter", true, false);
 +    List<PluginInfo> configs = new ArrayList<>();
 +    boolean hasJmxReporter = false;
 +    if (nodes != null && nodes.getLength() > 0) {
 +      for (int i = 0; i < nodes.getLength(); i++) {
 +        // we don't require class in order to support predefined replica and node reporter classes
 +        PluginInfo info = new PluginInfo(nodes.item(i), "SolrMetricReporter", true, false);
 +        String clazz = info.className;
 +        if (clazz != null && clazz.equals(SolrJmxReporter.class.getName())) {
 +          hasJmxReporter = true;
 +        }
 +        configs.add(info);
 +      }
      }
 -    return configs;
 +    // if there's an MBean server running but there was no JMX reporter then add a default one
 +    MBeanServer mBeanServer = JmxUtil.findFirstMBeanServer();
 +    if (mBeanServer != null && !hasJmxReporter) {
 +      log.info("MBean server found: " + mBeanServer + ", but no JMX reporters were configured - adding default JMX reporter.");
 +      Map<String,String> attributes = new HashMap<>();
 +      attributes.put("name", "default");
 +      attributes.put("class", SolrJmxReporter.class.getName());
 +      PluginInfo defaultPlugin = new PluginInfo("reporter", attributes, null, null);
 +      configs.add(defaultPlugin);
 +    }
 +    return configs.toArray(new PluginInfo[configs.size()]);
    }
+   private static PluginInfo getTransientCoreCacheFactoryPluginInfo(Config config) {
+     Node node = config.getNode("solr/transientCoreCacheFactory", false);
+     return (node == null) ? null : new PluginInfo(node, "transientCoreCacheFactory", false, true);
+   }
  }
  

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b9b707cc/solr/core/src/java/org/apache/solr/handler/AnalyzeEvaluator.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b9b707cc/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/b9b707cc/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
----------------------------------------------------------------------


[03/14] lucene-solr:jira/solr-9959: LUCENE-7749: Made LRUQueryCache delegate the scorerSupplier method.

Posted by ab...@apache.org.
LUCENE-7749: Made LRUQueryCache delegate the scorerSupplier method.
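
In rough outline, the change makes CachingWrapperWeight build its Scorer through a ScorerSupplier instead of directly, so callers can inspect cost() before materializing a Scorer. A simplified sketch of the delegation (condensed from the diff below, not additional committed code):

    @Override
    public Scorer scorer(LeafReaderContext context) throws IOException {
      // Delegate to scorerSupplier so cached and uncached segments share one code path.
      ScorerSupplier scorerSupplier = scorerSupplier(context);
      if (scorerSupplier == null) {
        return null; // nothing matches on this segment
      }
      // randomAccess=false: the scorer is consumed as a sequential iterator here.
      return scorerSupplier.get(false);
    }

On the cached path, scorerSupplier wraps the cached DocIdSetIterator in a ConstantScoreScorer; segments that are not eligible for caching simply fall through to in.scorerSupplier(context).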


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/2e545d78
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/2e545d78
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/2e545d78

Branch: refs/heads/jira/solr-9959
Commit: 2e545d78f5fe745905bcff19eb73a9a9faa4c032
Parents: 186c5edd
Author: Adrien Grand <jp...@gmail.com>
Authored: Mon Apr 3 13:49:05 2017 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Mon Apr 3 15:07:03 2017 +0200

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |  3 +
 .../org/apache/lucene/search/LRUQueryCache.java | 32 +++++++--
 .../apache/lucene/search/TestLRUQueryCache.java | 76 ++++++++++++++++++++
 3 files changed, 105 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e545d78/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 833fd3c..83113a8 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -106,6 +106,9 @@ Bug Fixes
 * LUCENE-7755: Fixed join queries to not reference IndexReaders, as it could
   cause leaks if they are cached. (Adrien Grand)
 
+* LUCENE-7749: Made LRUQueryCache delegate the scorerSupplier method.
+  (Martin Amirault via Adrien Grand)
+
 Other
 
 * LUCENE-7763: Remove outdated comment in IndexWriterConfig.setIndexSort javadocs.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e545d78/lucene/core/src/java/org/apache/lucene/search/LRUQueryCache.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/LRUQueryCache.java b/lucene/core/src/java/org/apache/lucene/search/LRUQueryCache.java
index b1ba4e4..451ce81 100644
--- a/lucene/core/src/java/org/apache/lucene/search/LRUQueryCache.java
+++ b/lucene/core/src/java/org/apache/lucene/search/LRUQueryCache.java
@@ -717,7 +717,7 @@ public class LRUQueryCache implements QueryCache, Accountable {
     }
 
     @Override
-    public Scorer scorer(LeafReaderContext context) throws IOException {
+    public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
       if (used.compareAndSet(false, true)) {
         policy.onUse(getQuery());
       }
@@ -726,18 +726,18 @@ public class LRUQueryCache implements QueryCache, Accountable {
       final IndexReader.CacheHelper cacheHelper = context.reader().getCoreCacheHelper();
       if (cacheHelper == null) {
         // this segment is not suitable for caching
-        return in.scorer(context);
+        return in.scorerSupplier(context);
       }
 
       // Short-circuit: Check whether this segment is eligible for caching
       // before we take a lock because of #get
       if (shouldCache(context) == false) {
-        return in.scorer(context);
+        return in.scorerSupplier(context);
       }
 
       // If the lock is already busy, prefer using the uncached version than waiting
       if (lock.tryLock() == false) {
-        return in.scorer(context);
+        return in.scorerSupplier(context);
       }
 
       DocIdSet docIdSet;
@@ -752,7 +752,7 @@ public class LRUQueryCache implements QueryCache, Accountable {
           docIdSet = cache(context);
           putIfAbsent(in.getQuery(), context, docIdSet, cacheHelper);
         } else {
-          return in.scorer(context);
+          return in.scorerSupplier(context);
         }
       }
 
@@ -765,7 +765,27 @@ public class LRUQueryCache implements QueryCache, Accountable {
         return null;
       }
 
-      return new ConstantScoreScorer(this, 0f, disi);
+      return new ScorerSupplier() {
+        @Override
+        public Scorer get(boolean randomAccess) throws IOException {
+          return new ConstantScoreScorer(CachingWrapperWeight.this, 0f, disi);
+        }
+        
+        @Override
+        public long cost() {
+          return disi.cost();
+        }
+      };
+
+    }
+
+    @Override
+    public Scorer scorer(LeafReaderContext context) throws IOException {
+      ScorerSupplier scorerSupplier = scorerSupplier(context);
+      if (scorerSupplier == null) {
+        return null;
+      }
+      return scorerSupplier.get(false);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2e545d78/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java b/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
index 91c1887..ee3f4ad 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
@@ -1273,4 +1273,80 @@ public class TestLRUQueryCache extends LuceneTestCase {
     w.close();
     dir.close();
   }
+
+  private static class DummyQuery2 extends Query {
+
+    private final AtomicBoolean scorerCreated;
+
+    DummyQuery2(AtomicBoolean scorerCreated) {
+      this.scorerCreated = scorerCreated;
+    }
+
+    @Override
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+      return new ConstantScoreWeight(this, boost) {
+        @Override
+        public Scorer scorer(LeafReaderContext context) throws IOException {
+          return scorerSupplier(context).get(false);
+        }
+        @Override
+        public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
+          final Weight weight = this;
+          return new ScorerSupplier() {
+            @Override
+            public Scorer get(boolean randomAccess) throws IOException {
+              scorerCreated.set(true);
+              return new ConstantScoreScorer(weight, boost, DocIdSetIterator.all(1));
+            }
+
+            @Override
+            public long cost() {
+              return 1;
+            }
+          };
+        }
+      };
+    }
+
+    @Override
+    public boolean equals(Object other) {
+      return sameClassAs(other);
+    }
+
+    @Override
+    public int hashCode() {
+      return 0;
+    }
+
+    @Override
+    public String toString(String field) {
+      return "DummyQuery2";
+    }
+
+  }
+
+  public void testPropagatesScorerSupplier() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriterConfig iwc = newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE);
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+    w.addDocument(new Document());
+    DirectoryReader reader = w.getReader();
+    IndexSearcher searcher = newSearcher(reader);
+    searcher.setQueryCachingPolicy(NEVER_CACHE);
+
+    LRUQueryCache cache = new LRUQueryCache(1, 1000);
+    searcher.setQueryCache(cache);
+
+    AtomicBoolean scorerCreated = new AtomicBoolean(false);
+    Query query = new DummyQuery2(scorerCreated);
+    Weight weight = searcher.createNormalizedWeight(query, false);
+    ScorerSupplier supplier = weight.scorerSupplier(searcher.getIndexReader().leaves().get(0));
+    assertFalse(scorerCreated.get());
+    supplier.get(random().nextBoolean());
+    assertTrue(scorerCreated.get());
+
+    reader.close();
+    w.close();
+    dir.close();
+  }
 }