Posted to commits@lucene.apache.org by mi...@apache.org on 2014/03/16 20:39:37 UTC

svn commit: r1578144 [7/37] - in /lucene/dev/branches/lucene5376_2: ./ dev-tools/ dev-tools/idea/.idea/libraries/ dev-tools/idea/solr/contrib/dataimporthandler/ dev-tools/idea/solr/contrib/map-reduce/ dev-tools/idea/solr/core/src/test/ dev-tools/script...

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/FieldInfo.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/FieldInfo.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/FieldInfo.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/FieldInfo.java Sun Mar 16 19:39:10 2014
@@ -324,7 +324,7 @@ public final class FieldInfo {
    */
   public String putAttribute(String key, String value) {
     if (attributes == null) {
-      attributes = new HashMap<String,String>();
+      attributes = new HashMap<>();
     }
     return attributes.put(key, value);
   }
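
The bulk of this commit is the same mechanical cleanup repeated across files: explicit type arguments on the right-hand side of generic instantiations are replaced with the Java 7 diamond operator (<>). The compiler infers the arguments from the declared type, so behavior is unchanged. A minimal before/after sketch, using a field that actually appears in the next file's diff:

    // Before: type arguments spelled out on both sides.
    private final HashMap<String,FieldInfo> byName = new HashMap<String,FieldInfo>();

    // After: the diamond lets javac infer <String,FieldInfo> from the
    // declared type; due to type erasure the compiled bytecode is identical.
    private final HashMap<String,FieldInfo> byName = new HashMap<>();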

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java Sun Mar 16 19:39:10 2014
@@ -41,8 +41,8 @@ public class FieldInfos implements Itera
   private final boolean hasNorms;
   private final boolean hasDocValues;
   
-  private final SortedMap<Integer,FieldInfo> byNumber = new TreeMap<Integer,FieldInfo>();
-  private final HashMap<String,FieldInfo> byName = new HashMap<String,FieldInfo>();
+  private final SortedMap<Integer,FieldInfo> byNumber = new TreeMap<>();
+  private final HashMap<String,FieldInfo> byName = new HashMap<>();
   private final Collection<FieldInfo> values; // for an unmodifiable iterator
   
   /**
@@ -174,9 +174,9 @@ public class FieldInfos implements Itera
     private int lowestUnassignedFieldNumber = -1;
     
     FieldNumbers() {
-      this.nameToNumber = new HashMap<String, Integer>();
-      this.numberToName = new HashMap<Integer, String>();
-      this.docValuesType = new HashMap<String,DocValuesType>();
+      this.nameToNumber = new HashMap<>();
+      this.numberToName = new HashMap<>();
+      this.docValuesType = new HashMap<>();
     }
     
     /**
@@ -250,7 +250,7 @@ public class FieldInfos implements Itera
   }
   
   static final class Builder {
-    private final HashMap<String,FieldInfo> byName = new HashMap<String,FieldInfo>();
+    private final HashMap<String,FieldInfo> byName = new HashMap<>();
     final FieldNumbers globalFieldNumbers;
 
     Builder() {

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/FreqProxFields.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/FreqProxFields.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/FreqProxFields.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/FreqProxFields.java Sun Mar 16 19:39:10 2014
@@ -35,7 +35,7 @@ import org.apache.lucene.util.BytesRef;
  *  PostingsFormat. */
 
 class FreqProxFields extends Fields {
-  final Map<String,FreqProxTermsWriterPerField> fields = new LinkedHashMap<String,FreqProxTermsWriterPerField>();
+  final Map<String,FreqProxTermsWriterPerField> fields = new LinkedHashMap<>();
 
   public FreqProxFields(List<FreqProxTermsWriterPerField> fieldList) {
     // NOTE: fields are already sorted by field name

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/FreqProxTermsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/FreqProxTermsWriter.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/FreqProxTermsWriter.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/FreqProxTermsWriter.java Sun Mar 16 19:39:10 2014
@@ -36,7 +36,7 @@ final class FreqProxTermsWriter extends 
     // flushed segment:
     if (state.segUpdates != null && state.segUpdates.terms.size() > 0) {
       Map<Term,Integer> segDeletes = state.segUpdates.terms;
-      List<Term> deleteTerms = new ArrayList<Term>(segDeletes.keySet());
+      List<Term> deleteTerms = new ArrayList<>(segDeletes.keySet());
       Collections.sort(deleteTerms);
       String lastField = null;
       TermsEnum termsEnum = null;
@@ -87,7 +87,7 @@ final class FreqProxTermsWriter extends 
 
     // Gather all FieldData's that have postings, across all
     // ThreadStates
-    List<FreqProxTermsWriterPerField> allFields = new ArrayList<FreqProxTermsWriterPerField>();
+    List<FreqProxTermsWriterPerField> allFields = new ArrayList<>();
 
     for (TermsHashConsumerPerField f : fieldsToFlush.values()) {
       final FreqProxTermsWriterPerField perField = (FreqProxTermsWriterPerField) f;

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/FrozenBufferedUpdates.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/FrozenBufferedUpdates.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/FrozenBufferedUpdates.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/FrozenBufferedUpdates.java Sun Mar 16 19:39:10 2014
@@ -83,7 +83,7 @@ class FrozenBufferedUpdates {
     // so that it maps to all fields it affects, sorted by their docUpto, and traverse
     // that Term only once, applying the update to all fields that still need to be
     // updated. 
-    List<NumericUpdate> allUpdates = new ArrayList<NumericUpdate>();
+    List<NumericUpdate> allUpdates = new ArrayList<>();
     int numericUpdatesSize = 0;
     for (LinkedHashMap<Term,NumericUpdate> fieldUpdates : deletes.numericUpdates.values()) {
       for (NumericUpdate update : fieldUpdates.values()) {

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/IndexFileDeleter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/IndexFileDeleter.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/IndexFileDeleter.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/IndexFileDeleter.java Sun Mar 16 19:39:10 2014
@@ -81,21 +81,21 @@ final class IndexFileDeleter implements 
   /* Reference count for all files in the index.
    * Counts how many existing commits reference a file.
    **/
-  private Map<String, RefCount> refCounts = new HashMap<String, RefCount>();
+  private Map<String, RefCount> refCounts = new HashMap<>();
 
   /* Holds all commits (segments_N) currently in the index.
    * This will have just 1 commit if you are using the
    * default delete policy (KeepOnlyLastCommitDeletionPolicy).
    * Other policies may leave commit points live for longer
    * in which case this list would be longer than 1: */
-  private List<CommitPoint> commits = new ArrayList<CommitPoint>();
+  private List<CommitPoint> commits = new ArrayList<>();
 
   /* Holds files we had incref'd from the previous
    * non-commit checkpoint: */
-  private final List<String> lastFiles = new ArrayList<String>();
+  private final List<String> lastFiles = new ArrayList<>();
 
   /* Commits that the IndexDeletionPolicy have decided to delete: */
-  private List<CommitPoint> commitsToDelete = new ArrayList<CommitPoint>();
+  private List<CommitPoint> commitsToDelete = new ArrayList<>();
 
   private final InfoStream infoStream;
   private Directory directory;
@@ -597,7 +597,7 @@ final class IndexFileDeleter implements 
           infoStream.message("IFD", "unable to remove file \"" + fileName + "\": " + e.toString() + "; Will re-try later.");
         }
         if (deletable == null) {
-          deletable = new ArrayList<String>();
+          deletable = new ArrayList<>();
         }
         deletable.add(fileName);                  // add to deletable
       }

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java Sun Mar 16 19:39:10 2014
@@ -17,30 +17,11 @@ package org.apache.lucene.index;
  * limitations under the License.
  */
 
-import java.io.Closeable;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Queue;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicInteger;
-
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.index.FieldInfo.DocValuesType;
 import org.apache.lucene.index.FieldInfos.FieldNumbers;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
-import org.apache.lucene.index.MergePolicy.MergeTrigger;
 import org.apache.lucene.index.MergeState.CheckAbort;
 import org.apache.lucene.index.NumericFieldUpdates.UpdatesIterator;
 import org.apache.lucene.search.Query;
@@ -58,6 +39,24 @@ import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.InfoStream;
 import org.apache.lucene.util.ThreadInterruptedException;
 
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Queue;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
+
 /**
   An <code>IndexWriter</code> creates and maintains an index.
 
@@ -207,8 +206,9 @@ public class IndexWriter implements Clos
   /**
    * Absolute hard maximum length for a term, in bytes once
    * encoded as UTF8.  If a term arrives from the analyzer
-   * longer than this length, it is skipped and a message is
-   * printed to infoStream, if set (see {@link
+   * longer than this length, an
+   * <code>IllegalArgumentException</code>  is thrown
+   * and a message is printed to infoStream, if set (see {@link
    * IndexWriterConfig#setInfoStream(InfoStream)}).
    */
   public final static int MAX_TERM_LENGTH = DocumentsWriterPerThread.MAX_TERM_LENGTH_UTF8;
@@ -235,7 +235,7 @@ public class IndexWriter implements Clos
   final IndexFileDeleter deleter;
 
   // used by forceMerge to note those needing merging
-  private Map<SegmentCommitInfo,Boolean> segmentsToMerge = new HashMap<SegmentCommitInfo,Boolean>();
+  private Map<SegmentCommitInfo,Boolean> segmentsToMerge = new HashMap<>();
   private int mergeMaxNumSegments;
 
   private Lock writeLock;
@@ -245,13 +245,13 @@ public class IndexWriter implements Clos
 
   // Holds all SegmentInfo instances currently involved in
   // merges
-  private HashSet<SegmentCommitInfo> mergingSegments = new HashSet<SegmentCommitInfo>();
+  private HashSet<SegmentCommitInfo> mergingSegments = new HashSet<>();
 
   private MergePolicy mergePolicy;
   private final MergeScheduler mergeScheduler;
-  private LinkedList<MergePolicy.OneMerge> pendingMerges = new LinkedList<MergePolicy.OneMerge>();
-  private Set<MergePolicy.OneMerge> runningMerges = new HashSet<MergePolicy.OneMerge>();
-  private List<MergePolicy.OneMerge> mergeExceptions = new ArrayList<MergePolicy.OneMerge>();
+  private LinkedList<MergePolicy.OneMerge> pendingMerges = new LinkedList<>();
+  private Set<MergePolicy.OneMerge> runningMerges = new HashSet<>();
+  private List<MergePolicy.OneMerge> mergeExceptions = new ArrayList<>();
   private long mergeGen;
   private boolean stopMerges;
 
@@ -422,7 +422,7 @@ public class IndexWriter implements Clos
 
   class ReaderPool {
     
-    private final Map<SegmentCommitInfo,ReadersAndUpdates> readerMap = new HashMap<SegmentCommitInfo,ReadersAndUpdates>();
+    private final Map<SegmentCommitInfo,ReadersAndUpdates> readerMap = new HashMap<>();
 
     // used only by asserts
     public synchronized boolean infoIsLive(SegmentCommitInfo info) {
@@ -603,7 +603,7 @@ public class IndexWriter implements Clos
     // Make sure that every segment appears only once in the
     // pool:
     private boolean noDups() {
-      Set<String> seen = new HashSet<String>();
+      Set<String> seen = new HashSet<>();
       for(SegmentCommitInfo info : readerMap.keySet()) {
         assert !seen.contains(info.info.name);
         seen.add(info.info.name);
@@ -993,7 +993,7 @@ public class IndexWriter implements Clos
             try {
               // Give merge scheduler last chance to run, in case
               // any pending merges are waiting:
-              mergeScheduler.merge(this);
+              mergeScheduler.merge(this, MergeTrigger.CLOSING, false);
             } catch (ThreadInterruptedException tie) {
               // ignore any interruption, does not matter
               interrupted = true;
@@ -1159,7 +1159,7 @@ public class IndexWriter implements Clos
    * merge policy.
    *
    * <p>Note that each term in the document can be no longer
-   * than 16383 characters, otherwise an
+   * than {@link #MAX_TERM_LENGTH} in bytes, otherwise an
    * IllegalArgumentException will be thrown.</p>
    *
    * <p>Note that it's possible to create an invalid Unicode
@@ -1829,17 +1829,18 @@ public class IndexWriter implements Clos
     }
 
     MergePolicy.MergeSpecification spec;
-
+    boolean newMergesFound = false;
     synchronized(this) {
       spec = mergePolicy.findForcedDeletesMerges(segmentInfos);
-      if (spec != null) {
+      newMergesFound = spec != null;
+      if (newMergesFound) {
         final int numMerges = spec.merges.size();
         for(int i=0;i<numMerges;i++)
           registerMerge(spec.merges.get(i));
       }
     }
 
-    mergeScheduler.merge(this);
+    mergeScheduler.merge(this, MergeTrigger.EXPLICIT, newMergesFound);
 
     if (spec != null && doWait) {
       final int numMerges = spec.merges.size();
@@ -1931,29 +1932,30 @@ public class IndexWriter implements Clos
 
   private final void maybeMerge(MergeTrigger trigger, int maxNumSegments) throws IOException {
     ensureOpen(false);
-    updatePendingMerges(trigger, maxNumSegments);
-    mergeScheduler.merge(this);
+    boolean newMergesFound = updatePendingMerges(trigger, maxNumSegments);
+    mergeScheduler.merge(this, trigger, newMergesFound);
   }
 
-  private synchronized void updatePendingMerges(MergeTrigger trigger, int maxNumSegments)
+  private synchronized boolean updatePendingMerges(MergeTrigger trigger, int maxNumSegments)
     throws IOException {
     assert maxNumSegments == -1 || maxNumSegments > 0;
     assert trigger != null;
     if (stopMerges) {
-      return;
+      return false;
     }
 
     // Do not start new merges if we've hit OOME
     if (hitOOM) {
-      return;
+      return false;
     }
-
+    boolean newMergesFound = false;
     final MergePolicy.MergeSpecification spec;
     if (maxNumSegments != UNBOUNDED_MAX_MERGE_SEGMENTS) {
       assert trigger == MergeTrigger.EXPLICIT || trigger == MergeTrigger.MERGE_FINISHED :
         "Expected EXPLICT or MERGE_FINISHED as trigger even with maxNumSegments set but was: " + trigger.name();
       spec = mergePolicy.findForcedMerges(segmentInfos, maxNumSegments, Collections.unmodifiableMap(segmentsToMerge));
-      if (spec != null) {
+      newMergesFound = spec != null;
+      if (newMergesFound) {
         final int numMerges = spec.merges.size();
         for(int i=0;i<numMerges;i++) {
           final MergePolicy.OneMerge merge = spec.merges.get(i);
@@ -1963,13 +1965,14 @@ public class IndexWriter implements Clos
     } else {
       spec = mergePolicy.findMerges(trigger, segmentInfos);
     }
-
-    if (spec != null) {
+    newMergesFound = spec != null;
+    if (newMergesFound) {
       final int numMerges = spec.merges.size();
       for(int i=0;i<numMerges;i++) {
         registerMerge(spec.merges.get(i));
       }
     }
+    return newMergesFound;
   }
 
   /** Expert: to be used by a {@link MergePolicy} to avoid
@@ -2343,12 +2346,12 @@ public class IndexWriter implements Clos
   }
 
   private synchronized void resetMergeExceptions() {
-    mergeExceptions = new ArrayList<MergePolicy.OneMerge>();
+    mergeExceptions = new ArrayList<>();
     mergeGen++;
   }
 
   private void noDupDirs(Directory... dirs) {
-    HashSet<Directory> dups = new HashSet<Directory>();
+    HashSet<Directory> dups = new HashSet<>();
     for(int i=0;i<dirs.length;i++) {
       if (dups.contains(dirs[i]))
         throw new IllegalArgumentException("Directory " + dirs[i] + " appears more than once");
@@ -2362,7 +2365,7 @@ public class IndexWriter implements Clos
    *  to match with a call to {@link IOUtils#close} in a
    *  finally clause. */
   private List<Lock> acquireWriteLocks(Directory... dirs) throws IOException {
-    List<Lock> locks = new ArrayList<Lock>();
+    List<Lock> locks = new ArrayList<>();
     for(int i=0;i<dirs.length;i++) {
       boolean success = false;
       try {
@@ -2441,7 +2444,7 @@ public class IndexWriter implements Clos
 
       flush(false, true);
 
-      List<SegmentCommitInfo> infos = new ArrayList<SegmentCommitInfo>();
+      List<SegmentCommitInfo> infos = new ArrayList<>();
 
       boolean success = false;
       try {
@@ -2564,7 +2567,7 @@ public class IndexWriter implements Clos
       flush(false, true);
 
       String mergedName = newSegmentName();
-      final List<AtomicReader> mergeReaders = new ArrayList<AtomicReader>();
+      final List<AtomicReader> mergeReaders = new ArrayList<>();
       for (IndexReader indexReader : readers) {
         numDocs += indexReader.numDocs();
         for (AtomicReaderContext ctx : indexReader.leaves()) {
@@ -2603,7 +2606,7 @@ public class IndexWriter implements Clos
 
       SegmentCommitInfo infoPerCommit = new SegmentCommitInfo(info, 0, -1L, -1L);
 
-      info.setFiles(new HashSet<String>(trackingDir.getCreatedFiles()));
+      info.setFiles(new HashSet<>(trackingDir.getCreatedFiles()));
       trackingDir.getCreatedFiles().clear();
                                          
       setDiagnostics(info, SOURCE_ADDINDEXES_READERS);
@@ -2681,7 +2684,7 @@ public class IndexWriter implements Clos
                                           info.info.getDiagnostics());
     SegmentCommitInfo newInfoPerCommit = new SegmentCommitInfo(newInfo, info.getDelCount(), info.getDelGen(), info.getFieldInfosGen());
 
-    Set<String> segFiles = new HashSet<String>();
+    Set<String> segFiles = new HashSet<>();
 
     // Build up new segment's file names.  Must do this
     // before writing SegmentInfo:
@@ -2877,7 +2880,7 @@ public class IndexWriter implements Clos
    * contents after calling this method has no effect.
    */
   public final synchronized void setCommitData(Map<String,String> commitUserData) {
-    segmentInfos.setUserData(new HashMap<String,String>(commitUserData));
+    segmentInfos.setUserData(new HashMap<>(commitUserData));
     ++changeCount;
   }
   
@@ -3200,7 +3203,7 @@ public class IndexWriter implements Clos
     ReadersAndUpdates mergedDeletesAndUpdates = null;
     boolean initWritableLiveDocs = false;
     MergePolicy.DocMap docMap = null;
-    final Map<String,NumericFieldUpdates> mergedFieldUpdates = new HashMap<String,NumericFieldUpdates>();
+    final Map<String,NumericFieldUpdates> mergedFieldUpdates = new HashMap<>();
     
     for (int i = 0; i < sourceSegments.size(); i++) {
       SegmentCommitInfo info = sourceSegments.get(i);
@@ -3854,7 +3857,7 @@ public class IndexWriter implements Clos
     // names.
     final String mergeSegmentName = newSegmentName();
     SegmentInfo si = new SegmentInfo(directory, Constants.LUCENE_MAIN_VERSION, mergeSegmentName, -1, false, codec, null);
-    Map<String,String> details = new HashMap<String,String>();
+    Map<String,String> details = new HashMap<>();
     details.put("mergeMaxNumSegments", "" + merge.maxNumSegments);
     details.put("mergeFactor", Integer.toString(merge.segments.size()));
     setDiagnostics(si, SOURCE_MERGE, details);
@@ -3875,7 +3878,7 @@ public class IndexWriter implements Clos
   }
 
   private static void setDiagnostics(SegmentInfo info, String source, Map<String,String> details) {
-    Map<String,String> diagnostics = new HashMap<String,String>();
+    Map<String,String> diagnostics = new HashMap<>();
     diagnostics.put("source", source);
     diagnostics.put("lucene.version", Constants.LUCENE_VERSION);
     diagnostics.put("os", Constants.OS_NAME);
@@ -3969,7 +3972,7 @@ public class IndexWriter implements Clos
       infoStream.message("IW", "merging " + segString(merge.segments));
     }
 
-    merge.readers = new ArrayList<SegmentReader>();
+    merge.readers = new ArrayList<>();
 
     // This is try/finally to make sure merger's readers are
     // closed:
@@ -4066,7 +4069,7 @@ public class IndexWriter implements Clos
         }
       }
       assert mergeState.segmentInfo == merge.info.info;
-      merge.info.info.setFiles(new HashSet<String>(dirWrapper.getCreatedFiles()));
+      merge.info.info.setFiles(new HashSet<>(dirWrapper.getCreatedFiles()));
 
       // Record which codec was used to write the segment
 
@@ -4313,7 +4316,7 @@ public class IndexWriter implements Clos
   // For infoStream output
   synchronized SegmentInfos toLiveInfos(SegmentInfos sis) {
     final SegmentInfos newSIS = new SegmentInfos();
-    final Map<SegmentCommitInfo,SegmentCommitInfo> liveSIS = new HashMap<SegmentCommitInfo,SegmentCommitInfo>();        
+    final Map<SegmentCommitInfo,SegmentCommitInfo> liveSIS = new HashMap<>();
     for(SegmentCommitInfo info : segmentInfos) {
       liveSIS.put(info, info);
     }
@@ -4605,7 +4608,7 @@ public class IndexWriter implements Clos
     }
 
     // Replace all previous files with the CFS/CFE files:
-    Set<String> siFiles = new HashSet<String>();
+    Set<String> siFiles = new HashSet<>();
     siFiles.add(fileName);
     siFiles.add(IndexFileNames.segmentFileName(info.name, "", IndexFileNames.COMPOUND_FILE_ENTRIES_EXTENSION));
     info.setFiles(siFiles);
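
Two substantive changes run through the IndexWriter hunks above, beyond the diamond cleanup. First, the javadoc updates record a behavior change: a term longer than MAX_TERM_LENGTH bytes (UTF-8) now causes an IllegalArgumentException instead of being silently skipped with an infoStream message. A sketch of what calling code should now expect; the class, method, and variable names here are illustrative, not from this commit:

    import java.io.IOException;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field.Store;
    import org.apache.lucene.document.StringField;
    import org.apache.lucene.index.IndexWriter;

    class HugeTermExample {
      // Assumes an already-open writer and an over-long value.
      static void addHugeTerm(IndexWriter writer, String hugeValue) throws IOException {
        Document doc = new Document();
        // StringField is not tokenized, so the whole value becomes a single
        // term; a value longer than IndexWriter.MAX_TERM_LENGTH bytes of
        // UTF-8 now hits the new up-front check.
        doc.add(new StringField("id", hugeValue, Store.NO));
        try {
          writer.addDocument(doc);
        } catch (IllegalArgumentException e) {
          // Previously the too-long term was skipped and a message printed to
          // infoStream; after this change the document is rejected outright.
        }
      }
    }

Second, updatePendingMerges now returns whether it registered any merges, and maybeMerge forwards that flag together with the MergeTrigger to mergeScheduler.merge; the scheduler side of that contract is shown in MergeScheduler.java below.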

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java Sun Mar 16 19:39:10 2014
@@ -130,7 +130,7 @@ public final class IndexWriterConfig ext
 
   // indicates whether this config instance is already attached to a writer.
   // not final so that it can be cloned properly.
-  private SetOnce<IndexWriter> writer = new SetOnce<IndexWriter>();
+  private SetOnce<IndexWriter> writer = new SetOnce<>();
   
   /**
    * Sets the {@link IndexWriter} this config is attached to.

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/LogMergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/LogMergePolicy.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/LogMergePolicy.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/LogMergePolicy.java Sun Mar 16 19:39:10 2014
@@ -24,8 +24,6 @@ import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 
-import org.apache.lucene.index.MergePolicy.MergeTrigger;
-
 
 /**
  * <p>This class implements a {@link MergePolicy} that tries
@@ -470,7 +468,7 @@ public abstract class LogMergePolicy ext
 
     // Compute levels, which is just log (base mergeFactor)
     // of the size of each segment
-    final List<SegmentInfoAndLevel> levels = new ArrayList<SegmentInfoAndLevel>();
+    final List<SegmentInfoAndLevel> levels = new ArrayList<>();
     final float norm = (float) Math.log(mergeFactor);
 
     final Collection<SegmentCommitInfo> mergingSegments = writer.get().getMergingSegments();
@@ -572,7 +570,7 @@ public abstract class LogMergePolicy ext
         } else if (!anyTooLarge) {
           if (spec == null)
             spec = new MergeSpecification();
-          final List<SegmentCommitInfo> mergeInfos = new ArrayList<SegmentCommitInfo>();
+          final List<SegmentCommitInfo> mergeInfos = new ArrayList<>();
           for(int i=start;i<end;i++) {
             mergeInfos.add(levels.get(i).info);
             assert infos.contains(levels.get(i).info);

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java Sun Mar 16 19:39:10 2014
@@ -17,18 +17,18 @@ package org.apache.lucene.index;
  * limitations under the License.
  */
 
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.MergeInfo;
+import org.apache.lucene.util.FixedBitSet;
+import org.apache.lucene.util.SetOnce;
+import org.apache.lucene.util.SetOnce.AlreadySetException;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.MergeInfo;
-import org.apache.lucene.util.FixedBitSet;
-import org.apache.lucene.util.SetOnce.AlreadySetException;
-import org.apache.lucene.util.SetOnce;
-
 /**
  * <p>Expert: a MergePolicy determines the sequence of
  * primitive merge operations.</p>
@@ -122,7 +122,7 @@ public abstract class MergePolicy implem
       if (0 == segments.size())
         throw new RuntimeException("segments must include at least one segment");
       // clone the list, as the in list may be based off original SegmentInfos and may be modified
-      this.segments = new ArrayList<SegmentCommitInfo>(segments);
+      this.segments = new ArrayList<>(segments);
       int count = 0;
       for(SegmentCommitInfo info : segments) {
         count += info.info.getDocCount();
@@ -140,7 +140,7 @@ public abstract class MergePolicy implem
       if (readers == null) {
         throw new IllegalStateException("IndexWriter has not initialized readers from the segment infos yet");
       }
-      final List<AtomicReader> readers = new ArrayList<AtomicReader>(this.readers.size());
+      final List<AtomicReader> readers = new ArrayList<>(this.readers.size());
       for (AtomicReader reader : this.readers) {
         if (reader.numDocs() > 0) {
           readers.add(reader);
@@ -295,7 +295,7 @@ public abstract class MergePolicy implem
      * The subset of segments to be included in the primitive merge.
      */
 
-    public final List<OneMerge> merges = new ArrayList<OneMerge>();
+    public final List<OneMerge> merges = new ArrayList<>();
 
     /** Sole constructor.  Use {@link
      *  #add(MergePolicy.OneMerge)} to add merges. */
@@ -393,7 +393,7 @@ public abstract class MergePolicy implem
       // should not happen
       throw new RuntimeException(e);
     }
-    clone.writer = new SetOnce<IndexWriter>();
+    clone.writer = new SetOnce<>();
     return clone;
   }
 
@@ -412,7 +412,7 @@ public abstract class MergePolicy implem
    * defaults than the {@link MergePolicy}
    */
   protected MergePolicy(double defaultNoCFSRatio, long defaultMaxCFSSegmentSize) {
-    writer = new SetOnce<IndexWriter>();
+    writer = new SetOnce<>();
     this.noCFSRatio = defaultNoCFSRatio;
     this.maxCFSSegmentSize = defaultMaxCFSSegmentSize;
   }
@@ -566,29 +566,4 @@ public abstract class MergePolicy implem
     this.maxCFSSegmentSize = (v > Long.MAX_VALUE) ? Long.MAX_VALUE : (long) v;
   }
 
-  /**
-   * MergeTrigger is passed to
-   * {@link MergePolicy#findMerges(MergeTrigger, SegmentInfos)} to indicate the
-   * event that triggered the merge.
-   */
-  public static enum MergeTrigger {
-    /**
-     * Merge was triggered by a segment flush.
-     */
-    SEGMENT_FLUSH, 
-    /**
-     * Merge was triggered by a full flush. Full flushes
-     * can be caused by a commit, NRT reader reopen or a close call on the index writer.
-     */
-    FULL_FLUSH,
-    /**
-     * Merge has been triggered explicitly by the user.
-     */
-    EXPLICIT,
-    
-    /**
-     * Merge was triggered by a successfully finished merge.
-     */
-    MERGE_FINISHED,
-  }
 }
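
The removed nested enum is not gone: the import hunks in IndexWriter.java and LogMergePolicy.java above drop org.apache.lucene.index.MergePolicy.MergeTrigger, and the new code refers to MergeTrigger unqualified, so it has evidently been promoted to a top-level type in org.apache.lucene.index. The following is a reconstruction from the constants removed above plus the new MergeTrigger.CLOSING usage in IndexWriter; the new file itself is outside this portion of the diff:

    package org.apache.lucene.index;

    /** Passed to MergePolicy#findMerges to indicate the event that
     *  triggered the merge. */
    public enum MergeTrigger {
      SEGMENT_FLUSH,   // a segment flush
      FULL_FLUSH,      // a full flush: commit, NRT reader reopen, or close
      EXPLICIT,        // explicitly requested by the user
      MERGE_FINISHED,  // a successfully finished merge
      CLOSING          // the writer is closing; used above via
                       // mergeScheduler.merge(this, MergeTrigger.CLOSING, false)
    }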

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/MergeScheduler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/MergeScheduler.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/MergeScheduler.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/MergeScheduler.java Sun Mar 16 19:39:10 2014
@@ -36,8 +36,12 @@ public abstract class MergeScheduler imp
   protected MergeScheduler() {
   }
 
-  /** Run the merges provided by {@link IndexWriter#getNextMerge()}. */
-  public abstract void merge(IndexWriter writer) throws IOException;
+  /** Run the merges provided by {@link IndexWriter#getNextMerge()}.
+   * @param writer the {@link IndexWriter} to obtain the merges from.
+   * @param trigger the {@link MergeTrigger} that caused this merge to happen
+   * @param newMergesFound <code>true</code> iff any new merges were found by the caller otherwise <code>false</code>
+   * */
+  public abstract void merge(IndexWriter writer, MergeTrigger trigger, boolean newMergesFound) throws IOException;
 
   /** Close this MergeScheduler. */
   @Override
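
This widened signature is the central API change of the commit: a scheduler now learns why it is being invoked and whether the caller just registered any new merges. A minimal custom scheduler against the new contract might look like the sketch below; it is not part of the commit, and it assumes the org.apache.lucene.index package, since IndexWriter.getNextMerge() appears to be package-private (SerialMergeScheduler, later in this diff, is the real in-tree example):

    package org.apache.lucene.index;

    import java.io.IOException;

    /** Hypothetical scheduler that drains pending merges one at a time. */
    class DrainingMergeScheduler extends MergeScheduler {
      @Override
      public void merge(IndexWriter writer, MergeTrigger trigger, boolean newMergesFound) throws IOException {
        // trigger says why we were called (SEGMENT_FLUSH, EXPLICIT, CLOSING, ...);
        // newMergesFound is true iff the caller registered new merges.
        while (true) {
          MergePolicy.OneMerge merge = writer.getNextMerge();
          if (merge == null) {
            break;
          }
          writer.merge(merge);   // run the pending merge on this thread
        }
      }

      @Override
      public void close() {}
    }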

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/MultiFields.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/MultiFields.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/MultiFields.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/MultiFields.java Sun Mar 16 19:39:10 2014
@@ -49,7 +49,7 @@ import org.apache.lucene.util.MergedIter
 public final class MultiFields extends Fields {
   private final Fields[] subs;
   private final ReaderSlice[] subSlices;
-  private final Map<String,Terms> terms = new ConcurrentHashMap<String,Terms>();
+  private final Map<String,Terms> terms = new ConcurrentHashMap<>();
 
   /** Returns a single {@link Fields} instance for this
    *  reader, merging fields/terms/docs/positions on the
@@ -69,8 +69,8 @@ public final class MultiFields extends F
         // already an atomic reader / reader with one leave
         return leaves.get(0).reader().fields();
       default:
-        final List<Fields> fields = new ArrayList<Fields>();
-        final List<ReaderSlice> slices = new ArrayList<ReaderSlice>();
+        final List<Fields> fields = new ArrayList<>();
+        final List<ReaderSlice> slices = new ArrayList<>();
         for (final AtomicReaderContext ctx : leaves) {
           final AtomicReader r = ctx.reader();
           final Fields f = r.fields();
@@ -203,7 +203,7 @@ public final class MultiFields extends F
     for(int i=0;i<subs.length;i++) {
       subIterators[i] = subs[i].iterator();
     }
-    return new MergedIterator<String>(subIterators);
+    return new MergedIterator<>(subIterators);
   }
 
   @Override
@@ -215,8 +215,8 @@ public final class MultiFields extends F
 
     // Lazy init: first time this field is requested, we
     // create & add to terms:
-    final List<Terms> subs2 = new ArrayList<Terms>();
-    final List<ReaderSlice> slices2 = new ArrayList<ReaderSlice>();
+    final List<Terms> subs2 = new ArrayList<>();
+    final List<ReaderSlice> slices2 = new ArrayList<>();
 
     // Gather all sub-readers that share this field
     for(int i=0;i<subs.length;i++) {
@@ -269,7 +269,7 @@ public final class MultiFields extends F
    *  will be unavailable.
    */
   public static Collection<String> getIndexedFields(IndexReader reader) {
-    final Collection<String> fields = new HashSet<String>();
+    final Collection<String> fields = new HashSet<>();
     for(final FieldInfo fieldInfo : getMergedFieldInfos(reader)) {
       if (fieldInfo.isIndexed()) {
         fields.add(fieldInfo.name);

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/MultiTerms.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/MultiTerms.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/MultiTerms.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/MultiTerms.java Sun Mar 16 19:39:10 2014
@@ -70,7 +70,7 @@ public final class MultiTerms extends Te
 
   @Override
   public TermsEnum intersect(CompiledAutomaton compiled, BytesRef startTerm) throws IOException {
-    final List<MultiTermsEnum.TermsEnumIndex> termsEnums = new ArrayList<MultiTermsEnum.TermsEnumIndex>();
+    final List<MultiTermsEnum.TermsEnumIndex> termsEnums = new ArrayList<>();
     for(int i=0;i<subs.length;i++) {
       final TermsEnum termsEnum = subs[i].intersect(compiled, startTerm);
       if (termsEnum != null) {
@@ -88,7 +88,7 @@ public final class MultiTerms extends Te
   @Override
   public TermsEnum iterator(TermsEnum reuse) throws IOException {
 
-    final List<MultiTermsEnum.TermsEnumIndex> termsEnums = new ArrayList<MultiTermsEnum.TermsEnumIndex>();
+    final List<MultiTermsEnum.TermsEnumIndex> termsEnums = new ArrayList<>();
     for(int i=0;i<subs.length;i++) {
       final TermsEnum termsEnum = subs[i].iterator(null);
       if (termsEnum != null) {

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/NoMergeScheduler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/NoMergeScheduler.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/NoMergeScheduler.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/NoMergeScheduler.java Sun Mar 16 19:39:10 2014
@@ -40,7 +40,7 @@ public final class NoMergeScheduler exte
   public void close() {}
 
   @Override
-  public void merge(IndexWriter writer) {}
+  public void merge(IndexWriter writer, MergeTrigger trigger, boolean newMergesFound) {}
 
   @Override
   public MergeScheduler clone() {
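
NoMergeScheduler simply adopts the wider signature with the same empty body, so code that disables merging entirely, e.g. via IndexWriterConfig.setMergeScheduler(NoMergeScheduler.INSTANCE), is unaffected by this change.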

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/ParallelAtomicReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/ParallelAtomicReader.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/ParallelAtomicReader.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/ParallelAtomicReader.java Sun Mar 16 19:39:10 2014
@@ -56,8 +56,8 @@ public class ParallelAtomicReader extend
   private final boolean closeSubReaders;
   private final int maxDoc, numDocs;
   private final boolean hasDeletions;
-  private final SortedMap<String,AtomicReader> fieldToReader = new TreeMap<String,AtomicReader>();
-  private final SortedMap<String,AtomicReader> tvFieldToReader = new TreeMap<String,AtomicReader>();
+  private final SortedMap<String,AtomicReader> fieldToReader = new TreeMap<>();
+  private final SortedMap<String,AtomicReader> tvFieldToReader = new TreeMap<>();
   
   /** Create a ParallelAtomicReader based on the provided
    *  readers; auto-closes the given readers on {@link #close()}. */
@@ -151,7 +151,7 @@ public class ParallelAtomicReader extend
   
   // Single instance of this, per ParallelReader instance
   private final class ParallelFields extends Fields {
-    final Map<String,Terms> fields = new TreeMap<String,Terms>();
+    final Map<String,Terms> fields = new TreeMap<>();
     
     ParallelFields() {
     }

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/PersistentSnapshotDeletionPolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/PersistentSnapshotDeletionPolicy.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/PersistentSnapshotDeletionPolicy.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/PersistentSnapshotDeletionPolicy.java Sun Mar 16 19:39:10 2014
@@ -253,7 +253,7 @@ public class PersistentSnapshotDeletionP
   private synchronized void loadPriorSnapshots() throws IOException {
     long genLoaded = -1;
     IOException ioe = null;
-    List<String> snapshotFiles = new ArrayList<String>();
+    List<String> snapshotFiles = new ArrayList<>();
     String[] files;
     try {
       files = dir.listAll();
@@ -267,7 +267,7 @@ public class PersistentSnapshotDeletionP
         long gen = Long.parseLong(file.substring(SNAPSHOTS_PREFIX.length()));
         if (genLoaded == -1 || gen > genLoaded) {
           snapshotFiles.add(file);
-          Map<Long,Integer> m = new HashMap<Long,Integer>();    
+          Map<Long,Integer> m = new HashMap<>();
           IndexInput in = dir.openInput(file, IOContext.DEFAULT);
           try {
             CodecUtil.checkHeader(in, CODEC_NAME, VERSION_START, VERSION_START);

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java Sun Mar 16 19:39:10 2014
@@ -78,7 +78,7 @@ class ReadersAndUpdates {
   // updates on the merged segment too.
   private boolean isMerging = false;
   
-  private final Map<String,NumericFieldUpdates> mergingNumericUpdates = new HashMap<String,NumericFieldUpdates>();
+  private final Map<String,NumericFieldUpdates> mergingNumericUpdates = new HashMap<>();
   
   public ReadersAndUpdates(IndexWriter writer, SegmentCommitInfo info) {
     this.info = info;
@@ -448,7 +448,7 @@ class ReadersAndUpdates {
     
     // create a new map, keeping only the gens that are in use
     Map<Long,Set<String>> genUpdatesFiles = info.getUpdatesFiles();
-    Map<Long,Set<String>> newGenUpdatesFiles = new HashMap<Long,Set<String>>();
+    Map<Long,Set<String>> newGenUpdatesFiles = new HashMap<>();
     final long fieldInfosGen = info.getFieldInfosGen();
     for (FieldInfo fi : fieldInfos) {
       long dvGen = fi.getDocValuesGen();

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SegmentCommitInfo.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SegmentCommitInfo.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SegmentCommitInfo.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SegmentCommitInfo.java Sun Mar 16 19:39:10 2014
@@ -56,7 +56,7 @@ public class SegmentCommitInfo {
   private long nextWriteFieldInfosGen;
 
   // Track the per-generation updates files
-  private final Map<Long,Set<String>> genUpdatesFiles = new HashMap<Long,Set<String>>();
+  private final Map<Long,Set<String>> genUpdatesFiles = new HashMap<>();
   
   private volatile long sizeInBytes = -1;
 
@@ -147,7 +147,7 @@ public class SegmentCommitInfo {
   /** Returns all files in use by this segment. */
   public Collection<String> files() throws IOException {
     // Start from the wrapped info's files:
-    Collection<String> files = new HashSet<String>(info.files());
+    Collection<String> files = new HashSet<>(info.files());
 
     // TODO we could rely on TrackingDir.getCreatedFiles() (like we do for
     // updates) and then maybe even be able to remove LiveDocsFormat.files().
@@ -257,7 +257,7 @@ public class SegmentCommitInfo {
     
     // deep clone
     for (Entry<Long,Set<String>> e : genUpdatesFiles.entrySet()) {
-      other.genUpdatesFiles.put(e.getKey(), new HashSet<String>(e.getValue()));
+      other.genUpdatesFiles.put(e.getKey(), new HashSet<>(e.getValue()));
     }
     
     return other;

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java Sun Mar 16 19:39:10 2014
@@ -79,7 +79,7 @@ final class SegmentCoreReaders {
   final CloseableThreadLocal<Map<String,Object>> normsLocal = new CloseableThreadLocal<Map<String,Object>>() {
     @Override
     protected Map<String,Object> initialValue() {
-      return new HashMap<String,Object>();
+      return new HashMap<>();
     }
   };
 

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java Sun Mar 16 19:39:10 2014
@@ -35,7 +35,7 @@ import org.apache.lucene.util.RefCount;
  */
 final class SegmentDocValues {
 
-  private final Map<Long,RefCount<DocValuesProducer>> genDVProducers = new HashMap<Long,RefCount<DocValuesProducer>>();
+  private final Map<Long,RefCount<DocValuesProducer>> genDVProducers = new HashMap<>();
 
   private RefCount<DocValuesProducer> newDocValuesProducer(SegmentCommitInfo si, IOContext context, Directory dir,
       DocValuesFormat dvFormat, final Long gen, List<FieldInfo> infos) throws IOException {

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java Sun Mar 16 19:39:10 2014
@@ -139,9 +139,9 @@ public final class SegmentInfos implemen
                                // there was an IOException that had interrupted a commit
 
   /** Opaque Map&lt;String, String&gt; that user can specify during IndexWriter.commit */
-  public Map<String,String> userData = Collections.<String,String>emptyMap();
+  public Map<String,String> userData = Collections.emptyMap();
   
-  private List<SegmentCommitInfo> segments = new ArrayList<SegmentCommitInfo>();
+  private List<SegmentCommitInfo> segments = new ArrayList<>();
   
   /**
    * If non-null, information about loading segments_N files
@@ -355,7 +355,7 @@ public final class SegmentInfos implemen
           if (numGensUpdatesFiles == 0) {
             genUpdatesFiles = Collections.emptyMap();
           } else {
-            genUpdatesFiles = new HashMap<Long,Set<String>>(numGensUpdatesFiles);
+            genUpdatesFiles = new HashMap<>(numGensUpdatesFiles);
             for (int i = 0; i < numGensUpdatesFiles; i++) {
               genUpdatesFiles.put(input.readLong(), input.readStringSet());
             }
@@ -471,13 +471,13 @@ public final class SegmentInfos implemen
     try {
       final SegmentInfos sis = (SegmentInfos) super.clone();
       // deep clone, first recreate all collections:
-      sis.segments = new ArrayList<SegmentCommitInfo>(size());
+      sis.segments = new ArrayList<>(size());
       for(final SegmentCommitInfo info : this) {
         assert info.info.getCodec() != null;
         // dont directly access segments, use add method!!!
         sis.add(info.clone());
       }
-      sis.userData = new HashMap<String,String>(userData);
+      sis.userData = new HashMap<>(userData);
       return sis;
     } catch (CloneNotSupportedException e) {
       throw new RuntimeException("should not happen", e);
@@ -832,7 +832,7 @@ public final class SegmentInfos implemen
    *  The returned collection is recomputed on each
    *  invocation.  */
   public Collection<String> files(Directory dir, boolean includeSegmentsFile) throws IOException {
-    HashSet<String> files = new HashSet<String>();
+    HashSet<String> files = new HashSet<>();
     if (includeSegmentsFile) {
       final String segmentFileName = getSegmentsFileName();
       if (segmentFileName != null) {
@@ -978,7 +978,7 @@ public final class SegmentInfos implemen
   
   /** applies all changes caused by committing a merge to this SegmentInfos */
   void applyMergeChanges(MergePolicy.OneMerge merge, boolean dropSegment) {
-    final Set<SegmentCommitInfo> mergedAway = new HashSet<SegmentCommitInfo>(merge.segments);
+    final Set<SegmentCommitInfo> mergedAway = new HashSet<>(merge.segments);
     boolean inserted = false;
     int newSegIdx = 0;
     for (int segIdx = 0, cnt = segments.size(); segIdx < cnt; segIdx++) {
@@ -1010,7 +1010,7 @@ public final class SegmentInfos implemen
   }
 
   List<SegmentCommitInfo> createBackupSegmentInfos() {
-    final List<SegmentCommitInfo> list = new ArrayList<SegmentCommitInfo>(size());
+    final List<SegmentCommitInfo> list = new ArrayList<>(size());
     for(final SegmentCommitInfo info : this) {
       assert info.info.getCodec() != null;
       list.add(info.clone());
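
One cleanup in this file's first hunk is not a diamond: Collections.<String,String>emptyMap() became Collections.emptyMap(), dropping an explicit type witness on a generic method call. The assignment target Map<String,String> userData gives javac enough to infer the type arguments, so again nothing changes at runtime.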

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java Sun Mar 16 19:39:10 2014
@@ -155,8 +155,8 @@ final class SegmentMerger {
         DocValuesType type = field.getDocValuesType();
         if (type != null) {
           if (type == DocValuesType.NUMERIC) {
-            List<NumericDocValues> toMerge = new ArrayList<NumericDocValues>();
-            List<Bits> docsWithField = new ArrayList<Bits>();
+            List<NumericDocValues> toMerge = new ArrayList<>();
+            List<Bits> docsWithField = new ArrayList<>();
             for (AtomicReader reader : mergeState.readers) {
               NumericDocValues values = reader.getNumericDocValues(field.name);
               Bits bits = reader.getDocsWithField(field.name);
@@ -169,8 +169,8 @@ final class SegmentMerger {
             }
             consumer.mergeNumericField(field, mergeState, toMerge, docsWithField);
           } else if (type == DocValuesType.BINARY) {
-            List<BinaryDocValues> toMerge = new ArrayList<BinaryDocValues>();
-            List<Bits> docsWithField = new ArrayList<Bits>();
+            List<BinaryDocValues> toMerge = new ArrayList<>();
+            List<Bits> docsWithField = new ArrayList<>();
             for (AtomicReader reader : mergeState.readers) {
               BinaryDocValues values = reader.getBinaryDocValues(field.name);
               Bits bits = reader.getDocsWithField(field.name);
@@ -183,7 +183,7 @@ final class SegmentMerger {
             }
             consumer.mergeBinaryField(field, mergeState, toMerge, docsWithField);
           } else if (type == DocValuesType.SORTED) {
-            List<SortedDocValues> toMerge = new ArrayList<SortedDocValues>();
+            List<SortedDocValues> toMerge = new ArrayList<>();
             for (AtomicReader reader : mergeState.readers) {
               SortedDocValues values = reader.getSortedDocValues(field.name);
               if (values == null) {
@@ -193,7 +193,7 @@ final class SegmentMerger {
             }
             consumer.mergeSortedField(field, mergeState, toMerge);
           } else if (type == DocValuesType.SORTED_SET) {
-            List<SortedSetDocValues> toMerge = new ArrayList<SortedSetDocValues>();
+            List<SortedSetDocValues> toMerge = new ArrayList<>();
             for (AtomicReader reader : mergeState.readers) {
               SortedSetDocValues values = reader.getSortedSetDocValues(field.name);
               if (values == null) {
@@ -223,8 +223,8 @@ final class SegmentMerger {
     try {
       for (FieldInfo field : mergeState.fieldInfos) {
         if (field.hasNorms()) {
-          List<NumericDocValues> toMerge = new ArrayList<NumericDocValues>();
-          List<Bits> docsWithField = new ArrayList<Bits>();
+          List<NumericDocValues> toMerge = new ArrayList<>();
+          List<Bits> docsWithField = new ArrayList<>();
           for (AtomicReader reader : mergeState.readers) {
             NumericDocValues norms = reader.getNormValues(field.name);
             if (norms == null) {
@@ -358,8 +358,8 @@ final class SegmentMerger {
 
   private void mergeTerms(SegmentWriteState segmentWriteState) throws IOException {
     
-    final List<Fields> fields = new ArrayList<Fields>();
-    final List<ReaderSlice> slices = new ArrayList<ReaderSlice>();
+    final List<Fields> fields = new ArrayList<>();
+    final List<ReaderSlice> slices = new ArrayList<>();
 
     int docBase = 0;
 

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java Sun Mar 16 19:39:10 2014
@@ -60,22 +60,22 @@ public final class SegmentReader extends
   final CloseableThreadLocal<Map<String,Object>> docValuesLocal = new CloseableThreadLocal<Map<String,Object>>() {
     @Override
     protected Map<String,Object> initialValue() {
-      return new HashMap<String,Object>();
+      return new HashMap<>();
     }
   };
 
   final CloseableThreadLocal<Map<String,Bits>> docsWithFieldLocal = new CloseableThreadLocal<Map<String,Bits>>() {
     @Override
     protected Map<String,Bits> initialValue() {
-      return new HashMap<String,Bits>();
+      return new HashMap<>();
     }
   };
 
-  final Map<String,DocValuesProducer> dvProducers = new HashMap<String,DocValuesProducer>();
+  final Map<String,DocValuesProducer> dvProducers = new HashMap<>();
   
   final FieldInfos fieldInfos;
 
-  private final List<Long> dvGens = new ArrayList<Long>();
+  private final List<Long> dvGens = new ArrayList<>();
   
   /**
    * Constructs a new SegmentReader with a new core.
@@ -221,7 +221,7 @@ public final class SegmentReader extends
   
   // returns a gen->List<FieldInfo> mapping. Fields without DV updates have gen=-1
   private Map<Long,List<FieldInfo>> getGenInfos() {
-    final Map<Long,List<FieldInfo>> genInfos = new HashMap<Long,List<FieldInfo>>();
+    final Map<Long,List<FieldInfo>> genInfos = new HashMap<>();
     for (FieldInfo fi : fieldInfos) {
       if (fi.getDocValuesType() == null) {
         continue;
@@ -229,7 +229,7 @@ public final class SegmentReader extends
       long gen = fi.getDocValuesGen();
       List<FieldInfo> infos = genInfos.get(gen);
       if (infos == null) {
-        infos = new ArrayList<FieldInfo>();
+        infos = new ArrayList<>();
         genInfos.put(gen, infos);
       }
       infos.add(fi);

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SerialMergeScheduler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SerialMergeScheduler.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SerialMergeScheduler.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SerialMergeScheduler.java Sun Mar 16 19:39:10 2014
@@ -31,7 +31,7 @@ public class SerialMergeScheduler extend
    * "synchronized" so that even if the application is using
    * multiple threads, only one merge may run at a time. */
   @Override
-  synchronized public void merge(IndexWriter writer) throws IOException {
+  synchronized public void merge(IndexWriter writer, MergeTrigger trigger, boolean newMergesFound) throws IOException {
 
     while(true) {
       MergePolicy.OneMerge merge = writer.getNextMerge();

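The SerialMergeScheduler hunk tracks an API change: merge() now receives the MergeTrigger that caused the call plus a newMergesFound flag. A hedged sketch of a scheduler against that widened 4.x-era signature; note that IndexWriter.getNextMerge() is package-visible, so a real implementation would live in org.apache.lucene.index, and the names here are taken from the diff rather than verified against the branch:

    import java.io.IOException;

    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.MergePolicy;
    import org.apache.lucene.index.MergeScheduler;
    import org.apache.lucene.index.MergeTrigger;

    // Sketch only: SerialMergeScheduler's loop plus a log line for the
    // newly passed trigger.  "synchronized" keeps merges single-threaded.
    public class LoggingSerialMergeScheduler extends MergeScheduler {

      @Override
      public synchronized void merge(IndexWriter writer, MergeTrigger trigger,
                                     boolean newMergesFound) throws IOException {
        System.out.println("merge: trigger=" + trigger
            + " newMergesFound=" + newMergesFound);
        while (true) {
          MergePolicy.OneMerge merge = writer.getNextMerge();  // package-visible
          if (merge == null) {
            break;
          }
          writer.merge(merge);  // run each pending merge on the calling thread
        }
      }

      @Override
      public void close() {}
    }
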
Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java Sun Mar 16 19:39:10 2014
@@ -180,7 +180,7 @@ public final class SlowCompositeReaderWr
   
   // TODO: this could really be a weak map somewhere else on the coreCacheKey,
   // but do we really need to optimize slow-wrapper any more?
-  private final Map<String,OrdinalMap> cachedOrdMaps = new HashMap<String,OrdinalMap>();
+  private final Map<String,OrdinalMap> cachedOrdMaps = new HashMap<>();
 
   @Override
   public NumericDocValues getNormValues(String field) throws IOException {

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SnapshotDeletionPolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SnapshotDeletionPolicy.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SnapshotDeletionPolicy.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/SnapshotDeletionPolicy.java Sun Mar 16 19:39:10 2014
@@ -47,10 +47,10 @@ public class SnapshotDeletionPolicy exte
 
   /** Records how many snapshots are held against each
    *  commit generation */
-  protected Map<Long,Integer> refCounts = new HashMap<Long,Integer>();
+  protected Map<Long,Integer> refCounts = new HashMap<>();
 
   /** Used to map gen to IndexCommit. */
-  protected Map<Long,IndexCommit> indexCommits = new HashMap<Long,IndexCommit>();
+  protected Map<Long,IndexCommit> indexCommits = new HashMap<>();
 
   /** Wrapped {@link IndexDeletionPolicy} */
   private IndexDeletionPolicy primary;
@@ -167,7 +167,7 @@ public class SnapshotDeletionPolicy exte
 
   /** Returns all IndexCommits held by at least one snapshot. */
   public synchronized List<IndexCommit> getSnapshots() {
-    return new ArrayList<IndexCommit>(indexCommits.values());
+    return new ArrayList<>(indexCommits.values());
   }
 
   /** Returns the total number of snapshots currently held. */
@@ -192,15 +192,15 @@ public class SnapshotDeletionPolicy exte
     SnapshotDeletionPolicy other = (SnapshotDeletionPolicy) super.clone();
     other.primary = this.primary.clone();
     other.lastCommit = null;
-    other.refCounts = new HashMap<Long,Integer>(refCounts);
-    other.indexCommits = new HashMap<Long,IndexCommit>(indexCommits);
+    other.refCounts = new HashMap<>(refCounts);
+    other.indexCommits = new HashMap<>(indexCommits);
     return other;
   }
 
   /** Wraps each {@link IndexCommit} as a {@link
    *  SnapshotCommitPoint}. */
   private List<IndexCommit> wrapCommits(List<? extends IndexCommit> commits) {
-    List<IndexCommit> wrappedCommits = new ArrayList<IndexCommit>(commits.size());
+    List<IndexCommit> wrappedCommits = new ArrayList<>(commits.size());
     for (IndexCommit ic : commits) {
       wrappedCommits.add(new SnapshotCommitPoint(ic));
     }

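For context on the refCounts/indexCommits maps above: SnapshotDeletionPolicy ref-counts commit points so a backup can copy a consistent set of files while indexing continues. A hedged usage sketch against the 4.x-era API (the snapshot()/release() signatures and the Version constant are assumptions about this era, not taken from the patch):

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.index.IndexCommit;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy;
    import org.apache.lucene.index.SnapshotDeletionPolicy;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.RAMDirectory;
    import org.apache.lucene.util.Version;

    public class SnapshotBackupSketch {
      public static void main(String[] args) throws Exception {
        Directory dir = new RAMDirectory();
        SnapshotDeletionPolicy sdp =
            new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
        IndexWriterConfig iwc =
            new IndexWriterConfig(Version.LUCENE_47, new StandardAnalyzer(Version.LUCENE_47));
        iwc.setIndexDeletionPolicy(sdp);
        IndexWriter writer = new IndexWriter(dir, iwc);
        writer.commit();                        // snapshot() needs at least one commit

        IndexCommit snapshot = sdp.snapshot();  // bump refCount: files now pinned
        try {
          // Copy snapshot.getFileNames() somewhere safe; indexing may continue.
          System.out.println("backing up: " + snapshot.getFileNames());
        } finally {
          sdp.release(snapshot);                // drop refCount: files deletable again
          writer.deleteUnusedFiles();
        }
        writer.close();
      }
    }
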
Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java Sun Mar 16 19:39:10 2014
@@ -78,7 +78,7 @@ final class StandardDirectoryReader exte
     // no need to process segments in reverse order
     final int numSegments = infos.size();
 
-    List<SegmentReader> readers = new ArrayList<SegmentReader>();
+    List<SegmentReader> readers = new ArrayList<>();
     final Directory dir = writer.getDirectory();
 
     final SegmentInfos segmentInfos = infos.clone();
@@ -134,7 +134,7 @@ final class StandardDirectoryReader exte
 
     // we put the old SegmentReaders in a map, that allows us
     // to lookup a reader using its segment name
-    final Map<String,Integer> segmentReaders = new HashMap<String,Integer>();
+    final Map<String,Integer> segmentReaders = new HashMap<>();
 
     if (oldReaders != null) {
       // create a Map SegmentName->SegmentReader

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/StoredDocument.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/StoredDocument.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/StoredDocument.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/StoredDocument.java Sun Mar 16 19:39:10 2014
@@ -34,7 +34,7 @@ import org.apache.lucene.util.BytesRef;
 // TODO: shouldn't this really be in the .document package?
 public class StoredDocument implements Iterable<StorableField> {
 
-  private final List<StorableField> fields = new ArrayList<StorableField>();
+  private final List<StorableField> fields = new ArrayList<>();
 
   /** Sole constructor. */
   public StoredDocument() {
@@ -61,7 +61,7 @@ public class StoredDocument implements I
    * @return a <code>StorableField[]</code> array
    */
   public StorableField[] getFields(String name) {
-    List<StorableField> result = new ArrayList<StorableField>();
+    List<StorableField> result = new ArrayList<>();
     for (StorableField field : fields) {
       if (field.name().equals(name)) {
         result.add(field);
@@ -112,7 +112,7 @@ public class StoredDocument implements I
    * @return a <code>BytesRef[]</code> of binary field values
    */
    public final BytesRef[] getBinaryValues(String name) {
-     final List<BytesRef> result = new ArrayList<BytesRef>();
+     final List<BytesRef> result = new ArrayList<>();
      for (StorableField field : fields) {
        if (field.name().equals(name)) {
          final BytesRef bytes = field.binaryValue();
@@ -158,7 +158,7 @@ public class StoredDocument implements I
     * @return a <code>String[]</code> of field values
     */
    public final String[] getValues(String name) {
-     List<String> result = new ArrayList<String>();
+     List<String> result = new ArrayList<>();
      for (StorableField field : fields) {
        if (field.name().equals(name) && field.stringValue() != null) {
          result.add(field.stringValue());

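The StoredDocument getters above all share one convention: a field name may repeat, so getValues()/getFields()/getBinaryValues() scan the field list and return every match, in addition order. A tiny standalone sketch of that collect-then-toArray pattern (plain arrays here, not the Lucene classes):

    import java.util.ArrayList;
    import java.util.List;

    public class MultiValuedSketch {
      // Mirrors StoredDocument.getValues(String): gather the values of
      // every field whose name matches, skipping null (non-string) values.
      static String[] getValues(String[][] fields, String name) {
        List<String> result = new ArrayList<>();
        for (String[] field : fields) {
          if (field[0].equals(name) && field[1] != null) {
            result.add(field[1]);
          }
        }
        return result.toArray(new String[result.size()]);
      }

      public static void main(String[] args) {
        String[][] fields = { {"author", "Alice"}, {"tag", "lucene"}, {"tag", "search"} };
        for (String tag : getValues(fields, "tag")) {
          System.out.println(tag);  // lucene, then search
        }
      }
    }
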
Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/TermsHash.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/TermsHash.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/TermsHash.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/TermsHash.java Sun Mar 16 19:39:10 2014
@@ -96,11 +96,11 @@ final class TermsHash extends InvertedDo
 
   @Override
   void flush(Map<String,InvertedDocConsumerPerField> fieldsToFlush, final SegmentWriteState state) throws IOException {
-    Map<String,TermsHashConsumerPerField> childFields = new HashMap<String,TermsHashConsumerPerField>();
+    Map<String,TermsHashConsumerPerField> childFields = new HashMap<>();
     Map<String,InvertedDocConsumerPerField> nextChildFields;
 
     if (nextTermsHash != null) {
-      nextChildFields = new HashMap<String,InvertedDocConsumerPerField>();
+      nextChildFields = new HashMap<>();
     } else {
       nextChildFields = null;
     }

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/TermsHashPerField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/TermsHashPerField.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/TermsHashPerField.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/TermsHashPerField.java Sun Mar 16 19:39:10 2014
@@ -179,12 +179,11 @@ final class TermsHashPerField extends In
     try {
       termID = bytesHash.add(termBytesRef, termAtt.fillBytesRef());
     } catch (MaxBytesLengthExceededException e) {
-      // Not enough room in current block
-      // Just skip this term, to remain as robust as
-      // possible during indexing.  A TokenFilter
-      // can be inserted into the analyzer chain if
-      // other behavior is wanted (pruning the term
-      // to a prefix, throwing an exception, etc).
+      // Term is too large; record this here (can't throw an
+      // exc because DocInverterPerField will then abort the
+      // entire segment) and then throw an exc later in
+      // DocInverterPerField.java.  LengthFilter can always be
+      // used to prune the term before indexing:
       if (docState.maxTermPrefix == null) {
         final int saved = termBytesRef.length;
         try {

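The rewritten comment above points at LengthFilter as the way to keep oversized terms from ever reaching the indexer (which otherwise records the too-long term and throws later in DocInverterPerField). A hedged analyzer sketch; the analysis.miscellaneous package location and four-argument LengthFilter constructor are assumptions about the 4.x analyzers module:

    import java.io.Reader;

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.Tokenizer;
    import org.apache.lucene.analysis.core.WhitespaceTokenizer;
    import org.apache.lucene.analysis.miscellaneous.LengthFilter;
    import org.apache.lucene.util.Version;

    // Drop tokens longer than 255 characters so indexing never trips
    // MaxBytesLengthExceededException on absurdly long terms.
    public class LengthLimitedAnalyzer extends Analyzer {
      @Override
      protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
        Tokenizer source = new WhitespaceTokenizer(Version.LUCENE_47, reader);
        TokenStream filtered = new LengthFilter(Version.LUCENE_47, source, 1, 255);
        return new TokenStreamComponents(source, filtered);
      }
    }
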
Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/ThreadAffinityDocumentsWriterThreadPool.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/ThreadAffinityDocumentsWriterThreadPool.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/ThreadAffinityDocumentsWriterThreadPool.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/ThreadAffinityDocumentsWriterThreadPool.java Sun Mar 16 19:39:10 2014
@@ -31,7 +31,7 @@ import org.apache.lucene.index.Documents
  * minimal contended {@link ThreadState}.
  */
 class ThreadAffinityDocumentsWriterThreadPool extends DocumentsWriterPerThreadPool {
-  private Map<Thread, ThreadState> threadBindings = new ConcurrentHashMap<Thread, ThreadState>();
+  private Map<Thread, ThreadState> threadBindings = new ConcurrentHashMap<>();
   
   /**
    * Creates a new {@link ThreadAffinityDocumentsWriterThreadPool} with a given maximum of {@link ThreadState}s.
@@ -78,7 +78,7 @@ class ThreadAffinityDocumentsWriterThrea
   @Override
   public ThreadAffinityDocumentsWriterThreadPool clone() {
     ThreadAffinityDocumentsWriterThreadPool clone = (ThreadAffinityDocumentsWriterThreadPool) super.clone();
-    clone.threadBindings = new ConcurrentHashMap<Thread, ThreadState>();
+    clone.threadBindings = new ConcurrentHashMap<>();
     return clone;
   }
 }

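The ThreadAffinityDocumentsWriterThreadPool change is again just the diamond operator, but the underlying idiom is worth spelling out: a ConcurrentHashMap remembers which state each thread used last, so a thread keeps returning to the same (warm, minimally contended) state. A minimal generic sketch, unrelated to the Lucene classes:

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class ThreadAffinitySketch {
      private final Map<Thread,Integer> bindings = new ConcurrentHashMap<>();
      private final int numStates;

      public ThreadAffinitySketch(int numStates) { this.numStates = numStates; }

      // First call from a thread picks a slot; later calls stick to it.
      public int slotFor(Thread t) {
        Integer slot = bindings.get(t);
        if (slot == null) {
          slot = (t.hashCode() & 0x7fffffff) % numStates;  // cheap initial spread
          bindings.put(t, slot);
        }
        return slot;
      }

      public static void main(String[] args) {
        ThreadAffinitySketch pool = new ThreadAffinitySketch(8);
        int a = pool.slotFor(Thread.currentThread());
        int b = pool.slotFor(Thread.currentThread());
        System.out.println(a == b);  // true: the binding is sticky
      }
    }
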
Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java Sun Mar 16 19:39:10 2014
@@ -280,9 +280,9 @@ public class TieredMergePolicy extends M
       return null;
     }
     final Collection<SegmentCommitInfo> merging = writer.get().getMergingSegments();
-    final Collection<SegmentCommitInfo> toBeMerged = new HashSet<SegmentCommitInfo>();
+    final Collection<SegmentCommitInfo> toBeMerged = new HashSet<>();
 
-    final List<SegmentCommitInfo> infosSorted = new ArrayList<SegmentCommitInfo>(infos.asList());
+    final List<SegmentCommitInfo> infosSorted = new ArrayList<>(infos.asList());
     Collections.sort(infosSorted, new SegmentByteSizeDescending());
 
     // Compute total index bytes & print details about the index
@@ -341,7 +341,7 @@ public class TieredMergePolicy extends M
       // Gather eligible segments for merging, ie segments
       // not already being merged and not already picked (by
       // prior iteration of this loop) for merging:
-      final List<SegmentCommitInfo> eligible = new ArrayList<SegmentCommitInfo>();
+      final List<SegmentCommitInfo> eligible = new ArrayList<>();
       for(int idx = tooBigCount; idx<infosSorted.size(); idx++) {
         final SegmentCommitInfo info = infosSorted.get(idx);
         if (merging.contains(info)) {
@@ -374,7 +374,7 @@ public class TieredMergePolicy extends M
 
           long totAfterMergeBytes = 0;
 
-          final List<SegmentCommitInfo> candidate = new ArrayList<SegmentCommitInfo>();
+          final List<SegmentCommitInfo> candidate = new ArrayList<>();
           boolean hitTooLarge = false;
           for(int idx = startIdx;idx<eligible.size() && candidate.size() < maxMergeAtOnce;idx++) {
             final SegmentCommitInfo info = eligible.get(idx);
@@ -497,7 +497,7 @@ public class TieredMergePolicy extends M
       message("findForcedMerges maxSegmentCount=" + maxSegmentCount + " infos=" + writer.get().segString(infos) + " segmentsToMerge=" + segmentsToMerge);
     }
 
-    List<SegmentCommitInfo> eligible = new ArrayList<SegmentCommitInfo>();
+    List<SegmentCommitInfo> eligible = new ArrayList<>();
     boolean forceMergeRunning = false;
     final Collection<SegmentCommitInfo> merging = writer.get().getMergingSegments();
     boolean segmentIsOriginal = false;
@@ -568,7 +568,7 @@ public class TieredMergePolicy extends M
     if (verbose()) {
       message("findForcedDeletesMerges infos=" + writer.get().segString(infos) + " forceMergeDeletesPctAllowed=" + forceMergeDeletesPctAllowed);
     }
-    final List<SegmentCommitInfo> eligible = new ArrayList<SegmentCommitInfo>();
+    final List<SegmentCommitInfo> eligible = new ArrayList<>();
     final Collection<SegmentCommitInfo> merging = writer.get().getMergingSegments();
     for(SegmentCommitInfo info : infos) {
       double pctDeletes = 100.*((double) writer.get().numDeletedDocs(info))/info.info.getDocCount();

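The TieredMergePolicy hunks touch its segment-selection loops; for readers who only ever configure the policy, these are the knobs those loops honor (maxMergeAtOnce, forceMergeDeletesPctAllowed, and friends). Setter names are the standard 4.x API; the values below are purely illustrative:

    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.TieredMergePolicy;

    public class TieredSetupSketch {
      static IndexWriterConfig configure(IndexWriterConfig iwc) {
        TieredMergePolicy tmp = new TieredMergePolicy();
        tmp.setMaxMergeAtOnce(10);                 // segments merged per normal merge
        tmp.setSegmentsPerTier(10.0);              // allowed segments per size tier
        tmp.setForceMergeDeletesPctAllowed(10.0);  // forceMergeDeletes kicks in above 10% deletes
        return iwc.setMergePolicy(tmp);
      }
    }
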
Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/UpgradeIndexMergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/UpgradeIndexMergePolicy.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/UpgradeIndexMergePolicy.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/UpgradeIndexMergePolicy.java Sun Mar 16 19:39:10 2014
@@ -17,7 +17,6 @@ package org.apache.lucene.index;
  * limitations under the License.
  */
 
-import org.apache.lucene.index.MergePolicy.MergeTrigger;
 import org.apache.lucene.util.Constants;
 
 import java.io.IOException;
@@ -83,7 +82,7 @@ public class UpgradeIndexMergePolicy ext
   @Override
   public MergeSpecification findForcedMerges(SegmentInfos segmentInfos, int maxSegmentCount, Map<SegmentCommitInfo,Boolean> segmentsToMerge) throws IOException {
     // first find all old segments
-    final Map<SegmentCommitInfo,Boolean> oldSegments = new HashMap<SegmentCommitInfo,Boolean>();
+    final Map<SegmentCommitInfo,Boolean> oldSegments = new HashMap<>();
     for (final SegmentCommitInfo si : segmentInfos) {
       final Boolean v = segmentsToMerge.get(si);
       if (v != null && shouldUpgradeSegment(si)) {
@@ -114,7 +113,7 @@ public class UpgradeIndexMergePolicy ext
         message("findForcedMerges: " +  base.getClass().getSimpleName() +
         " does not want to merge all old segments, merge remaining ones into new segment: " + oldSegments);
       }
-      final List<SegmentCommitInfo> newInfos = new ArrayList<SegmentCommitInfo>();
+      final List<SegmentCommitInfo> newInfos = new ArrayList<>();
       for (final SegmentCommitInfo si : segmentInfos) {
         if (oldSegments.containsKey(si)) {
           newInfos.add(si);

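findForcedMerges above is why UpgradeIndexMergePolicy exists: it narrows the wrapped policy's forced merges to segments written by older Lucene versions. A hedged wiring sketch (IndexUpgrader does roughly this internally):

    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.TieredMergePolicy;
    import org.apache.lucene.index.UpgradeIndexMergePolicy;

    public class UpgradeSetupSketch {
      // After opening a writer with this config, IndexWriter.forceMerge(1)
      // rewrites any old-format segments into the current format.
      static IndexWriterConfig configure(IndexWriterConfig iwc) {
        return iwc.setMergePolicy(new UpgradeIndexMergePolicy(new TieredMergePolicy()));
      }
    }
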
Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java Sun Mar 16 19:39:10 2014
@@ -68,7 +68,7 @@ public class BooleanQuery extends Query 
     BooleanQuery.maxClauseCount = maxClauseCount;
   }
 
-  private ArrayList<BooleanClause> clauses = new ArrayList<BooleanClause>();
+  private ArrayList<BooleanClause> clauses = new ArrayList<>();
   private final boolean disableCoord;
 
   /** Constructs an empty boolean query. */
@@ -165,8 +165,7 @@ public class BooleanQuery extends Query 
    * Expert: the Weight for BooleanQuery, used to
    * normalize, score and explain these queries.
    *
-   * <p>NOTE: this API and implementation is subject to
-   * change suddenly in the next release.</p>
+   * @lucene.experimental
    */
   protected class BooleanWeight extends Weight {
     /** The Similarity implementation. */
@@ -179,7 +178,7 @@ public class BooleanQuery extends Query 
       throws IOException {
       this.similarity = searcher.getSimilarity();
       this.disableCoord = disableCoord;
-      weights = new ArrayList<Weight>(clauses.size());
+      weights = new ArrayList<>(clauses.size());
       for (int i = 0 ; i < clauses.size(); i++) {
         BooleanClause c = clauses.get(i);
         Weight w = c.getQuery().createWeight(searcher);
@@ -242,7 +241,7 @@ public class BooleanQuery extends Query 
       for (Iterator<Weight> wIter = weights.iterator(); wIter.hasNext();) {
         Weight w = wIter.next();
         BooleanClause c = cIter.next();
-        if (w.scorer(context, true, true, context.reader().getLiveDocs()) == null) {
+        if (w.scorer(context, context.reader().getLiveDocs()) == null) {
           if (c.isRequired()) {
             fail = true;
             Explanation r = new Explanation(0.0f, "no match on required clause (" + c.getQuery().toString() + ")");
@@ -305,22 +304,30 @@ public class BooleanQuery extends Query 
     }
 
     @Override
-    public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder,
-        boolean topScorer, Bits acceptDocs)
-        throws IOException {
-      List<Scorer> required = new ArrayList<Scorer>();
-      List<Scorer> prohibited = new ArrayList<Scorer>();
-      List<Scorer> optional = new ArrayList<Scorer>();
+    public BulkScorer bulkScorer(AtomicReaderContext context, boolean scoreDocsInOrder,
+                                 Bits acceptDocs) throws IOException {
+
+      if (scoreDocsInOrder || minNrShouldMatch > 1) {
+        // TODO: (LUCENE-4872) in some cases BooleanScorer may be faster for minNrShouldMatch
+        // but the same is even true of pure conjunctions...
+        return super.bulkScorer(context, scoreDocsInOrder, acceptDocs);
+      }
+
+      List<BulkScorer> prohibited = new ArrayList<>();
+      List<BulkScorer> optional = new ArrayList<>();
       Iterator<BooleanClause> cIter = clauses.iterator();
       for (Weight w  : weights) {
         BooleanClause c =  cIter.next();
-        Scorer subScorer = w.scorer(context, true, false, acceptDocs);
+        BulkScorer subScorer = w.bulkScorer(context, false, acceptDocs);
         if (subScorer == null) {
           if (c.isRequired()) {
             return null;
           }
         } else if (c.isRequired()) {
-          required.add(subScorer);
+          // TODO: there are some cases where BooleanScorer
+          // would handle conjunctions faster than
+          // BooleanScorer2...
+          return super.bulkScorer(context, scoreDocsInOrder, acceptDocs);
         } else if (c.isProhibited()) {
           prohibited.add(subScorer);
         } else {
@@ -328,20 +335,33 @@ public class BooleanQuery extends Query 
         }
       }
 
-      // NOTE: we could also use BooleanScorer, if we knew
-      // this BooleanQuery was embedded in another
-      // BooleanQuery that was also using BooleanScorer (ie,
-      // BooleanScorer can nest).  But this is hard to
-      // detect and we never do so today... (ie, we only
-      // return BooleanScorer for topScorer):
-
       // Check if we can and should return a BooleanScorer
-      // TODO: (LUCENE-4872) in some cases BooleanScorer may be faster for minNrShouldMatch
-      // but the same is even true of pure conjunctions...
-      if (!scoreDocsInOrder && topScorer && required.size() == 0 && minNrShouldMatch <= 1) {
-        return new BooleanScorer(this, disableCoord, minNrShouldMatch, optional, prohibited, maxCoord);
+      return new BooleanScorer(this, disableCoord, minNrShouldMatch, optional, prohibited, maxCoord);
+    }
+
+    @Override
+    public Scorer scorer(AtomicReaderContext context, Bits acceptDocs)
+        throws IOException {
+      List<Scorer> required = new ArrayList<>();
+      List<Scorer> prohibited = new ArrayList<>();
+      List<Scorer> optional = new ArrayList<>();
+      Iterator<BooleanClause> cIter = clauses.iterator();
+      for (Weight w  : weights) {
+        BooleanClause c =  cIter.next();
+        Scorer subScorer = w.scorer(context, acceptDocs);
+        if (subScorer == null) {
+          if (c.isRequired()) {
+            return null;
+          }
+        } else if (c.isRequired()) {
+          required.add(subScorer);
+        } else if (c.isProhibited()) {
+          prohibited.add(subScorer);
+        } else {
+          optional.add(subScorer);
+        }
       }
-      
+
       if (required.size() == 0 && optional.size() == 0) {
         // no required and optional clauses.
         return null;
@@ -373,9 +393,14 @@ public class BooleanQuery extends Query 
     
     @Override
     public boolean scoresDocsOutOfOrder() {
+      if (minNrShouldMatch > 1) {
+        // BS2 (in-order) will be used by scorer()
+        return false;
+      }
       for (BooleanClause c : clauses) {
         if (c.isRequired()) {
-          return false; // BS2 (in-order) will be used by scorer()
+          // BS2 (in-order) will be used by scorer()
+          return false;
         }
       }
       

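The BooleanQuery hunks are the substantive part of this commit: Weight's old scorer(context, scoreDocsInOrder, topScorer, acceptDocs) entry point splits into scorer() (one-doc-at-a-time iteration) and bulkScorer() (collector-driven scoring of whole ranges), and BooleanWeight overrides bulkScorer() to hand back BooleanScorer when the windowed algorithm applies. A simplified, self-contained sketch of the relationship, with invented interface names, showing how a default bulk scorer is just a loop over a plain scorer:

    import java.io.IOException;

    public class ScorerSplitSketch {
      interface Scorer { int nextDoc() throws IOException; }       // exhausted = Integer.MAX_VALUE
      interface Collector { void collect(int doc) throws IOException; }
      interface BulkScorer { boolean score(Collector c, int max) throws IOException; }

      // The fallback a weight can use when it has no smarter bulk strategy:
      // drive the doc-at-a-time scorer up to the window end, and report
      // whether any docs remain beyond it.
      static BulkScorer defaultBulkScorer(final Scorer s) {
        return new BulkScorer() {
          int doc = -1;
          @Override
          public boolean score(Collector c, int max) throws IOException {
            if (doc == -1) {
              doc = s.nextDoc();
            }
            while (doc < max) {
              c.collect(doc);
              doc = s.nextDoc();
            }
            return doc != Integer.MAX_VALUE;  // true: call again with a larger max
          }
        };
      }
    }
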
Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/search/BooleanScorer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/search/BooleanScorer.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/search/BooleanScorer.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/search/BooleanScorer.java Sun Mar 16 19:39:10 2014
@@ -23,6 +23,7 @@ import java.util.Collection;
 import java.util.List;
 
 import org.apache.lucene.index.AtomicReaderContext;
+import org.apache.lucene.index.DocsEnum;
 import org.apache.lucene.search.BooleanQuery.BooleanWeight;
 
 /* Description from Doug Cutting (excerpted from
@@ -58,7 +59,7 @@ import org.apache.lucene.search.BooleanQ
  * conjunction can reduce the number of priority queue
  * updates for the optional terms. */
 
-final class BooleanScorer extends Scorer {
+final class BooleanScorer extends BulkScorer {
   
   private static final class BooleanScorerCollector extends Collector {
     private BucketTable bucketTable;
@@ -108,38 +109,6 @@ final class BooleanScorer extends Scorer
 
   }
   
-  // An internal class which is used in score(Collector, int) for setting the
-  // current score. This is required since Collector exposes a setScorer method
-  // and implementations that need the score will call scorer.score().
-  // Therefore the only methods that are implemented are score() and doc().
-  private static final class BucketScorer extends Scorer {
-
-    double score;
-    int doc = NO_MORE_DOCS;
-    int freq;
-    
-    public BucketScorer(Weight weight) { super(weight); }
-    
-    @Override
-    public int advance(int target) { return NO_MORE_DOCS; }
-
-    @Override
-    public int docID() { return doc; }
-
-    @Override
-    public int freq() { return freq; }
-
-    @Override
-    public int nextDoc() { return NO_MORE_DOCS; }
-    
-    @Override
-    public float score() { return (float)score; }
-    
-    @Override
-    public long cost() { return 1; }
-
-  }
-
   static final class Bucket {
     int doc = -1;            // tells if bucket is valid
     double score;             // incremental score
@@ -175,19 +144,21 @@ final class BooleanScorer extends Scorer
   }
 
   static final class SubScorer {
-    public Scorer scorer;
+    public BulkScorer scorer;
     // TODO: re-enable this if BQ ever sends us required clauses
     //public boolean required = false;
     public boolean prohibited;
     public Collector collector;
     public SubScorer next;
+    public boolean more;
 
-    public SubScorer(Scorer scorer, boolean required, boolean prohibited,
+    public SubScorer(BulkScorer scorer, boolean required, boolean prohibited,
         Collector collector, SubScorer next) {
       if (required) {
         throw new IllegalArgumentException("this scorer cannot handle required=true");
       }
       this.scorer = scorer;
+      this.more = true;
       // TODO: re-enable this if BQ ever sends us required clauses
       //this.required = required;
       this.prohibited = prohibited;
@@ -206,26 +177,20 @@ final class BooleanScorer extends Scorer
   private Bucket current;
   // Any time a prohibited clause matches we set bit 0:
   private static final int PROHIBITED_MASK = 1;
-  
+
+  private final Weight weight;
+
   BooleanScorer(BooleanWeight weight, boolean disableCoord, int minNrShouldMatch,
-      List<Scorer> optionalScorers, List<Scorer> prohibitedScorers, int maxCoord) throws IOException {
-    super(weight);
+      List<BulkScorer> optionalScorers, List<BulkScorer> prohibitedScorers, int maxCoord) throws IOException {
     this.minNrShouldMatch = minNrShouldMatch;
+    this.weight = weight;
 
-    if (optionalScorers != null && optionalScorers.size() > 0) {
-      for (Scorer scorer : optionalScorers) {
-        if (scorer.nextDoc() != NO_MORE_DOCS) {
-          scorers = new SubScorer(scorer, false, false, bucketTable.newCollector(0), scorers);
-        }
-      }
+    for (BulkScorer scorer : optionalScorers) {
+      scorers = new SubScorer(scorer, false, false, bucketTable.newCollector(0), scorers);
     }
     
-    if (prohibitedScorers != null && prohibitedScorers.size() > 0) {
-      for (Scorer scorer : prohibitedScorers) {
-        if (scorer.nextDoc() != NO_MORE_DOCS) {
-          scorers = new SubScorer(scorer, false, true, bucketTable.newCollector(PROHIBITED_MASK), scorers);
-        }
-      }
+    for (BulkScorer scorer : prohibitedScorers) {
+      scorers = new SubScorer(scorer, false, true, bucketTable.newCollector(PROHIBITED_MASK), scorers);
     }
 
     coordFactors = new float[optionalScorers.size() + 1];
@@ -234,17 +199,15 @@ final class BooleanScorer extends Scorer
     }
   }
 
-  // firstDocID is ignored since nextDoc() initializes 'current'
   @Override
-  public boolean score(Collector collector, int max, int firstDocID) throws IOException {
-    // Make sure it's only BooleanScorer that calls us:
-    assert firstDocID == -1;
+  public boolean score(Collector collector, int max) throws IOException {
+
     boolean more;
     Bucket tmp;
-    BucketScorer bs = new BucketScorer(weight);
+    FakeScorer fs = new FakeScorer();
 
     // The internal loop will set the score and doc before calling collect.
-    collector.setScorer(bs);
+    collector.setScorer(fs);
     do {
       bucketTable.first = null;
       
@@ -263,7 +226,7 @@ final class BooleanScorer extends Scorer
           // that should work)... but in theory an outside
           // app could pass a different max so we must check
           // it:
-          if (current.doc >= max){
+          if (current.doc >= max) {
             tmp = current;
             current = current.next;
             tmp.next = bucketTable.first;
@@ -272,9 +235,9 @@ final class BooleanScorer extends Scorer
           }
           
           if (current.coord >= minNrShouldMatch) {
-            bs.score = current.score * coordFactors[current.coord];
-            bs.doc = current.doc;
-            bs.freq = current.coord;
+            fs.score = (float) (current.score * coordFactors[current.coord]);
+            fs.doc = current.doc;
+            fs.freq = current.coord;
             collector.collect(current.doc);
           }
         }
@@ -292,9 +255,9 @@ final class BooleanScorer extends Scorer
       more = false;
       end += BucketTable.SIZE;
       for (SubScorer sub = scorers; sub != null; sub = sub.next) {
-        int subScorerDocID = sub.scorer.docID();
-        if (subScorerDocID != NO_MORE_DOCS) {
-          more |= sub.scorer.score(sub.collector, end, subScorerDocID);
+        if (sub.more) {
+          sub.more = sub.scorer.score(sub.collector, end);
+          more |= sub.more;
         }
       }
       current = bucketTable.first;
@@ -303,43 +266,8 @@ final class BooleanScorer extends Scorer
 
     return false;
   }
-  
-  @Override
-  public int advance(int target) {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public int docID() {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public int nextDoc() {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public float score() {
-    throw new UnsupportedOperationException();
-  }
 
   @Override
-  public int freq() throws IOException {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public long cost() {
-    return Integer.MAX_VALUE;
-  }
-
-  @Override
-  public void score(Collector collector) throws IOException {
-    score(collector, Integer.MAX_VALUE, -1);
-  }
-  
-  @Override
   public String toString() {
     StringBuilder buffer = new StringBuilder();
     buffer.append("boolean(");
@@ -350,9 +278,4 @@ final class BooleanScorer extends Scorer
     buffer.append(")");
     return buffer.toString();
   }
-  
-  @Override
-  public Collection<ChildScorer> getChildren() {
-    throw new UnsupportedOperationException();
-  }
 }
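
One last note on the BooleanScorer rewrite: BucketScorer is gone, replaced by FakeScorer, a Scorer that never iterates and only exposes whatever doc/score/freq the bulk loop last stored, so Collector.setScorer() still has something to read. A sketch of the shape of such a class (field names follow the diff; the real FakeScorer lives elsewhere in the index package and additionally extends Scorer, with the iteration methods throwing UnsupportedOperationException):

    // Not the real class: just the shape of the stub scorer the loop above fills in.
    final class FakeScorerSketch {
      float score;
      int doc = -1;
      int freq;

      float score() { return score; }  // read back by the Collector
      int docID()   { return doc; }
      int freq()    { return freq; }

      // nextDoc()/advance() deliberately unsupported: collectors must not
      // try to drive this scorer themselves.
      int nextDoc() { throw new UnsupportedOperationException(); }
      int advance(int target) { throw new UnsupportedOperationException(); }
    }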