Posted to commits@lucene.apache.org by mi...@apache.org on 2014/03/16 20:39:37 UTC

svn commit: r1578144 [6/37] - in /lucene/dev/branches/lucene5376_2: ./ dev-tools/ dev-tools/idea/.idea/libraries/ dev-tools/idea/solr/contrib/dataimporthandler/ dev-tools/idea/solr/contrib/map-reduce/ dev-tools/idea/solr/core/src/test/ dev-tools/script...

Modified: lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsReader.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsReader.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsReader.java Sun Mar 16 19:39:10 2014
@@ -65,7 +65,7 @@ import org.apache.lucene.codecs.CodecUti
  */
 
 public class FSTTermsReader extends FieldsProducer {
-  final TreeMap<String, TermsReader> fields = new TreeMap<String, TermsReader>();
+  final TreeMap<String, TermsReader> fields = new TreeMap<>();
   final PostingsReaderBase postingsReader;
   final IndexInput in;
   //static boolean TEST = false;
@@ -172,7 +172,7 @@ public class FSTTermsReader extends Fiel
       this.sumDocFreq = sumDocFreq;
       this.docCount = docCount;
       this.longsSize = longsSize;
-      this.dict = new FST<FSTTermOutputs.TermData>(in, new FSTTermOutputs(fieldInfo, longsSize));
+      this.dict = new FST<>(in, new FSTTermOutputs(fieldInfo, longsSize));
     }
 
     @Override
@@ -307,7 +307,7 @@ public class FSTTermsReader extends Fiel
 
       SegmentTermsEnum() throws IOException {
         super();
-        this.fstEnum = new BytesRefFSTEnum<FSTTermOutputs.TermData>(dict);
+        this.fstEnum = new BytesRefFSTEnum<>(dict);
         this.decoded = false;
         this.seekPending = false;
         this.meta = null;
@@ -411,7 +411,7 @@ public class FSTTermsReader extends Fiel
         int fsaState;
 
         Frame() {
-          this.fstArc = new FST.Arc<FSTTermOutputs.TermData>();
+          this.fstArc = new FST.Arc<>();
           this.fsaState = -1;
         }
 
@@ -697,7 +697,7 @@ public class FSTTermsReader extends Fiel
   }
 
   static<T> void walk(FST<T> fst) throws IOException {
-    final ArrayList<FST.Arc<T>> queue = new ArrayList<FST.Arc<T>>();
+    final ArrayList<FST.Arc<T>> queue = new ArrayList<>();
     final BitSet seen = new BitSet();
     final FST.BytesReader reader = fst.getBytesReader();
     final FST.Arc<T> startArc = fst.getFirstArc(new FST.Arc<T>());
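
Every Java hunk in this commit applies the same mechanical change: constructor calls that spelled out their generic type arguments (for example new TreeMap<String, TermsReader>() or new BytesRefFSTEnum<FSTTermOutputs.TermData>(dict) above) are rewritten to the Java 7 diamond operator, and the compiler infers the type arguments from the declared type on the left-hand side. A minimal compilable sketch of the before/after shape, using illustrative names rather than the Lucene ones:

    import java.util.HashMap;
    import java.util.Map;

    class DiamondExample {
      // Before (pre-Java 7 style): the type arguments are repeated on the constructor call.
      private final Map<String, Integer> countsOld = new HashMap<String, Integer>();

      // After (Java 7 diamond operator): the compiler infers <String, Integer>
      // from the declared type, so the call site no longer duplicates it.
      private final Map<String, Integer> counts = new HashMap<>();
    }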

Modified: lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsWriter.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsWriter.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsWriter.java Sun Mar 16 19:39:10 2014
@@ -130,7 +130,7 @@ public class FSTTermsWriter extends Fiel
   final FieldInfos fieldInfos;
   final IndexOutput out;
   final int maxDoc;
-  final List<FieldMetaData> fields = new ArrayList<FieldMetaData>();
+  final List<FieldMetaData> fields = new ArrayList<>();
 
   public FSTTermsWriter(SegmentWriteState state, PostingsWriterBase postingsWriter) throws IOException {
     final String termsFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, TERMS_EXTENSION);
@@ -259,7 +259,7 @@ public class FSTTermsWriter extends Fiel
       this.fieldInfo = fieldInfo;
       this.longsSize = postingsWriter.setField(fieldInfo);
       this.outputs = new FSTTermOutputs(fieldInfo, longsSize);
-      this.builder = new Builder<FSTTermOutputs.TermData>(FST.INPUT_TYPE.BYTE1, outputs);
+      this.builder = new Builder<>(FST.INPUT_TYPE.BYTE1, outputs);
     }
 
     public void finishTerm(BytesRef text, BlockTermState state) throws IOException {

Modified: lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryDocValuesConsumer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryDocValuesConsumer.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryDocValuesConsumer.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryDocValuesConsumer.java Sun Mar 16 19:39:10 2014
@@ -158,7 +158,7 @@ class MemoryDocValuesConsumer extends Do
       } else {
         meta.writeByte(TABLE_COMPRESSED); // table-compressed
         Long[] decode = uniqueValues.toArray(new Long[uniqueValues.size()]);
-        final HashMap<Long,Integer> encode = new HashMap<Long,Integer>();
+        final HashMap<Long,Integer> encode = new HashMap<>();
         data.writeVInt(decode.length);
         for (int i = 0; i < decode.length; i++) {
           data.writeLong(decode[i]);
@@ -281,7 +281,7 @@ class MemoryDocValuesConsumer extends Do
     meta.writeByte(FST);
     meta.writeLong(data.getFilePointer());
     PositiveIntOutputs outputs = PositiveIntOutputs.getSingleton();
-    Builder<Long> builder = new Builder<Long>(INPUT_TYPE.BYTE1, outputs);
+    Builder<Long> builder = new Builder<>(INPUT_TYPE.BYTE1, outputs);
     IntsRef scratch = new IntsRef();
     long ord = 0;
     for (BytesRef v : values) {

Modified: lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryDocValuesProducer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryDocValuesProducer.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryDocValuesProducer.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryDocValuesProducer.java Sun Mar 16 19:39:10 2014
@@ -68,12 +68,12 @@ class MemoryDocValuesProducer extends Do
   
   // ram instances we have already loaded
   private final Map<Integer,NumericDocValues> numericInstances = 
-      new HashMap<Integer,NumericDocValues>();
+      new HashMap<>();
   private final Map<Integer,BinaryDocValues> binaryInstances =
-      new HashMap<Integer,BinaryDocValues>();
+      new HashMap<>();
   private final Map<Integer,FST<Long>> fstInstances =
-      new HashMap<Integer,FST<Long>>();
-  private final Map<Integer,Bits> docsWithFieldInstances = new HashMap<Integer,Bits>();
+      new HashMap<>();
+  private final Map<Integer,Bits> docsWithFieldInstances = new HashMap<>();
   
   private final int maxDoc;
   private final AtomicLong ramBytesUsed;
@@ -104,9 +104,9 @@ class MemoryDocValuesProducer extends Do
       version = CodecUtil.checkHeader(in, metaCodec, 
                                       VERSION_START,
                                       VERSION_CURRENT);
-      numerics = new HashMap<Integer,NumericEntry>();
-      binaries = new HashMap<Integer,BinaryEntry>();
-      fsts = new HashMap<Integer,FSTEntry>();
+      numerics = new HashMap<>();
+      binaries = new HashMap<>();
+      fsts = new HashMap<>();
       readFields(in, state.fieldInfos);
       ramBytesUsed = new AtomicLong(RamUsageEstimator.shallowSizeOfInstance(getClass()));
       success = true;
@@ -314,7 +314,7 @@ class MemoryDocValuesProducer extends Do
       instance = fstInstances.get(field.number);
       if (instance == null) {
         data.seek(entry.offset);
-        instance = new FST<Long>(data, PositiveIntOutputs.getSingleton());
+        instance = new FST<>(data, PositiveIntOutputs.getSingleton());
         ramBytesUsed.addAndGet(instance.sizeInBytes());
         fstInstances.put(field.number, instance);
       }
@@ -324,10 +324,10 @@ class MemoryDocValuesProducer extends Do
     
     // per-thread resources
     final BytesReader in = fst.getBytesReader();
-    final Arc<Long> firstArc = new Arc<Long>();
-    final Arc<Long> scratchArc = new Arc<Long>();
+    final Arc<Long> firstArc = new Arc<>();
+    final Arc<Long> scratchArc = new Arc<>();
     final IntsRef scratchInts = new IntsRef();
-    final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<Long>(fst); 
+    final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<>(fst);
     
     return new SortedDocValues() {
       @Override
@@ -389,7 +389,7 @@ class MemoryDocValuesProducer extends Do
       instance = fstInstances.get(field.number);
       if (instance == null) {
         data.seek(entry.offset);
-        instance = new FST<Long>(data, PositiveIntOutputs.getSingleton());
+        instance = new FST<>(data, PositiveIntOutputs.getSingleton());
         ramBytesUsed.addAndGet(instance.sizeInBytes());
         fstInstances.put(field.number, instance);
       }
@@ -399,10 +399,10 @@ class MemoryDocValuesProducer extends Do
     
     // per-thread resources
     final BytesReader in = fst.getBytesReader();
-    final Arc<Long> firstArc = new Arc<Long>();
-    final Arc<Long> scratchArc = new Arc<Long>();
+    final Arc<Long> firstArc = new Arc<>();
+    final Arc<Long> scratchArc = new Arc<>();
     final IntsRef scratchInts = new IntsRef();
-    final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<Long>(fst); 
+    final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<>(fst);
     final BytesRef ref = new BytesRef();
     final ByteArrayDataInput input = new ByteArrayDataInput();
     return new SortedSetDocValues() {
@@ -546,14 +546,14 @@ class MemoryDocValuesProducer extends Do
     // maybe we should add a FSTEnum that supports this operation?
     final FST<Long> fst;
     final FST.BytesReader bytesReader;
-    final Arc<Long> firstArc = new Arc<Long>();
-    final Arc<Long> scratchArc = new Arc<Long>();
+    final Arc<Long> firstArc = new Arc<>();
+    final Arc<Long> scratchArc = new Arc<>();
     final IntsRef scratchInts = new IntsRef();
     final BytesRef scratchBytes = new BytesRef();
     
     FSTTermsEnum(FST<Long> fst) {
       this.fst = fst;
-      in = new BytesRefFSTEnum<Long>(fst);
+      in = new BytesRefFSTEnum<>(fst);
       bytesReader = fst.getBytesReader();
     }
 

Modified: lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryPostingsFormat.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryPostingsFormat.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryPostingsFormat.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryPostingsFormat.java Sun Mar 16 19:39:10 2014
@@ -119,7 +119,7 @@ public final class MemoryPostingsFormat 
       this.field = field;
       this.doPackFST = doPackFST;
       this.acceptableOverheadRatio = acceptableOverheadRatio;
-      builder = new Builder<BytesRef>(FST.INPUT_TYPE.BYTE1, 0, 0, true, true, Integer.MAX_VALUE, outputs, null, doPackFST, acceptableOverheadRatio, true, 15);
+      builder = new Builder<>(FST.INPUT_TYPE.BYTE1, 0, 0, true, true, Integer.MAX_VALUE, outputs, null, doPackFST, acceptableOverheadRatio, true, 15);
     }
 
     private class PostingsWriter {
@@ -740,7 +740,7 @@ public final class MemoryPostingsFormat 
 
     public FSTTermsEnum(FieldInfo field, FST<BytesRef> fst) {
       this.field = field;
-      fstEnum = new BytesRefFSTEnum<BytesRef>(fst);
+      fstEnum = new BytesRefFSTEnum<>(fst);
     }
 
     private void decodeMetaData() {
@@ -895,7 +895,7 @@ public final class MemoryPostingsFormat 
       sumDocFreq = in.readVLong();
       docCount = in.readVInt();
       
-      fst = new FST<BytesRef>(in, outputs);
+      fst = new FST<>(in, outputs);
     }
 
     @Override
@@ -953,7 +953,7 @@ public final class MemoryPostingsFormat 
     final String fileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, EXTENSION);
     final IndexInput in = state.directory.openInput(fileName, IOContext.READONCE);
 
-    final SortedMap<String,TermsReader> fields = new TreeMap<String,TermsReader>();
+    final SortedMap<String,TermsReader> fields = new TreeMap<>();
 
     try {
       while(true) {

Modified: lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/pulsing/PulsingPostingsReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/pulsing/PulsingPostingsReader.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/pulsing/PulsingPostingsReader.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/pulsing/PulsingPostingsReader.java Sun Mar 16 19:39:10 2014
@@ -76,7 +76,7 @@ public class PulsingPostingsReader exten
         version < PulsingPostingsWriter.VERSION_META_ARRAY) {
       fields = null;
     } else {
-      fields = new TreeMap<Integer, Integer>();
+      fields = new TreeMap<>();
       String summaryFileName = IndexFileNames.segmentFileName(segmentState.segmentInfo.name, segmentState.segmentSuffix, PulsingPostingsWriter.SUMMARY_EXTENSION);
       IndexInput in = null;
       try { 
@@ -628,7 +628,7 @@ public class PulsingPostingsReader exten
     // another pulsing, because this is just stupid and wasteful. 
     // we still have to be careful in case someone does Pulsing(Stomping(Pulsing(...
     private final Map<PulsingPostingsReader,DocsEnum> enums = 
-      new IdentityHashMap<PulsingPostingsReader,DocsEnum>();
+      new IdentityHashMap<>();
       
     @Override
     public Map<PulsingPostingsReader,DocsEnum> enums() {

Modified: lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/pulsing/PulsingPostingsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/pulsing/PulsingPostingsWriter.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/pulsing/PulsingPostingsWriter.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/pulsing/PulsingPostingsWriter.java Sun Mar 16 19:39:10 2014
@@ -125,7 +125,7 @@ public final class PulsingPostingsWriter
    *  for this term) is <= maxPositions, then the postings are
    *  inlined into terms dict */
   public PulsingPostingsWriter(SegmentWriteState state, int maxPositions, PostingsWriterBase wrappedPostingsWriter) {
-    fields = new ArrayList<FieldMetaData>();
+    fields = new ArrayList<>();
     this.maxPositions = maxPositions;
     // We simply wrap another postings writer, but only call
     // on it when tot positions is >= the cutoff:

Modified: lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesReader.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesReader.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesReader.java Sun Mar 16 19:39:10 2014
@@ -67,7 +67,7 @@ class SimpleTextDocValuesReader extends 
   final int maxDoc;
   final IndexInput data;
   final BytesRef scratch = new BytesRef();
-  final Map<String,OneField> fields = new HashMap<String,OneField>();
+  final Map<String,OneField> fields = new HashMap<>();
   
   public SimpleTextDocValuesReader(SegmentReadState state, String ext) throws IOException {
     // System.out.println("dir=" + state.directory + " seg=" + state.segmentInfo.name + " file=" + IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, ext));

Modified: lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesWriter.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesWriter.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesWriter.java Sun Mar 16 19:39:10 2014
@@ -52,7 +52,7 @@ class SimpleTextDocValuesWriter extends 
   final IndexOutput data;
   final BytesRef scratch = new BytesRef();
   final int numDocs;
-  private final Set<String> fieldsSeen = new HashSet<String>(); // for asserting
+  private final Set<String> fieldsSeen = new HashSet<>(); // for asserting
   
   public SimpleTextDocValuesWriter(SegmentWriteState state, String ext) throws IOException {
     // System.out.println("WRITE: " + IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, ext) + " " + state.segmentInfo.getDocCount() + " docs");

Modified: lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextFieldInfosReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextFieldInfosReader.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextFieldInfosReader.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextFieldInfosReader.java Sun Mar 16 19:39:10 2014
@@ -111,7 +111,7 @@ public class SimpleTextFieldInfosReader 
         SimpleTextUtil.readLine(input, scratch);
         assert StringHelper.startsWith(scratch, NUM_ATTS);
         int numAtts = Integer.parseInt(readString(NUM_ATTS.length, scratch));
-        Map<String,String> atts = new HashMap<String,String>();
+        Map<String,String> atts = new HashMap<>();
 
         for (int j = 0; j < numAtts; j++) {
           SimpleTextUtil.readLine(input, scratch);

Modified: lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextFieldsReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextFieldsReader.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextFieldsReader.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextFieldsReader.java Sun Mar 16 19:39:10 2014
@@ -83,7 +83,7 @@ class SimpleTextFieldsReader extends Fie
   
   private TreeMap<String,Long> readFields(IndexInput in) throws IOException {
     BytesRef scratch = new BytesRef(10);
-    TreeMap<String,Long> fields = new TreeMap<String,Long>();
+    TreeMap<String,Long> fields = new TreeMap<>();
     
     while (true) {
       SimpleTextUtil.readLine(in, scratch);
@@ -106,7 +106,7 @@ class SimpleTextFieldsReader extends Fie
 
     public SimpleTextTermsEnum(FST<PairOutputs.Pair<Long,PairOutputs.Pair<Long,Long>>> fst, IndexOptions indexOptions) {
       this.indexOptions = indexOptions;
-      fstEnum = new BytesRefFSTEnum<PairOutputs.Pair<Long,PairOutputs.Pair<Long,Long>>>(fst);
+      fstEnum = new BytesRefFSTEnum<>(fst);
     }
 
     @Override
@@ -513,10 +513,10 @@ class SimpleTextFieldsReader extends Fie
     private void loadTerms() throws IOException {
       PositiveIntOutputs posIntOutputs = PositiveIntOutputs.getSingleton();
       final Builder<PairOutputs.Pair<Long,PairOutputs.Pair<Long,Long>>> b;
-      final PairOutputs<Long,Long> outputsInner = new PairOutputs<Long,Long>(posIntOutputs, posIntOutputs);
-      final PairOutputs<Long,PairOutputs.Pair<Long,Long>> outputs = new PairOutputs<Long,PairOutputs.Pair<Long,Long>>(posIntOutputs,
+      final PairOutputs<Long,Long> outputsInner = new PairOutputs<>(posIntOutputs, posIntOutputs);
+      final PairOutputs<Long,PairOutputs.Pair<Long,Long>> outputs = new PairOutputs<>(posIntOutputs,
                                                                                                                       outputsInner);
-      b = new Builder<PairOutputs.Pair<Long,PairOutputs.Pair<Long,Long>>>(FST.INPUT_TYPE.BYTE1, outputs);
+      b = new Builder<>(FST.INPUT_TYPE.BYTE1, outputs);
       IndexInput in = SimpleTextFieldsReader.this.in.clone();
       in.seek(termsStart);
       final BytesRef lastTerm = new BytesRef(10);
@@ -633,7 +633,7 @@ class SimpleTextFieldsReader extends Fie
     return Collections.unmodifiableSet(fields.keySet()).iterator();
   }
 
-  private final Map<String,SimpleTextTerms> termsCache = new HashMap<String,SimpleTextTerms>();
+  private final Map<String,SimpleTextTerms> termsCache = new HashMap<>();
 
   @Override
   synchronized public Terms terms(String field) throws IOException {

Modified: lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfoReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfoReader.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfoReader.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfoReader.java Sun Mar 16 19:39:10 2014
@@ -72,7 +72,7 @@ public class SimpleTextSegmentInfoReader
       SimpleTextUtil.readLine(input, scratch);
       assert StringHelper.startsWith(scratch, SI_NUM_DIAG);
       int numDiag = Integer.parseInt(readString(SI_NUM_DIAG.length, scratch));
-      Map<String,String> diagnostics = new HashMap<String,String>();
+      Map<String,String> diagnostics = new HashMap<>();
 
       for (int i = 0; i < numDiag; i++) {
         SimpleTextUtil.readLine(input, scratch);
@@ -88,7 +88,7 @@ public class SimpleTextSegmentInfoReader
       SimpleTextUtil.readLine(input, scratch);
       assert StringHelper.startsWith(scratch, SI_NUM_FILES);
       int numFiles = Integer.parseInt(readString(SI_NUM_FILES.length, scratch));
-      Set<String> files = new HashSet<String>();
+      Set<String> files = new HashSet<>();
 
       for (int i = 0; i < numFiles; i++) {
         SimpleTextUtil.readLine(input, scratch);

Modified: lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextTermVectorsReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextTermVectorsReader.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextTermVectorsReader.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextTermVectorsReader.java Sun Mar 16 19:39:10 2014
@@ -96,7 +96,7 @@ public class SimpleTextTermVectorsReader
   
   @Override
   public Fields get(int doc) throws IOException {
-    SortedMap<String,SimpleTVTerms> fields = new TreeMap<String,SimpleTVTerms>();
+    SortedMap<String,SimpleTVTerms> fields = new TreeMap<>();
     in.seek(offsets[doc]);
     readLine();
     assert StringHelper.startsWith(scratch, NUMFIELDS);
@@ -261,7 +261,7 @@ public class SimpleTextTermVectorsReader
       this.hasOffsets = hasOffsets;
       this.hasPositions = hasPositions;
       this.hasPayloads = hasPayloads;
-      terms = new TreeMap<BytesRef,SimpleTVPostings>();
+      terms = new TreeMap<>();
     }
     
     @Override

Modified: lucene/dev/branches/lucene5376_2/lucene/codecs/src/test/org/apache/lucene/codecs/pulsing/TestPulsingReuse.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/codecs/src/test/org/apache/lucene/codecs/pulsing/TestPulsingReuse.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/codecs/src/test/org/apache/lucene/codecs/pulsing/TestPulsingReuse.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/codecs/src/test/org/apache/lucene/codecs/pulsing/TestPulsingReuse.java Sun Mar 16 19:39:10 2014
@@ -56,7 +56,7 @@ public class TestPulsingReuse extends Lu
     
     AtomicReader segment = getOnlySegmentReader(ir);
     DocsEnum reuse = null;
-    Map<DocsEnum,Boolean> allEnums = new IdentityHashMap<DocsEnum,Boolean>();
+    Map<DocsEnum,Boolean> allEnums = new IdentityHashMap<>();
     TermsEnum te = segment.terms("foo").iterator(null);
     while (te.next() != null) {
       reuse = te.docs(null, reuse, DocsEnum.FLAG_NONE);
@@ -97,7 +97,7 @@ public class TestPulsingReuse extends Lu
     
     AtomicReader segment = getOnlySegmentReader(ir);
     DocsEnum reuse = null;
-    Map<DocsEnum,Boolean> allEnums = new IdentityHashMap<DocsEnum,Boolean>();
+    Map<DocsEnum,Boolean> allEnums = new IdentityHashMap<>();
     TermsEnum te = segment.terms("foo").iterator(null);
     while (te.next() != null) {
       reuse = te.docs(null, reuse, DocsEnum.FLAG_NONE);

Modified: lucene/dev/branches/lucene5376_2/lucene/common-build.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/common-build.xml?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/common-build.xml (original)
+++ lucene/dev/branches/lucene5376_2/lucene/common-build.xml Sun Mar 16 19:39:10 2014
@@ -989,6 +989,7 @@
                 <propertyref prefix="tests.timeoutSuite" />
                 <propertyref prefix="tests.jettyConnector" />
                 <propertyref prefix="tests.disableHdfs" />
+                <propertyref prefix="tests.filter" />
             </syspropertyset>
 
             <!-- Pass randomized settings to the forked JVM. -->
@@ -1333,7 +1334,7 @@ ${tests-output}/junit4-*.suites     - pe
   
 ]]></fail>
     <echo>Code coverage with Atlassian Clover enabled.</echo>
-    <ivy:cachepath organisation="com.cenqua.clover" module="clover" revision="3.2.0"
+    <ivy:cachepath organisation="com.cenqua.clover" module="clover" revision="3.2.2"
       inline="true" conf="master" pathid="clover.classpath"/>
     <taskdef resource="cloverlib.xml" classpathref="clover.classpath" />
     <mkdir dir="${clover.db.dir}"/>

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/analysis/Analyzer.java Sun Mar 16 19:39:10 2014
@@ -73,7 +73,7 @@ public abstract class Analyzer implement
   private final ReuseStrategy reuseStrategy;
 
   // non final as it gets nulled if closed; pkg private for access by ReuseStrategy's final helper methods:
-  CloseableThreadLocal<Object> storedValue = new CloseableThreadLocal<Object>();
+  CloseableThreadLocal<Object> storedValue = new CloseableThreadLocal<>();
 
   /**
    * Create a new Analyzer, reusing the same set of components per-thread
@@ -417,7 +417,7 @@ public abstract class Analyzer implement
     public void setReusableComponents(Analyzer analyzer, String fieldName, TokenStreamComponents components) {
       Map<String, TokenStreamComponents> componentsPerField = (Map<String, TokenStreamComponents>) getStoredValue(analyzer);
       if (componentsPerField == null) {
-        componentsPerField = new HashMap<String, TokenStreamComponents>();
+        componentsPerField = new HashMap<>();
         setStoredValue(analyzer, componentsPerField);
       }
       componentsPerField.put(fieldName, components);

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/analysis/CachingTokenFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/analysis/CachingTokenFilter.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/analysis/CachingTokenFilter.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/analysis/CachingTokenFilter.java Sun Mar 16 19:39:10 2014
@@ -51,7 +51,7 @@ public final class CachingTokenFilter ex
   public final boolean incrementToken() throws IOException {
     if (cache == null) {
       // fill cache lazily
-      cache = new LinkedList<AttributeSource.State>();
+      cache = new LinkedList<>();
       fillCache();
       iterator = cache.iterator();
     }

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/BlockTreeTermsReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/BlockTreeTermsReader.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/BlockTreeTermsReader.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/BlockTreeTermsReader.java Sun Mar 16 19:39:10 2014
@@ -96,7 +96,7 @@ public class BlockTreeTermsReader extend
   // produce DocsEnum on demand
   private final PostingsReaderBase postingsReader;
 
-  private final TreeMap<String,FieldReader> fields = new TreeMap<String,FieldReader>();
+  private final TreeMap<String,FieldReader> fields = new TreeMap<>();
 
   /** File offset where the directory starts in the terms file. */
   private long dirOffset;
@@ -474,7 +474,7 @@ public class BlockTreeTermsReader extend
         final IndexInput clone = indexIn.clone();
         //System.out.println("start=" + indexStartFP + " field=" + fieldInfo.name);
         clone.seek(indexStartFP);
-        index = new FST<BytesRef>(clone, ByteSequenceOutputs.getSingleton());
+        index = new FST<>(clone, ByteSequenceOutputs.getSingleton());
         
         /*
         if (false) {
@@ -848,7 +848,7 @@ public class BlockTreeTermsReader extend
           stack[idx] = new Frame(idx);
         }
         for(int arcIdx=0;arcIdx<arcs.length;arcIdx++) {
-          arcs[arcIdx] = new FST.Arc<BytesRef>();
+          arcs[arcIdx] = new FST.Arc<>();
         }
 
         if (index == null) {
@@ -917,7 +917,7 @@ public class BlockTreeTermsReader extend
             new FST.Arc[ArrayUtil.oversize(1+ord, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
           System.arraycopy(arcs, 0, next, 0, arcs.length);
           for(int arcOrd=arcs.length;arcOrd<next.length;arcOrd++) {
-            next[arcOrd] = new FST.Arc<BytesRef>();
+            next[arcOrd] = new FST.Arc<>();
           }
           arcs = next;
         }
@@ -1299,7 +1299,7 @@ public class BlockTreeTermsReader extend
         // Init w/ root block; don't use index since it may
         // not (and need not) have been loaded
         for(int arcIdx=0;arcIdx<arcs.length;arcIdx++) {
-          arcs[arcIdx] = new FST.Arc<BytesRef>();
+          arcs[arcIdx] = new FST.Arc<>();
         }
 
         currentFrame = staticFrame;
@@ -1441,7 +1441,7 @@ public class BlockTreeTermsReader extend
               new FST.Arc[ArrayUtil.oversize(1+ord, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
           System.arraycopy(arcs, 0, next, 0, arcs.length);
           for(int arcOrd=arcs.length;arcOrd<next.length;arcOrd++) {
-            next[arcOrd] = new FST.Arc<BytesRef>();
+            next[arcOrd] = new FST.Arc<>();
           }
           arcs = next;
         }

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/BlockTreeTermsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/BlockTreeTermsWriter.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/BlockTreeTermsWriter.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/BlockTreeTermsWriter.java Sun Mar 16 19:39:10 2014
@@ -264,7 +264,7 @@ public class BlockTreeTermsWriter extend
     }
   }
 
-  private final List<FieldMetaData> fields = new ArrayList<FieldMetaData>();
+  private final List<FieldMetaData> fields = new ArrayList<>();
   // private final String segment;
 
   /** Create a new writer.  The number of items (terms or
@@ -462,7 +462,7 @@ public class BlockTreeTermsWriter extend
       }
 
       final ByteSequenceOutputs outputs = ByteSequenceOutputs.getSingleton();
-      final Builder<BytesRef> indexBuilder = new Builder<BytesRef>(FST.INPUT_TYPE.BYTE1,
+      final Builder<BytesRef> indexBuilder = new Builder<>(FST.INPUT_TYPE.BYTE1,
                                                                    0, 0, true, false, Integer.MAX_VALUE,
                                                                    outputs, null, false,
                                                                    PackedInts.COMPACT, true, 15);
@@ -510,7 +510,7 @@ public class BlockTreeTermsWriter extend
     // Builder?  Takes FST and unions it w/ current
     // FST.
     private void append(Builder<BytesRef> builder, FST<BytesRef> subIndex) throws IOException {
-      final BytesRefFSTEnum<BytesRef> subIndexEnum = new BytesRefFSTEnum<BytesRef>(subIndex);
+      final BytesRefFSTEnum<BytesRef> subIndexEnum = new BytesRefFSTEnum<>(subIndex);
       BytesRefFSTEnum.InputOutput<BytesRef> indexEnt;
       while((indexEnt = subIndexEnum.next()) != null) {
         //if (DEBUG) {
@@ -538,7 +538,7 @@ public class BlockTreeTermsWriter extend
     private final Builder<Object> blockBuilder;
 
     // PendingTerm or PendingBlock:
-    private final List<PendingEntry> pending = new ArrayList<PendingEntry>();
+    private final List<PendingEntry> pending = new ArrayList<>();
 
     // Index into pending of most recently written block
     private int lastBlockIndex = -1;
@@ -593,7 +593,7 @@ public class BlockTreeTermsWriter extend
             // stragglers!  carry count upwards
             node.inputCount = totCount;
           }
-          frontier[idx] = new Builder.UnCompiledNode<Object>(blockBuilder, idx);
+          frontier[idx] = new Builder.UnCompiledNode<>(blockBuilder, idx);
         }
       }
     }
@@ -743,7 +743,7 @@ public class BlockTreeTermsWriter extend
         int curStart = count;
         subCount = 0;
 
-        final List<PendingBlock> floorBlocks = new ArrayList<PendingBlock>();
+        final List<PendingBlock> floorBlocks = new ArrayList<>();
         PendingBlock firstBlock = null;
 
         for(int sub=0;sub<numSubs;sub++) {
@@ -925,7 +925,7 @@ public class BlockTreeTermsWriter extend
         }
         termCount = length;
       } else {
-        subIndices = new ArrayList<FST<BytesRef>>();
+        subIndices = new ArrayList<>();
         termCount = 0;
         for (PendingEntry ent : slice) {
           if (ent.isTerm) {
@@ -1042,7 +1042,7 @@ public class BlockTreeTermsWriter extend
       // This Builder is just used transiently to fragment
       // terms into "good" blocks; we don't save the
       // resulting FST:
-      blockBuilder = new Builder<Object>(FST.INPUT_TYPE.BYTE1,
+      blockBuilder = new Builder<>(FST.INPUT_TYPE.BYTE1,
                                          0, 0, true,
                                          true, Integer.MAX_VALUE,
                                          noOutputs,

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/Codec.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/Codec.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/Codec.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/Codec.java Sun Mar 16 19:39:10 2014
@@ -39,7 +39,7 @@ import org.apache.lucene.util.NamedSPILo
 public abstract class Codec implements NamedSPILoader.NamedSPI {
 
   private static final NamedSPILoader<Codec> loader =
-    new NamedSPILoader<Codec>(Codec.class);
+    new NamedSPILoader<>(Codec.class);
 
   private final String name;
 

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/DocValuesFormat.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/DocValuesFormat.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/DocValuesFormat.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/DocValuesFormat.java Sun Mar 16 19:39:10 2014
@@ -41,7 +41,7 @@ import org.apache.lucene.util.NamedSPILo
 public abstract class DocValuesFormat implements NamedSPILoader.NamedSPI {
   
   private static final NamedSPILoader<DocValuesFormat> loader =
-      new NamedSPILoader<DocValuesFormat>(DocValuesFormat.class);
+      new NamedSPILoader<>(DocValuesFormat.class);
   
   /** Unique name that's used to retrieve this format when
    *  reading the index.

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/PostingsFormat.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/PostingsFormat.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/PostingsFormat.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/PostingsFormat.java Sun Mar 16 19:39:10 2014
@@ -42,7 +42,7 @@ import org.apache.lucene.util.NamedSPILo
 public abstract class PostingsFormat implements NamedSPILoader.NamedSPI {
 
   private static final NamedSPILoader<PostingsFormat> loader =
-    new NamedSPILoader<PostingsFormat>(PostingsFormat.class);
+    new NamedSPILoader<>(PostingsFormat.class);
 
   /** Zero-length {@code PostingsFormat} array. */
   public static final PostingsFormat[] EMPTY = new PostingsFormat[0];

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingTermVectorsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingTermVectorsWriter.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingTermVectorsWriter.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingTermVectorsWriter.java Sun Mar 16 19:39:10 2014
@@ -92,7 +92,7 @@ public final class CompressingTermVector
     final int posStart, offStart, payStart;
     DocData(int numFields, int posStart, int offStart, int payStart) {
       this.numFields = numFields;
-      this.fields = new ArrayDeque<FieldData>(numFields);
+      this.fields = new ArrayDeque<>(numFields);
       this.posStart = posStart;
       this.offStart = offStart;
       this.payStart = payStart;
@@ -214,7 +214,7 @@ public final class CompressingTermVector
     this.chunkSize = chunkSize;
 
     numDocs = 0;
-    pendingDocs = new ArrayDeque<DocData>();
+    pendingDocs = new ArrayDeque<>();
     termSuffixes = new GrowableByteArrayDataOutput(ArrayUtil.oversize(chunkSize, 1));
     payloadBytes = new GrowableByteArrayDataOutput(ArrayUtil.oversize(1, 1));
     lastTerm = new BytesRef(ArrayUtil.oversize(30, 1));
@@ -393,7 +393,7 @@ public final class CompressingTermVector
 
   /** Returns a sorted array containing unique field numbers */
   private int[] flushFieldNums() throws IOException {
-    SortedSet<Integer> fieldNums = new TreeSet<Integer>();
+    SortedSet<Integer> fieldNums = new TreeSet<>();
     for (DocData dd : pendingDocs) {
       for (FieldData fd : dd.fields) {
         fieldNums.add(fd.fieldNum);

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40DocValuesReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40DocValuesReader.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40DocValuesReader.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40DocValuesReader.java Sun Mar 16 19:39:10 2014
@@ -57,11 +57,11 @@ final class Lucene40DocValuesReader exte
 
   // ram instances we have already loaded
   private final Map<Integer,NumericDocValues> numericInstances =
-      new HashMap<Integer,NumericDocValues>();
+      new HashMap<>();
   private final Map<Integer,BinaryDocValues> binaryInstances =
-      new HashMap<Integer,BinaryDocValues>();
+      new HashMap<>();
   private final Map<Integer,SortedDocValues> sortedInstances =
-      new HashMap<Integer,SortedDocValues>();
+      new HashMap<>();
 
   private final AtomicLong ramBytesUsed;
 

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40TermVectorsReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40TermVectorsReader.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40TermVectorsReader.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40TermVectorsReader.java Sun Mar 16 19:39:10 2014
@@ -221,7 +221,7 @@ public class Lucene40TermVectorsReader e
   private class TVFields extends Fields {
     private final int[] fieldNumbers;
     private final long[] fieldFPs;
-    private final Map<Integer,Integer> fieldNumberToIndex = new HashMap<Integer,Integer>();
+    private final Map<Integer,Integer> fieldNumberToIndex = new HashMap<>();
 
     public TVFields(int docID) throws IOException {
       seekTvx(docID);

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42DocValuesProducer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42DocValuesProducer.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42DocValuesProducer.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42DocValuesProducer.java Sun Mar 16 19:39:10 2014
@@ -67,11 +67,11 @@ class Lucene42DocValuesProducer extends 
   
   // ram instances we have already loaded
   private final Map<Integer,NumericDocValues> numericInstances = 
-      new HashMap<Integer,NumericDocValues>();
+      new HashMap<>();
   private final Map<Integer,BinaryDocValues> binaryInstances =
-      new HashMap<Integer,BinaryDocValues>();
+      new HashMap<>();
   private final Map<Integer,FST<Long>> fstInstances =
-      new HashMap<Integer,FST<Long>>();
+      new HashMap<>();
   
   private final int maxDoc;
   private final AtomicLong ramBytesUsed;
@@ -103,9 +103,9 @@ class Lucene42DocValuesProducer extends 
       version = CodecUtil.checkHeader(in, metaCodec, 
                                       VERSION_START,
                                       VERSION_CURRENT);
-      numerics = new HashMap<Integer,NumericEntry>();
-      binaries = new HashMap<Integer,BinaryEntry>();
-      fsts = new HashMap<Integer,FSTEntry>();
+      numerics = new HashMap<>();
+      binaries = new HashMap<>();
+      fsts = new HashMap<>();
       readFields(in, state.fieldInfos);
 
       success = true;
@@ -297,7 +297,7 @@ class Lucene42DocValuesProducer extends 
       instance = fstInstances.get(field.number);
       if (instance == null) {
         data.seek(entry.offset);
-        instance = new FST<Long>(data, PositiveIntOutputs.getSingleton());
+        instance = new FST<>(data, PositiveIntOutputs.getSingleton());
         ramBytesUsed.addAndGet(instance.sizeInBytes());
         fstInstances.put(field.number, instance);
       }
@@ -307,10 +307,10 @@ class Lucene42DocValuesProducer extends 
     
     // per-thread resources
     final BytesReader in = fst.getBytesReader();
-    final Arc<Long> firstArc = new Arc<Long>();
-    final Arc<Long> scratchArc = new Arc<Long>();
+    final Arc<Long> firstArc = new Arc<>();
+    final Arc<Long> scratchArc = new Arc<>();
     final IntsRef scratchInts = new IntsRef();
-    final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<Long>(fst); 
+    final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<>(fst);
     
     return new SortedDocValues() {
       @Override
@@ -372,7 +372,7 @@ class Lucene42DocValuesProducer extends 
       instance = fstInstances.get(field.number);
       if (instance == null) {
         data.seek(entry.offset);
-        instance = new FST<Long>(data, PositiveIntOutputs.getSingleton());
+        instance = new FST<>(data, PositiveIntOutputs.getSingleton());
         ramBytesUsed.addAndGet(instance.sizeInBytes());
         fstInstances.put(field.number, instance);
       }
@@ -382,10 +382,10 @@ class Lucene42DocValuesProducer extends 
     
     // per-thread resources
     final BytesReader in = fst.getBytesReader();
-    final Arc<Long> firstArc = new Arc<Long>();
-    final Arc<Long> scratchArc = new Arc<Long>();
+    final Arc<Long> firstArc = new Arc<>();
+    final Arc<Long> scratchArc = new Arc<>();
     final IntsRef scratchInts = new IntsRef();
-    final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<Long>(fst); 
+    final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<>(fst);
     final BytesRef ref = new BytesRef();
     final ByteArrayDataInput input = new ByteArrayDataInput();
     return new SortedSetDocValues() {
@@ -493,14 +493,14 @@ class Lucene42DocValuesProducer extends 
     // maybe we should add a FSTEnum that supports this operation?
     final FST<Long> fst;
     final FST.BytesReader bytesReader;
-    final Arc<Long> firstArc = new Arc<Long>();
-    final Arc<Long> scratchArc = new Arc<Long>();
+    final Arc<Long> firstArc = new Arc<>();
+    final Arc<Long> scratchArc = new Arc<>();
     final IntsRef scratchInts = new IntsRef();
     final BytesRef scratchBytes = new BytesRef();
     
     FSTTermsEnum(FST<Long> fst) {
       this.fst = fst;
-      in = new BytesRefFSTEnum<Long>(fst);
+      in = new BytesRefFSTEnum<>(fst);
       bytesReader = fst.getBytesReader();
     }
 

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42NormsConsumer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42NormsConsumer.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42NormsConsumer.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42NormsConsumer.java Sun Mar 16 19:39:10 2014
@@ -131,7 +131,7 @@ class Lucene42NormsConsumer extends DocV
       } else {
         meta.writeByte(TABLE_COMPRESSED); // table-compressed
         Long[] decode = uniqueValues.toArray(new Long[uniqueValues.size()]);
-        final HashMap<Long,Integer> encode = new HashMap<Long,Integer>();
+        final HashMap<Long,Integer> encode = new HashMap<>();
         data.writeVInt(decode.length);
         for (int i = 0; i < decode.length; i++) {
           data.writeLong(decode[i]);

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/lucene45/Lucene45DocValuesConsumer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/lucene45/Lucene45DocValuesConsumer.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/lucene45/Lucene45DocValuesConsumer.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/lucene45/Lucene45DocValuesConsumer.java Sun Mar 16 19:39:10 2014
@@ -189,7 +189,7 @@ public class Lucene45DocValuesConsumer e
         break;
       case TABLE_COMPRESSED:
         final Long[] decode = uniqueValues.toArray(new Long[uniqueValues.size()]);
-        final HashMap<Long,Integer> encode = new HashMap<Long,Integer>();
+        final HashMap<Long,Integer> encode = new HashMap<>();
         meta.writeVInt(decode.length);
         for (int i = 0; i < decode.length; i++) {
           meta.writeLong(decode[i]);

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/lucene45/Lucene45DocValuesProducer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/lucene45/Lucene45DocValuesProducer.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/lucene45/Lucene45DocValuesProducer.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/lucene45/Lucene45DocValuesProducer.java Sun Mar 16 19:39:10 2014
@@ -73,8 +73,8 @@ public class Lucene45DocValuesProducer e
   private final int version;
 
   // memory-resident structures
-  private final Map<Integer,MonotonicBlockPackedReader> addressInstances = new HashMap<Integer,MonotonicBlockPackedReader>();
-  private final Map<Integer,MonotonicBlockPackedReader> ordIndexInstances = new HashMap<Integer,MonotonicBlockPackedReader>();
+  private final Map<Integer,MonotonicBlockPackedReader> addressInstances = new HashMap<>();
+  private final Map<Integer,MonotonicBlockPackedReader> ordIndexInstances = new HashMap<>();
   
   /** expert: instantiates a new reader */
   protected Lucene45DocValuesProducer(SegmentReadState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) throws IOException {
@@ -87,11 +87,11 @@ public class Lucene45DocValuesProducer e
       version = CodecUtil.checkHeader(in, metaCodec, 
                                       Lucene45DocValuesFormat.VERSION_START,
                                       Lucene45DocValuesFormat.VERSION_CURRENT);
-      numerics = new HashMap<Integer,NumericEntry>();
-      ords = new HashMap<Integer,NumericEntry>();
-      ordIndexes = new HashMap<Integer,NumericEntry>();
-      binaries = new HashMap<Integer,BinaryEntry>();
-      sortedSets = new HashMap<Integer,SortedSetEntry>();
+      numerics = new HashMap<>();
+      ords = new HashMap<>();
+      ordIndexes = new HashMap<>();
+      binaries = new HashMap<>();
+      sortedSets = new HashMap<>();
       readFields(in, state.fieldInfos);
 
       success = true;

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/perfield/PerFieldDocValuesFormat.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/perfield/PerFieldDocValuesFormat.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/perfield/PerFieldDocValuesFormat.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/perfield/PerFieldDocValuesFormat.java Sun Mar 16 19:39:10 2014
@@ -92,8 +92,8 @@ public abstract class PerFieldDocValuesF
     
   private class FieldsWriter extends DocValuesConsumer {
 
-    private final Map<DocValuesFormat,ConsumerAndSuffix> formats = new HashMap<DocValuesFormat,ConsumerAndSuffix>();
-    private final Map<String,Integer> suffixes = new HashMap<String,Integer>();
+    private final Map<DocValuesFormat,ConsumerAndSuffix> formats = new HashMap<>();
+    private final Map<String,Integer> suffixes = new HashMap<>();
     
     private final SegmentWriteState segmentWriteState;
     
@@ -209,8 +209,8 @@ public abstract class PerFieldDocValuesF
 
   private class FieldsReader extends DocValuesProducer {
 
-    private final Map<String,DocValuesProducer> fields = new TreeMap<String,DocValuesProducer>();
-    private final Map<String,DocValuesProducer> formats = new HashMap<String,DocValuesProducer>();
+    private final Map<String,DocValuesProducer> fields = new TreeMap<>();
+    private final Map<String,DocValuesProducer> formats = new HashMap<>();
 
     public FieldsReader(final SegmentReadState readState) throws IOException {
 
@@ -245,7 +245,7 @@ public abstract class PerFieldDocValuesF
 
     private FieldsReader(FieldsReader other) {
 
-      Map<DocValuesProducer,DocValuesProducer> oldToNew = new IdentityHashMap<DocValuesProducer,DocValuesProducer>();
+      Map<DocValuesProducer,DocValuesProducer> oldToNew = new IdentityHashMap<>();
       // First clone all formats
       for(Map.Entry<String,DocValuesProducer> ent : other.formats.entrySet()) {
         DocValuesProducer values = ent.getValue();

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/perfield/PerFieldPostingsFormat.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/perfield/PerFieldPostingsFormat.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/perfield/PerFieldPostingsFormat.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/codecs/perfield/PerFieldPostingsFormat.java Sun Mar 16 19:39:10 2014
@@ -75,7 +75,7 @@ public abstract class PerFieldPostingsFo
 
   /** Group of fields written by one PostingsFormat */
   static class FieldsGroup {
-    final Set<String> fields = new TreeSet<String>();
+    final Set<String> fields = new TreeSet<>();
     int suffix;
 
     /** Custom SegmentWriteState for this group of fields,
@@ -111,10 +111,10 @@ public abstract class PerFieldPostingsFo
 
       // Maps a PostingsFormat instance to the suffix it
       // should use
-      Map<PostingsFormat,FieldsGroup> formatToGroups = new HashMap<PostingsFormat,FieldsGroup>();
+      Map<PostingsFormat,FieldsGroup> formatToGroups = new HashMap<>();
 
       // Holds last suffix of each PostingFormat name
-      Map<String,Integer> suffixes = new HashMap<String,Integer>();
+      Map<String,Integer> suffixes = new HashMap<>();
 
       // First pass: assign field -> PostingsFormat
       for(String field : fields) {
@@ -182,8 +182,8 @@ public abstract class PerFieldPostingsFo
 
   private class FieldsReader extends FieldsProducer {
 
-    private final Map<String,FieldsProducer> fields = new TreeMap<String,FieldsProducer>();
-    private final Map<String,FieldsProducer> formats = new HashMap<String,FieldsProducer>();
+    private final Map<String,FieldsProducer> fields = new TreeMap<>();
+    private final Map<String,FieldsProducer> formats = new HashMap<>();
 
     public FieldsReader(final SegmentReadState readState) throws IOException {
 

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/document/Document.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/document/Document.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/document/Document.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/document/Document.java Sun Mar 16 19:39:10 2014
@@ -44,7 +44,7 @@ import org.apache.lucene.util.FilterIter
 
 public final class Document implements IndexDocument {
 
-  private final List<Field> fields = new ArrayList<Field>();
+  private final List<Field> fields = new ArrayList<>();
 
   /** Constructs a new document with no fields. */
   public Document() {}
@@ -140,7 +140,7 @@ public final class Document implements I
   * @return a <code>BytesRef[]</code> of binary field values
   */
   public final BytesRef[] getBinaryValues(String name) {
-    final List<BytesRef> result = new ArrayList<BytesRef>();
+    final List<BytesRef> result = new ArrayList<>();
 
     for (Iterator<StorableField> it = storedFieldsIterator(); it.hasNext(); ) {
       StorableField field = it.next();
@@ -199,7 +199,7 @@ public final class Document implements I
    * @return a <code>Field[]</code> array
    */
   public Field[] getFields(String name) {
-    List<Field> result = new ArrayList<Field>();
+    List<Field> result = new ArrayList<>();
     for (Field field : fields) {
       if (field.name().equals(name)) {
         result.add(field);
@@ -234,7 +234,7 @@ public final class Document implements I
    * @return a <code>String[]</code> of field values
    */
   public final String[] getValues(String name) {
-    List<String> result = new ArrayList<String>();
+    List<String> result = new ArrayList<>();
 
     for (Iterator<StorableField> it = storedFieldsIterator(); it.hasNext(); ) {
       StorableField field = it.next();

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/document/DocumentStoredFieldVisitor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/document/DocumentStoredFieldVisitor.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/document/DocumentStoredFieldVisitor.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/document/DocumentStoredFieldVisitor.java Sun Mar 16 19:39:10 2014
@@ -49,7 +49,7 @@ public class DocumentStoredFieldVisitor 
 
   /** Load only fields named in the provided fields. */
   public DocumentStoredFieldVisitor(String... fields) {
-    fieldsToAdd = new HashSet<String>(fields.length);
+    fieldsToAdd = new HashSet<>(fields.length);
     for(String field : fields) {
       fieldsToAdd.add(field);
     }

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/BufferedUpdates.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/BufferedUpdates.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/BufferedUpdates.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/BufferedUpdates.java Sun Mar 16 19:39:10 2014
@@ -95,9 +95,9 @@ class BufferedUpdates {
   
   final AtomicInteger numTermDeletes = new AtomicInteger();
   final AtomicInteger numNumericUpdates = new AtomicInteger();
-  final Map<Term,Integer> terms = new HashMap<Term,Integer>();
-  final Map<Query,Integer> queries = new HashMap<Query,Integer>();
-  final List<Integer> docIDs = new ArrayList<Integer>();
+  final Map<Term,Integer> terms = new HashMap<>();
+  final Map<Query,Integer> queries = new HashMap<>();
+  final List<Integer> docIDs = new ArrayList<>();
 
   // Map<dvField,Map<updateTerm,NumericUpdate>>
   // For each field we keep an ordered list of NumericUpdates, key'd by the
@@ -106,7 +106,7 @@ class BufferedUpdates {
   // one that came in wins), and helps us detect faster if the same Term is
   // used to update the same field multiple times (so we later traverse it
   // only once).
-  final Map<String,LinkedHashMap<Term,NumericUpdate>> numericUpdates = new HashMap<String,LinkedHashMap<Term,NumericUpdate>>();
+  final Map<String,LinkedHashMap<Term,NumericUpdate>> numericUpdates = new HashMap<>();
 
   public static final Integer MAX_INT = Integer.valueOf(Integer.MAX_VALUE);
 
@@ -187,7 +187,7 @@ class BufferedUpdates {
   public void addNumericUpdate(NumericUpdate update, int docIDUpto) {
     LinkedHashMap<Term,NumericUpdate> fieldUpdates = numericUpdates.get(update.field);
     if (fieldUpdates == null) {
-      fieldUpdates = new LinkedHashMap<Term,NumericUpdate>();
+      fieldUpdates = new LinkedHashMap<>();
       numericUpdates.put(update.field, fieldUpdates);
       bytesUsed.addAndGet(BYTES_PER_NUMERIC_FIELD_ENTRY);
     }
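
The comment on numericUpdates above leans on two properties of LinkedHashMap: traversal follows insertion order, so replaying the buffered updates leaves the latest one in effect, and re-putting the same term replaces its value in place so each term is traversed only once. A stand-alone sketch of that behavior (illustrative only, not Lucene code):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class LastUpdateWins {
      public static void main(String[] args) {
        Map<String, Long> updatesForField = new LinkedHashMap<>();
        updatesForField.put("id:1", 10L);
        updatesForField.put("id:2", 20L);
        updatesForField.put("id:1", 30L); // same term again: value replaced, position kept
        for (Map.Entry<String, Long> e : updatesForField.entrySet()) {
          System.out.println(e.getKey() + " -> " + e.getValue());
        }
        // Output, in first-added order with the latest values:
        // id:1 -> 30
        // id:2 -> 20
      }
    }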

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/BufferedUpdatesStream.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/BufferedUpdatesStream.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/BufferedUpdatesStream.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/BufferedUpdatesStream.java Sun Mar 16 19:39:10 2014
@@ -54,7 +54,7 @@ import org.apache.lucene.util.InfoStream
 class BufferedUpdatesStream {
 
   // TODO: maybe linked list?
-  private final List<FrozenBufferedUpdates> updates = new ArrayList<FrozenBufferedUpdates>();
+  private final List<FrozenBufferedUpdates> updates = new ArrayList<>();
 
   // Starts at 1 so that SegmentInfos that have never had
   // deletes applied (whose bufferedDelGen defaults to 0)
@@ -167,7 +167,7 @@ class BufferedUpdatesStream {
 
     final long gen = nextGen++;
 
-    List<SegmentCommitInfo> infos2 = new ArrayList<SegmentCommitInfo>();
+    List<SegmentCommitInfo> infos2 = new ArrayList<>();
     infos2.addAll(infos);
     Collections.sort(infos2, sortSegInfoByDelGen);
 
@@ -240,7 +240,7 @@ class BufferedUpdatesStream {
 
         if (segAllDeletes) {
           if (allDeleted == null) {
-            allDeleted = new ArrayList<SegmentCommitInfo>();
+            allDeleted = new ArrayList<>();
           }
           allDeleted.add(info);
         }
@@ -290,7 +290,7 @@ class BufferedUpdatesStream {
 
           if (segAllDeletes) {
             if (allDeleted == null) {
-              allDeleted = new ArrayList<SegmentCommitInfo>();
+              allDeleted = new ArrayList<>();
             }
             allDeleted.add(info);
           }

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java Sun Mar 16 19:39:10 2014
@@ -92,13 +92,13 @@ public class CheckIndex {
 
     /** Empty unless you passed specific segments list to check as optional 3rd argument.
      *  @see CheckIndex#checkIndex(List) */
-    public List<String> segmentsChecked = new ArrayList<String>();
+    public List<String> segmentsChecked = new ArrayList<>();
   
     /** True if the index was created with a newer version of Lucene than the CheckIndex tool. */
     public boolean toolOutOfDate;
 
     /** List of {@link SegmentInfoStatus} instances, detailing status of each segment. */
-    public List<SegmentInfoStatus> segmentInfos = new ArrayList<SegmentInfoStatus>();
+    public List<SegmentInfoStatus> segmentInfos = new ArrayList<>();
   
     /** Directory index is in. */
     public Directory dir;
@@ -1069,7 +1069,7 @@ public class CheckIndex {
           final BlockTreeTermsReader.Stats stats = ((BlockTreeTermsReader.FieldReader) fieldTerms).computeStats();
           assert stats != null;
           if (status.blockTreeStats == null) {
-            status.blockTreeStats = new HashMap<String,BlockTreeTermsReader.Stats>();
+            status.blockTreeStats = new HashMap<>();
           }
           status.blockTreeStats.put(field, stats);
         }
@@ -1831,7 +1831,7 @@ public class CheckIndex {
     boolean doFix = false;
     boolean doCrossCheckTermVectors = false;
     boolean verbose = false;
-    List<String> onlySegments = new ArrayList<String>();
+    List<String> onlySegments = new ArrayList<>();
     String indexPath = null;
     String dirImpl = null;
     int i = 0;

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/CoalescedUpdates.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/CoalescedUpdates.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/CoalescedUpdates.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/CoalescedUpdates.java Sun Mar 16 19:39:10 2014
@@ -28,9 +28,9 @@ import org.apache.lucene.index.BufferedU
 import org.apache.lucene.util.MergedIterator;
 
 class CoalescedUpdates {
-  final Map<Query,Integer> queries = new HashMap<Query,Integer>();
-  final List<Iterable<Term>> iterables = new ArrayList<Iterable<Term>>();
-  final List<NumericUpdate> numericDVUpdates = new ArrayList<NumericUpdate>();
+  final Map<Query,Integer> queries = new HashMap<>();
+  final List<Iterable<Term>> iterables = new ArrayList<>();
+  final List<NumericUpdate> numericDVUpdates = new ArrayList<>();
   
   @Override
   public String toString() {
@@ -62,7 +62,7 @@ class CoalescedUpdates {
        for (int i = 0; i < iterables.size(); i++) {
          subs[i] = iterables.get(i).iterator();
        }
-       return new MergedIterator<Term>(subs);
+       return new MergedIterator<>(subs);
      }
    };
   }

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/CompositeReaderContext.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/CompositeReaderContext.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/CompositeReaderContext.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/CompositeReaderContext.java Sun Mar 16 19:39:10 2014
@@ -80,7 +80,7 @@ public final class CompositeReaderContex
   
   private static final class Builder {
     private final CompositeReader reader;
-    private final List<AtomicReaderContext> leaves = new ArrayList<AtomicReaderContext>();
+    private final List<AtomicReaderContext> leaves = new ArrayList<>();
     private int leafDocBase = 0;
     
     public Builder(CompositeReader reader) {

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java Sun Mar 16 19:39:10 2014
@@ -47,7 +47,7 @@ public class ConcurrentMergeScheduler ex
   private int mergeThreadPriority = -1;
 
   /** List of currently active {@link MergeThread}s. */
-  protected List<MergeThread> mergeThreads = new ArrayList<MergeThread>();
+  protected List<MergeThread> mergeThreads = new ArrayList<>();
   
   /** 
    * Default {@code maxThreadCount}.
@@ -171,7 +171,7 @@ public class ConcurrentMergeScheduler ex
 
     // Only look at threads that are alive & not in the
     // process of stopping (ie have an active merge):
-    final List<MergeThread> activeMerges = new ArrayList<MergeThread>();
+    final List<MergeThread> activeMerges = new ArrayList<>();
 
     int threadIdx = 0;
     while (threadIdx < mergeThreads.size()) {
@@ -308,7 +308,7 @@ public class ConcurrentMergeScheduler ex
   }
 
   @Override
-  public synchronized void merge(IndexWriter writer) throws IOException {
+  public synchronized void merge(IndexWriter writer, MergeTrigger trigger, boolean newMergesFound) throws IOException {
 
     assert !Thread.holdsLock(writer);
 
@@ -571,7 +571,7 @@ public class ConcurrentMergeScheduler ex
     ConcurrentMergeScheduler clone = (ConcurrentMergeScheduler) super.clone();
     clone.writer = null;
     clone.dir = null;
-    clone.mergeThreads = new ArrayList<MergeThread>();
+    clone.mergeThreads = new ArrayList<>();
     return clone;
   }
 }
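
Note that merge() above now takes the MergeTrigger and a newMergesFound flag in addition to the IndexWriter, so custom schedulers extending ConcurrentMergeScheduler would need to override the three-argument form. A hypothetical subclass (not part of this commit) written against the new signature:

    import java.io.IOException;

    import org.apache.lucene.index.ConcurrentMergeScheduler;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.MergeTrigger;

    public class LoggingMergeScheduler extends ConcurrentMergeScheduler {
      @Override
      public synchronized void merge(IndexWriter writer, MergeTrigger trigger,
                                     boolean newMergesFound) throws IOException {
        // Purely illustrative: observe why the scheduler was invoked, then delegate.
        System.out.println("merge requested: trigger=" + trigger
            + ", newMergesFound=" + newMergesFound);
        super.merge(writer, trigger, newMergesFound);
      }
    }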

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DirectoryReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DirectoryReader.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DirectoryReader.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DirectoryReader.java Sun Mar 16 19:39:10 2014
@@ -224,7 +224,7 @@ public abstract class DirectoryReader ex
   public static List<IndexCommit> listCommits(Directory dir) throws IOException {
     final String[] files = dir.listAll();
 
-    List<IndexCommit> commits = new ArrayList<IndexCommit>();
+    List<IndexCommit> commits = new ArrayList<>();
 
     SegmentInfos latest = new SegmentInfos();
     latest.read(dir);

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocFieldProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocFieldProcessor.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocFieldProcessor.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocFieldProcessor.java Sun Mar 16 19:39:10 2014
@@ -69,7 +69,7 @@ final class DocFieldProcessor extends Do
   @Override
   public void flush(SegmentWriteState state) throws IOException {
 
-    Map<String,DocFieldConsumerPerField> childFields = new HashMap<String,DocFieldConsumerPerField>();
+    Map<String,DocFieldConsumerPerField> childFields = new HashMap<>();
     Collection<DocFieldConsumerPerField> fields = fields();
     for (DocFieldConsumerPerField f : fields) {
       childFields.put(f.getFieldInfo().name, f);
@@ -132,7 +132,7 @@ final class DocFieldProcessor extends Do
   }
 
   public Collection<DocFieldConsumerPerField> fields() {
-    Collection<DocFieldConsumerPerField> fields = new HashSet<DocFieldConsumerPerField>();
+    Collection<DocFieldConsumerPerField> fields = new HashSet<>();
     for(int i=0;i<fieldHash.length;i++) {
       DocFieldProcessorPerField field = fieldHash[i];
       while(field != null) {
@@ -209,11 +209,6 @@ final class DocFieldProcessor extends Do
       final DocFieldProcessorPerField perField = fields[i];
       perField.consumer.processFields(perField.fields, perField.fieldCount);
     }
-
-    if (docState.maxTermPrefix != null && docState.infoStream.isEnabled("IW")) {
-      docState.infoStream.message("IW", "WARNING: document contains at least one immense term (whose UTF8 encoding is longer than the max length " + DocumentsWriterPerThread.MAX_TERM_LENGTH_UTF8 + "), all of which were skipped.  Please correct the analyzer to not produce such terms.  The prefix of the first immense term is: '" + docState.maxTermPrefix + "...'");
-      docState.maxTermPrefix = null;
-    }
   }
 
   private DocFieldProcessorPerField processField(FieldInfos.Builder fieldInfos,

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocInverter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocInverter.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocInverter.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocInverter.java Sun Mar 16 19:39:10 2014
@@ -41,8 +41,8 @@ final class DocInverter extends DocField
   @Override
   void flush(Map<String, DocFieldConsumerPerField> fieldsToFlush, SegmentWriteState state) throws IOException {
 
-    Map<String, InvertedDocConsumerPerField> childFieldsToFlush = new HashMap<String, InvertedDocConsumerPerField>();
-    Map<String, InvertedDocEndConsumerPerField> endChildFieldsToFlush = new HashMap<String, InvertedDocEndConsumerPerField>();
+    Map<String, InvertedDocConsumerPerField> childFieldsToFlush = new HashMap<>();
+    Map<String, InvertedDocEndConsumerPerField> endChildFieldsToFlush = new HashMap<>();
 
     for (Map.Entry<String, DocFieldConsumerPerField> fieldToFlush : fieldsToFlush.entrySet()) {
       DocInverterPerField perField = (DocInverterPerField) fieldToFlush.getValue();

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocInverterPerField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocInverterPerField.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocInverterPerField.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocInverterPerField.java Sun Mar 16 19:39:10 2014
@@ -23,7 +23,6 @@ import org.apache.lucene.analysis.TokenS
 import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
 import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
 import org.apache.lucene.index.FieldInfo.IndexOptions;
-import org.apache.lucene.util.IOUtils;
 
 /**
  * Holds state for inverting all occurrences of a single
@@ -182,6 +181,17 @@ final class DocInverterPerField extends 
           // when we come back around to the field...
           fieldState.position += posIncrAttribute.getPositionIncrement();
           fieldState.offset += offsetAttribute.endOffset();
+
+
+          if (docState.maxTermPrefix != null) {
+            final String msg = "Document contains at least one immense term in field=\"" + fieldInfo.name + "\" (whose UTF8 encoding is longer than the max length " + DocumentsWriterPerThread.MAX_TERM_LENGTH_UTF8 + "), all of which were skipped.  Please correct the analyzer to not produce such terms.  The prefix of the first immense term is: '" + docState.maxTermPrefix + "...'";
+            if (docState.infoStream.isEnabled("IW")) {
+              docState.infoStream.message("IW", "ERROR: " + msg);
+            }
+            docState.maxTermPrefix = null;
+            throw new IllegalArgumentException(msg);
+          }
+
           /* if success was false above there is an exception coming through and we won't get here.*/
           succeededInProcessingField = true;
         } finally {
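
Taken together with the block removed from DocFieldProcessor above, this moves the handling of over-long terms: instead of an infoStream warning logged after the fact, the field inverter now names the offending field and throws IllegalArgumentException, so the failure should surface to the caller of addDocument. A rough caller-side sketch against the 4.x-era API (the analyzer choice, Version constant and sizes are illustrative assumptions, not taken from this commit; KeywordAnalyzer lives in the analyzers-common module):

    import java.util.Arrays;

    import org.apache.lucene.analysis.core.KeywordAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.TextField;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.store.RAMDirectory;
    import org.apache.lucene.util.Version;

    public class ImmenseTermDemo {
      public static void main(String[] args) throws Exception {
        char[] huge = new char[100000];   // far beyond the ~32766 byte term limit
        Arrays.fill(huge, 'a');
        IndexWriterConfig cfg =
            new IndexWriterConfig(Version.LUCENE_CURRENT, new KeywordAnalyzer());
        try (IndexWriter w = new IndexWriter(new RAMDirectory(), cfg)) {
          Document doc = new Document();
          // KeywordAnalyzer keeps the whole value as a single, immense term.
          doc.add(new TextField("body", new String(huge), Field.Store.NO));
          try {
            w.addDocument(doc);
          } catch (IllegalArgumentException e) {
            System.out.println("document rejected: " + e.getMessage());
          }
        }
      }
    }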

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocTermOrds.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocTermOrds.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocTermOrds.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocTermOrds.java Sun Mar 16 19:39:10 2014
@@ -342,7 +342,7 @@ public class DocTermOrds {
         } catch (UnsupportedOperationException uoe) {
           // Reader cannot provide ord support, so we wrap
           // our own support by creating our own terms index:
-          indexedTerms = new ArrayList<BytesRef>();
+          indexedTerms = new ArrayList<>();
           indexedTermsBytes = new PagedBytes(15);
           //System.out.println("NO ORDS");
         }

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocValuesProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocValuesProcessor.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocValuesProcessor.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocValuesProcessor.java Sun Mar 16 19:39:10 2014
@@ -33,7 +33,7 @@ final class DocValuesProcessor extends S
   // TODO: somewhat wasteful we also keep a map here; would
   // be more efficient if we could "reuse" the map/hash
   // lookup DocFieldProcessor already did "above"
-  private final Map<String,DocValuesWriter> writers = new HashMap<String,DocValuesWriter>();
+  private final Map<String,DocValuesWriter> writers = new HashMap<>();
   private final Counter bytesUsed;
 
   public DocValuesProcessor(Counter bytesUsed) {

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java Sun Mar 16 19:39:10 2014
@@ -135,7 +135,7 @@ final class DocumentsWriter {
     this.perThreadPool = config.getIndexerThreadPool();
     flushPolicy = config.getFlushPolicy();
     this.writer = writer;
-    this.events = new ConcurrentLinkedQueue<Event>();
+    this.events = new ConcurrentLinkedQueue<>();
     flushControl = new DocumentsWriterFlushControl(this, config, writer.bufferedUpdatesStream);
   }
   
@@ -207,7 +207,7 @@ final class DocumentsWriter {
   synchronized void abort(IndexWriter writer) {
     assert !Thread.holdsLock(writer) : "IndexWriter lock should never be hold when aborting";
     boolean success = false;
-    final Set<String> newFilesSet = new HashSet<String>();
+    final Set<String> newFilesSet = new HashSet<>();
     try {
       deleteQueue.clear();
       if (infoStream.isEnabled("DW")) {
@@ -243,7 +243,7 @@ final class DocumentsWriter {
     try {
       deleteQueue.clear();
       final int limit = perThreadPool.getMaxThreadStates();
-      final Set<String> newFilesSet = new HashSet<String>();
+      final Set<String> newFilesSet = new HashSet<>();
       for (int i = 0; i < limit; i++) {
         final ThreadState perThread = perThreadPool.getThreadState(i);
         perThread.lock();

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterDeleteQueue.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterDeleteQueue.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterDeleteQueue.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterDeleteQueue.java Sun Mar 16 19:39:10 2014
@@ -93,7 +93,7 @@ final class DocumentsWriterDeleteQueue {
      * we use a sentinel instance as our initial tail. No slice will ever try to
      * apply this tail since the head is always omitted.
      */
-    tail = new Node<Object>(null); // sentinel
+    tail = new Node<>(null); // sentinel
     globalSlice = new DeleteSlice(tail);
   }
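
The constructor above seeds the delete queue with a dummy tail node, the classic sentinel trick for linked structures: consumers only ever look at the nodes after a slice's head, so the sentinel's null payload is never applied. A stand-alone sketch of the idea (illustrative only, not Lucene code):

    import java.util.ArrayList;
    import java.util.List;

    public class SentinelQueue<T> {
      static final class Node<T> {
        final T item;
        Node<T> next;
        Node(T item) { this.item = item; }
      }

      private final Node<T> head = new Node<>(null); // sentinel: its item is never read
      private Node<T> tail = head;

      void add(T item) {
        Node<T> node = new Node<>(item);
        tail.next = node;
        tail = node;
      }

      List<T> drain() {
        List<T> items = new ArrayList<>();
        // Start at head.next, so the sentinel is always omitted.
        for (Node<T> n = head.next; n != null; n = n.next) {
          items.add(n.item);
        }
        return items;
      }

      public static void main(String[] args) {
        SentinelQueue<String> q = new SentinelQueue<>();
        q.add("delete term:a");
        q.add("delete term:b");
        System.out.println(q.drain()); // prints: [delete term:a, delete term:b]
      }
    }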
 

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java Sun Mar 16 19:39:10 2014
@@ -51,10 +51,10 @@ final class DocumentsWriterFlushControl 
   private int numDocsSinceStalled = 0; // only with assert
   final AtomicBoolean flushDeletes = new AtomicBoolean(false);
   private boolean fullFlush = false;
-  private final Queue<DocumentsWriterPerThread> flushQueue = new LinkedList<DocumentsWriterPerThread>();
+  private final Queue<DocumentsWriterPerThread> flushQueue = new LinkedList<>();
   // only for safety reasons if a DWPT is close to the RAM limit
-  private final Queue<BlockedFlush> blockedFlushes = new LinkedList<BlockedFlush>();
-  private final IdentityHashMap<DocumentsWriterPerThread, Long> flushingWriters = new IdentityHashMap<DocumentsWriterPerThread, Long>();
+  private final Queue<BlockedFlush> blockedFlushes = new LinkedList<>();
+  private final IdentityHashMap<DocumentsWriterPerThread, Long> flushingWriters = new IdentityHashMap<>();
 
 
   double maxConfiguredRamBuffer = 0;
@@ -531,7 +531,7 @@ final class DocumentsWriterFlushControl 
     return true;
   }
 
-  private final List<DocumentsWriterPerThread> fullFlushBuffer = new ArrayList<DocumentsWriterPerThread>();
+  private final List<DocumentsWriterPerThread> fullFlushBuffer = new ArrayList<>();
 
   void addFlushableState(ThreadState perThread) {
     if (infoStream.isEnabled("DWFC")) {

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushQueue.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushQueue.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushQueue.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushQueue.java Sun Mar 16 19:39:10 2014
@@ -28,7 +28,7 @@ import org.apache.lucene.index.Documents
  * @lucene.internal 
  */
 class DocumentsWriterFlushQueue {
-  private final Queue<FlushTicket> queue = new LinkedList<FlushTicket>();
+  private final Queue<FlushTicket> queue = new LinkedList<>();
   // we track tickets separately since count must be present even before the ticket is
   // constructed ie. queue.size would not reflect it.
   private final AtomicInteger ticketCount = new AtomicInteger();

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java Sun Mar 16 19:39:10 2014
@@ -464,7 +464,7 @@ class DocumentsWriterPerThread {
     try {
       consumer.flush(flushState);
       pendingUpdates.terms.clear();
-      segmentInfo.setFiles(new HashSet<String>(directory.getCreatedFiles()));
+      segmentInfo.setFiles(new HashSet<>(directory.getCreatedFiles()));
 
       final SegmentCommitInfo segmentInfoPerCommit = new SegmentCommitInfo(segmentInfo, 0, -1L, -1L);
       if (infoStream.isEnabled("DWPT")) {
@@ -510,7 +510,7 @@ class DocumentsWriterPerThread {
     }
   }
   
-  private final Set<String> filesToDelete = new HashSet<String>(); 
+  private final Set<String> filesToDelete = new HashSet<>();
   
   public Set<String> pendingFilesToDelete() {
     return filesToDelete;

Modified: lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterStallControl.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterStallControl.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterStallControl.java (original)
+++ lucene/dev/branches/lucene5376_2/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterStallControl.java Sun Mar 16 19:39:10 2014
@@ -42,7 +42,7 @@ final class DocumentsWriterStallControl 
   private volatile boolean stalled;
   private int numWaiting; // only with assert
   private boolean wasStalled; // only with assert
-  private final Map<Thread, Boolean> waiting = new IdentityHashMap<Thread, Boolean>(); // only with assert
+  private final Map<Thread, Boolean> waiting = new IdentityHashMap<>(); // only with assert
   
   /**
    * Update the stalled flag status. This method will set the stalled flag to