Posted to commits@lucene.apache.org by rm...@apache.org on 2014/03/12 15:39:39 UTC

svn commit: r1576755 [5/27] - in /lucene/dev/trunk: lucene/ lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/ lucene/analysis/common/src/java/org/apache/lucene/analysis/compound/ lucene/analysis/common/src/java/org/apache/lucene/an...
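
The diffs below are a mechanical cleanup: explicit generic type arguments on constructor calls are replaced with the Java 7 diamond operator, letting the compiler infer the type parameters from the declaration on the left-hand side. A minimal, self-contained sketch of the before/after pattern (the class and field names here are illustrative, not taken from the patch):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    class DiamondExample {
      // Before Java 7: the type arguments are spelled out on both sides.
      private final Map<String, Integer> countsOld = new HashMap<String, Integer>();

      // With the diamond operator: <String, Integer> is inferred from the
      // declared type, so the right-hand side no longer repeats it.
      private final Map<String, Integer> counts = new HashMap<>();

      // Inference also handles nested generics, as in the BufferedUpdates
      // change below (Map<String,LinkedHashMap<Term,NumericUpdate>>).
      private final List<Map<String, Integer>> rows = new ArrayList<>();
    }

The conversion is source-only: the erased bytecode is identical, so no behavior changes.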

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingTermVectorsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingTermVectorsWriter.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingTermVectorsWriter.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingTermVectorsWriter.java Wed Mar 12 14:39:17 2014
@@ -92,7 +92,7 @@ public final class CompressingTermVector
     final int posStart, offStart, payStart;
     DocData(int numFields, int posStart, int offStart, int payStart) {
       this.numFields = numFields;
-      this.fields = new ArrayDeque<FieldData>(numFields);
+      this.fields = new ArrayDeque<>(numFields);
       this.posStart = posStart;
       this.offStart = offStart;
       this.payStart = payStart;
@@ -214,7 +214,7 @@ public final class CompressingTermVector
     this.chunkSize = chunkSize;
 
     numDocs = 0;
-    pendingDocs = new ArrayDeque<DocData>();
+    pendingDocs = new ArrayDeque<>();
     termSuffixes = new GrowableByteArrayDataOutput(ArrayUtil.oversize(chunkSize, 1));
     payloadBytes = new GrowableByteArrayDataOutput(ArrayUtil.oversize(1, 1));
     lastTerm = new BytesRef(ArrayUtil.oversize(30, 1));
@@ -393,7 +393,7 @@ public final class CompressingTermVector
 
   /** Returns a sorted array containing unique field numbers */
   private int[] flushFieldNums() throws IOException {
-    SortedSet<Integer> fieldNums = new TreeSet<Integer>();
+    SortedSet<Integer> fieldNums = new TreeSet<>();
     for (DocData dd : pendingDocs) {
       for (FieldData fd : dd.fields) {
         fieldNums.add(fd.fieldNum);

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40DocValuesReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40DocValuesReader.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40DocValuesReader.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40DocValuesReader.java Wed Mar 12 14:39:17 2014
@@ -57,11 +57,11 @@ final class Lucene40DocValuesReader exte
 
   // ram instances we have already loaded
   private final Map<Integer,NumericDocValues> numericInstances =
-      new HashMap<Integer,NumericDocValues>();
+      new HashMap<>();
   private final Map<Integer,BinaryDocValues> binaryInstances =
-      new HashMap<Integer,BinaryDocValues>();
+      new HashMap<>();
   private final Map<Integer,SortedDocValues> sortedInstances =
-      new HashMap<Integer,SortedDocValues>();
+      new HashMap<>();
 
   private final AtomicLong ramBytesUsed;
 

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40TermVectorsReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40TermVectorsReader.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40TermVectorsReader.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40TermVectorsReader.java Wed Mar 12 14:39:17 2014
@@ -221,7 +221,7 @@ public class Lucene40TermVectorsReader e
   private class TVFields extends Fields {
     private final int[] fieldNumbers;
     private final long[] fieldFPs;
-    private final Map<Integer,Integer> fieldNumberToIndex = new HashMap<Integer,Integer>();
+    private final Map<Integer,Integer> fieldNumberToIndex = new HashMap<>();
 
     public TVFields(int docID) throws IOException {
       seekTvx(docID);

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42DocValuesProducer.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42DocValuesProducer.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42DocValuesProducer.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42DocValuesProducer.java Wed Mar 12 14:39:17 2014
@@ -67,11 +67,11 @@ class Lucene42DocValuesProducer extends 
   
   // ram instances we have already loaded
   private final Map<Integer,NumericDocValues> numericInstances = 
-      new HashMap<Integer,NumericDocValues>();
+      new HashMap<>();
   private final Map<Integer,BinaryDocValues> binaryInstances =
-      new HashMap<Integer,BinaryDocValues>();
+      new HashMap<>();
   private final Map<Integer,FST<Long>> fstInstances =
-      new HashMap<Integer,FST<Long>>();
+      new HashMap<>();
   
   private final int maxDoc;
   private final AtomicLong ramBytesUsed;
@@ -103,9 +103,9 @@ class Lucene42DocValuesProducer extends 
       version = CodecUtil.checkHeader(in, metaCodec, 
                                       VERSION_START,
                                       VERSION_CURRENT);
-      numerics = new HashMap<Integer,NumericEntry>();
-      binaries = new HashMap<Integer,BinaryEntry>();
-      fsts = new HashMap<Integer,FSTEntry>();
+      numerics = new HashMap<>();
+      binaries = new HashMap<>();
+      fsts = new HashMap<>();
       readFields(in, state.fieldInfos);
 
       success = true;
@@ -297,7 +297,7 @@ class Lucene42DocValuesProducer extends 
       instance = fstInstances.get(field.number);
       if (instance == null) {
         data.seek(entry.offset);
-        instance = new FST<Long>(data, PositiveIntOutputs.getSingleton());
+        instance = new FST<>(data, PositiveIntOutputs.getSingleton());
         ramBytesUsed.addAndGet(instance.sizeInBytes());
         fstInstances.put(field.number, instance);
       }
@@ -307,10 +307,10 @@ class Lucene42DocValuesProducer extends 
     
     // per-thread resources
     final BytesReader in = fst.getBytesReader();
-    final Arc<Long> firstArc = new Arc<Long>();
-    final Arc<Long> scratchArc = new Arc<Long>();
+    final Arc<Long> firstArc = new Arc<>();
+    final Arc<Long> scratchArc = new Arc<>();
     final IntsRef scratchInts = new IntsRef();
-    final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<Long>(fst); 
+    final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<>(fst);
     
     return new SortedDocValues() {
       @Override
@@ -372,7 +372,7 @@ class Lucene42DocValuesProducer extends 
       instance = fstInstances.get(field.number);
       if (instance == null) {
         data.seek(entry.offset);
-        instance = new FST<Long>(data, PositiveIntOutputs.getSingleton());
+        instance = new FST<>(data, PositiveIntOutputs.getSingleton());
         ramBytesUsed.addAndGet(instance.sizeInBytes());
         fstInstances.put(field.number, instance);
       }
@@ -382,10 +382,10 @@ class Lucene42DocValuesProducer extends 
     
     // per-thread resources
     final BytesReader in = fst.getBytesReader();
-    final Arc<Long> firstArc = new Arc<Long>();
-    final Arc<Long> scratchArc = new Arc<Long>();
+    final Arc<Long> firstArc = new Arc<>();
+    final Arc<Long> scratchArc = new Arc<>();
     final IntsRef scratchInts = new IntsRef();
-    final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<Long>(fst); 
+    final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<>(fst);
     final BytesRef ref = new BytesRef();
     final ByteArrayDataInput input = new ByteArrayDataInput();
     return new SortedSetDocValues() {
@@ -493,14 +493,14 @@ class Lucene42DocValuesProducer extends 
     // maybe we should add a FSTEnum that supports this operation?
     final FST<Long> fst;
     final FST.BytesReader bytesReader;
-    final Arc<Long> firstArc = new Arc<Long>();
-    final Arc<Long> scratchArc = new Arc<Long>();
+    final Arc<Long> firstArc = new Arc<>();
+    final Arc<Long> scratchArc = new Arc<>();
     final IntsRef scratchInts = new IntsRef();
     final BytesRef scratchBytes = new BytesRef();
     
     FSTTermsEnum(FST<Long> fst) {
       this.fst = fst;
-      in = new BytesRefFSTEnum<Long>(fst);
+      in = new BytesRefFSTEnum<>(fst);
       bytesReader = fst.getBytesReader();
     }
 

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42NormsConsumer.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42NormsConsumer.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42NormsConsumer.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene42/Lucene42NormsConsumer.java Wed Mar 12 14:39:17 2014
@@ -131,7 +131,7 @@ class Lucene42NormsConsumer extends DocV
       } else {
         meta.writeByte(TABLE_COMPRESSED); // table-compressed
         Long[] decode = uniqueValues.toArray(new Long[uniqueValues.size()]);
-        final HashMap<Long,Integer> encode = new HashMap<Long,Integer>();
+        final HashMap<Long,Integer> encode = new HashMap<>();
         data.writeVInt(decode.length);
         for (int i = 0; i < decode.length; i++) {
           data.writeLong(decode[i]);

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene45/Lucene45DocValuesConsumer.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene45/Lucene45DocValuesConsumer.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene45/Lucene45DocValuesConsumer.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene45/Lucene45DocValuesConsumer.java Wed Mar 12 14:39:17 2014
@@ -189,7 +189,7 @@ public class Lucene45DocValuesConsumer e
         break;
       case TABLE_COMPRESSED:
         final Long[] decode = uniqueValues.toArray(new Long[uniqueValues.size()]);
-        final HashMap<Long,Integer> encode = new HashMap<Long,Integer>();
+        final HashMap<Long,Integer> encode = new HashMap<>();
         meta.writeVInt(decode.length);
         for (int i = 0; i < decode.length; i++) {
           meta.writeLong(decode[i]);

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene45/Lucene45DocValuesProducer.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene45/Lucene45DocValuesProducer.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene45/Lucene45DocValuesProducer.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/lucene45/Lucene45DocValuesProducer.java Wed Mar 12 14:39:17 2014
@@ -73,8 +73,8 @@ public class Lucene45DocValuesProducer e
   private final int version;
 
   // memory-resident structures
-  private final Map<Integer,MonotonicBlockPackedReader> addressInstances = new HashMap<Integer,MonotonicBlockPackedReader>();
-  private final Map<Integer,MonotonicBlockPackedReader> ordIndexInstances = new HashMap<Integer,MonotonicBlockPackedReader>();
+  private final Map<Integer,MonotonicBlockPackedReader> addressInstances = new HashMap<>();
+  private final Map<Integer,MonotonicBlockPackedReader> ordIndexInstances = new HashMap<>();
   
   /** expert: instantiates a new reader */
   protected Lucene45DocValuesProducer(SegmentReadState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) throws IOException {
@@ -87,11 +87,11 @@ public class Lucene45DocValuesProducer e
       version = CodecUtil.checkHeader(in, metaCodec, 
                                       Lucene45DocValuesFormat.VERSION_START,
                                       Lucene45DocValuesFormat.VERSION_CURRENT);
-      numerics = new HashMap<Integer,NumericEntry>();
-      ords = new HashMap<Integer,NumericEntry>();
-      ordIndexes = new HashMap<Integer,NumericEntry>();
-      binaries = new HashMap<Integer,BinaryEntry>();
-      sortedSets = new HashMap<Integer,SortedSetEntry>();
+      numerics = new HashMap<>();
+      ords = new HashMap<>();
+      ordIndexes = new HashMap<>();
+      binaries = new HashMap<>();
+      sortedSets = new HashMap<>();
       readFields(in, state.fieldInfos);
 
       success = true;

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/perfield/PerFieldDocValuesFormat.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/perfield/PerFieldDocValuesFormat.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/perfield/PerFieldDocValuesFormat.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/perfield/PerFieldDocValuesFormat.java Wed Mar 12 14:39:17 2014
@@ -92,8 +92,8 @@ public abstract class PerFieldDocValuesF
     
   private class FieldsWriter extends DocValuesConsumer {
 
-    private final Map<DocValuesFormat,ConsumerAndSuffix> formats = new HashMap<DocValuesFormat,ConsumerAndSuffix>();
-    private final Map<String,Integer> suffixes = new HashMap<String,Integer>();
+    private final Map<DocValuesFormat,ConsumerAndSuffix> formats = new HashMap<>();
+    private final Map<String,Integer> suffixes = new HashMap<>();
     
     private final SegmentWriteState segmentWriteState;
     
@@ -209,8 +209,8 @@ public abstract class PerFieldDocValuesF
 
   private class FieldsReader extends DocValuesProducer {
 
-    private final Map<String,DocValuesProducer> fields = new TreeMap<String,DocValuesProducer>();
-    private final Map<String,DocValuesProducer> formats = new HashMap<String,DocValuesProducer>();
+    private final Map<String,DocValuesProducer> fields = new TreeMap<>();
+    private final Map<String,DocValuesProducer> formats = new HashMap<>();
 
     public FieldsReader(final SegmentReadState readState) throws IOException {
 
@@ -245,7 +245,7 @@ public abstract class PerFieldDocValuesF
 
     private FieldsReader(FieldsReader other) {
 
-      Map<DocValuesProducer,DocValuesProducer> oldToNew = new IdentityHashMap<DocValuesProducer,DocValuesProducer>();
+      Map<DocValuesProducer,DocValuesProducer> oldToNew = new IdentityHashMap<>();
       // First clone all formats
       for(Map.Entry<String,DocValuesProducer> ent : other.formats.entrySet()) {
         DocValuesProducer values = ent.getValue();

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/perfield/PerFieldPostingsFormat.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/perfield/PerFieldPostingsFormat.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/perfield/PerFieldPostingsFormat.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/codecs/perfield/PerFieldPostingsFormat.java Wed Mar 12 14:39:17 2014
@@ -75,7 +75,7 @@ public abstract class PerFieldPostingsFo
 
   /** Group of fields written by one PostingsFormat */
   static class FieldsGroup {
-    final Set<String> fields = new TreeSet<String>();
+    final Set<String> fields = new TreeSet<>();
     int suffix;
 
     /** Custom SegmentWriteState for this group of fields,
@@ -111,10 +111,10 @@ public abstract class PerFieldPostingsFo
 
       // Maps a PostingsFormat instance to the suffix it
       // should use
-      Map<PostingsFormat,FieldsGroup> formatToGroups = new HashMap<PostingsFormat,FieldsGroup>();
+      Map<PostingsFormat,FieldsGroup> formatToGroups = new HashMap<>();
 
       // Holds last suffix of each PostingFormat name
-      Map<String,Integer> suffixes = new HashMap<String,Integer>();
+      Map<String,Integer> suffixes = new HashMap<>();
 
       // First pass: assign field -> PostingsFormat
       for(String field : fields) {
@@ -182,8 +182,8 @@ public abstract class PerFieldPostingsFo
 
   private class FieldsReader extends FieldsProducer {
 
-    private final Map<String,FieldsProducer> fields = new TreeMap<String,FieldsProducer>();
-    private final Map<String,FieldsProducer> formats = new HashMap<String,FieldsProducer>();
+    private final Map<String,FieldsProducer> fields = new TreeMap<>();
+    private final Map<String,FieldsProducer> formats = new HashMap<>();
 
     public FieldsReader(final SegmentReadState readState) throws IOException {
 

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/document/Document.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/document/Document.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/document/Document.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/document/Document.java Wed Mar 12 14:39:17 2014
@@ -44,7 +44,7 @@ import org.apache.lucene.util.FilterIter
 
 public final class Document implements IndexDocument {
 
-  private final List<Field> fields = new ArrayList<Field>();
+  private final List<Field> fields = new ArrayList<>();
 
   /** Constructs a new document with no fields. */
   public Document() {}
@@ -140,7 +140,7 @@ public final class Document implements I
   * @return a <code>BytesRef[]</code> of binary field values
   */
   public final BytesRef[] getBinaryValues(String name) {
-    final List<BytesRef> result = new ArrayList<BytesRef>();
+    final List<BytesRef> result = new ArrayList<>();
 
     for (Iterator<StorableField> it = storedFieldsIterator(); it.hasNext(); ) {
       StorableField field = it.next();
@@ -199,7 +199,7 @@ public final class Document implements I
    * @return a <code>Field[]</code> array
    */
   public Field[] getFields(String name) {
-    List<Field> result = new ArrayList<Field>();
+    List<Field> result = new ArrayList<>();
     for (Field field : fields) {
       if (field.name().equals(name)) {
         result.add(field);
@@ -234,7 +234,7 @@ public final class Document implements I
    * @return a <code>String[]</code> of field values
    */
   public final String[] getValues(String name) {
-    List<String> result = new ArrayList<String>();
+    List<String> result = new ArrayList<>();
 
     for (Iterator<StorableField> it = storedFieldsIterator(); it.hasNext(); ) {
       StorableField field = it.next();

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/document/DocumentStoredFieldVisitor.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/document/DocumentStoredFieldVisitor.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/document/DocumentStoredFieldVisitor.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/document/DocumentStoredFieldVisitor.java Wed Mar 12 14:39:17 2014
@@ -49,7 +49,7 @@ public class DocumentStoredFieldVisitor 
 
   /** Load only fields named in the provided fields. */
   public DocumentStoredFieldVisitor(String... fields) {
-    fieldsToAdd = new HashSet<String>(fields.length);
+    fieldsToAdd = new HashSet<>(fields.length);
     for(String field : fields) {
       fieldsToAdd.add(field);
     }

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/BufferedUpdates.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/BufferedUpdates.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/BufferedUpdates.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/BufferedUpdates.java Wed Mar 12 14:39:17 2014
@@ -95,9 +95,9 @@ class BufferedUpdates {
   
   final AtomicInteger numTermDeletes = new AtomicInteger();
   final AtomicInteger numNumericUpdates = new AtomicInteger();
-  final Map<Term,Integer> terms = new HashMap<Term,Integer>();
-  final Map<Query,Integer> queries = new HashMap<Query,Integer>();
-  final List<Integer> docIDs = new ArrayList<Integer>();
+  final Map<Term,Integer> terms = new HashMap<>();
+  final Map<Query,Integer> queries = new HashMap<>();
+  final List<Integer> docIDs = new ArrayList<>();
 
   // Map<dvField,Map<updateTerm,NumericUpdate>>
   // For each field we keep an ordered list of NumericUpdates, key'd by the
@@ -106,7 +106,7 @@ class BufferedUpdates {
   // one that came in wins), and helps us detect faster if the same Term is
   // used to update the same field multiple times (so we later traverse it
   // only once).
-  final Map<String,LinkedHashMap<Term,NumericUpdate>> numericUpdates = new HashMap<String,LinkedHashMap<Term,NumericUpdate>>();
+  final Map<String,LinkedHashMap<Term,NumericUpdate>> numericUpdates = new HashMap<>();
 
   public static final Integer MAX_INT = Integer.valueOf(Integer.MAX_VALUE);
 
@@ -187,7 +187,7 @@ class BufferedUpdates {
   public void addNumericUpdate(NumericUpdate update, int docIDUpto) {
     LinkedHashMap<Term,NumericUpdate> fieldUpdates = numericUpdates.get(update.field);
     if (fieldUpdates == null) {
-      fieldUpdates = new LinkedHashMap<Term,NumericUpdate>();
+      fieldUpdates = new LinkedHashMap<>();
       numericUpdates.put(update.field, fieldUpdates);
       bytesUsed.addAndGet(BYTES_PER_NUMERIC_FIELD_ENTRY);
     }

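The comment in the hunk above leans on LinkedHashMap's iteration contract: re-putting an existing key replaces the value but keeps the key's original insertion position, so each update Term appears once and traversal stays in arrival order. A small standalone sketch of that behavior (the keys and values here are illustrative only):

    import java.util.LinkedHashMap;

    class LastUpdateWins {
      public static void main(String[] args) {
        LinkedHashMap<String, Integer> updates = new LinkedHashMap<>();
        updates.put("term:a", 10);
        updates.put("term:b", 20);
        updates.put("term:a", 30); // same key: value replaced, position kept

        // Prints {term:a=30, term:b=20} -- one entry per key (the last
        // update wins), iterated in original insertion order.
        System.out.println(updates);
      }
    }
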
Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/BufferedUpdatesStream.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/BufferedUpdatesStream.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/BufferedUpdatesStream.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/BufferedUpdatesStream.java Wed Mar 12 14:39:17 2014
@@ -54,7 +54,7 @@ import org.apache.lucene.util.InfoStream
 class BufferedUpdatesStream {
 
   // TODO: maybe linked list?
-  private final List<FrozenBufferedUpdates> updates = new ArrayList<FrozenBufferedUpdates>();
+  private final List<FrozenBufferedUpdates> updates = new ArrayList<>();
 
   // Starts at 1 so that SegmentInfos that have never had
   // deletes applied (whose bufferedDelGen defaults to 0)
@@ -167,7 +167,7 @@ class BufferedUpdatesStream {
 
     final long gen = nextGen++;
 
-    List<SegmentCommitInfo> infos2 = new ArrayList<SegmentCommitInfo>();
+    List<SegmentCommitInfo> infos2 = new ArrayList<>();
     infos2.addAll(infos);
     Collections.sort(infos2, sortSegInfoByDelGen);
 
@@ -240,7 +240,7 @@ class BufferedUpdatesStream {
 
         if (segAllDeletes) {
           if (allDeleted == null) {
-            allDeleted = new ArrayList<SegmentCommitInfo>();
+            allDeleted = new ArrayList<>();
           }
           allDeleted.add(info);
         }
@@ -290,7 +290,7 @@ class BufferedUpdatesStream {
 
           if (segAllDeletes) {
             if (allDeleted == null) {
-              allDeleted = new ArrayList<SegmentCommitInfo>();
+              allDeleted = new ArrayList<>();
             }
             allDeleted.add(info);
           }

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java Wed Mar 12 14:39:17 2014
@@ -92,13 +92,13 @@ public class CheckIndex {
 
     /** Empty unless you passed specific segments list to check as optional 3rd argument.
      *  @see CheckIndex#checkIndex(List) */
-    public List<String> segmentsChecked = new ArrayList<String>();
+    public List<String> segmentsChecked = new ArrayList<>();
   
     /** True if the index was created with a newer version of Lucene than the CheckIndex tool. */
     public boolean toolOutOfDate;
 
     /** List of {@link SegmentInfoStatus} instances, detailing status of each segment. */
-    public List<SegmentInfoStatus> segmentInfos = new ArrayList<SegmentInfoStatus>();
+    public List<SegmentInfoStatus> segmentInfos = new ArrayList<>();
   
     /** Directory index is in. */
     public Directory dir;
@@ -1069,7 +1069,7 @@ public class CheckIndex {
           final BlockTreeTermsReader.Stats stats = ((BlockTreeTermsReader.FieldReader) fieldTerms).computeStats();
           assert stats != null;
           if (status.blockTreeStats == null) {
-            status.blockTreeStats = new HashMap<String,BlockTreeTermsReader.Stats>();
+            status.blockTreeStats = new HashMap<>();
           }
           status.blockTreeStats.put(field, stats);
         }
@@ -1831,7 +1831,7 @@ public class CheckIndex {
     boolean doFix = false;
     boolean doCrossCheckTermVectors = false;
     boolean verbose = false;
-    List<String> onlySegments = new ArrayList<String>();
+    List<String> onlySegments = new ArrayList<>();
     String indexPath = null;
     String dirImpl = null;
     int i = 0;

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/CoalescedUpdates.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/CoalescedUpdates.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/CoalescedUpdates.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/CoalescedUpdates.java Wed Mar 12 14:39:17 2014
@@ -28,9 +28,9 @@ import org.apache.lucene.index.BufferedU
 import org.apache.lucene.util.MergedIterator;
 
 class CoalescedUpdates {
-  final Map<Query,Integer> queries = new HashMap<Query,Integer>();
-  final List<Iterable<Term>> iterables = new ArrayList<Iterable<Term>>();
-  final List<NumericUpdate> numericDVUpdates = new ArrayList<NumericUpdate>();
+  final Map<Query,Integer> queries = new HashMap<>();
+  final List<Iterable<Term>> iterables = new ArrayList<>();
+  final List<NumericUpdate> numericDVUpdates = new ArrayList<>();
   
   @Override
   public String toString() {
@@ -62,7 +62,7 @@ class CoalescedUpdates {
        for (int i = 0; i < iterables.size(); i++) {
          subs[i] = iterables.get(i).iterator();
        }
-       return new MergedIterator<Term>(subs);
+       return new MergedIterator<>(subs);
      }
    };
   }

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/CompositeReaderContext.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/CompositeReaderContext.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/CompositeReaderContext.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/CompositeReaderContext.java Wed Mar 12 14:39:17 2014
@@ -80,7 +80,7 @@ public final class CompositeReaderContex
   
   private static final class Builder {
     private final CompositeReader reader;
-    private final List<AtomicReaderContext> leaves = new ArrayList<AtomicReaderContext>();
+    private final List<AtomicReaderContext> leaves = new ArrayList<>();
     private int leafDocBase = 0;
     
     public Builder(CompositeReader reader) {

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java Wed Mar 12 14:39:17 2014
@@ -47,7 +47,7 @@ public class ConcurrentMergeScheduler ex
   private int mergeThreadPriority = -1;
 
   /** List of currently active {@link MergeThread}s. */
-  protected List<MergeThread> mergeThreads = new ArrayList<MergeThread>();
+  protected List<MergeThread> mergeThreads = new ArrayList<>();
   
   /** 
    * Default {@code maxThreadCount}.
@@ -171,7 +171,7 @@ public class ConcurrentMergeScheduler ex
 
     // Only look at threads that are alive & not in the
     // process of stopping (ie have an active merge):
-    final List<MergeThread> activeMerges = new ArrayList<MergeThread>();
+    final List<MergeThread> activeMerges = new ArrayList<>();
 
     int threadIdx = 0;
     while (threadIdx < mergeThreads.size()) {
@@ -571,7 +571,7 @@ public class ConcurrentMergeScheduler ex
     ConcurrentMergeScheduler clone = (ConcurrentMergeScheduler) super.clone();
     clone.writer = null;
     clone.dir = null;
-    clone.mergeThreads = new ArrayList<MergeThread>();
+    clone.mergeThreads = new ArrayList<>();
     return clone;
   }
 }

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DirectoryReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DirectoryReader.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DirectoryReader.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DirectoryReader.java Wed Mar 12 14:39:17 2014
@@ -224,7 +224,7 @@ public abstract class DirectoryReader ex
   public static List<IndexCommit> listCommits(Directory dir) throws IOException {
     final String[] files = dir.listAll();
 
-    List<IndexCommit> commits = new ArrayList<IndexCommit>();
+    List<IndexCommit> commits = new ArrayList<>();
 
     SegmentInfos latest = new SegmentInfos();
     latest.read(dir);

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocFieldProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocFieldProcessor.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocFieldProcessor.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocFieldProcessor.java Wed Mar 12 14:39:17 2014
@@ -69,7 +69,7 @@ final class DocFieldProcessor extends Do
   @Override
   public void flush(SegmentWriteState state) throws IOException {
 
-    Map<String,DocFieldConsumerPerField> childFields = new HashMap<String,DocFieldConsumerPerField>();
+    Map<String,DocFieldConsumerPerField> childFields = new HashMap<>();
     Collection<DocFieldConsumerPerField> fields = fields();
     for (DocFieldConsumerPerField f : fields) {
       childFields.put(f.getFieldInfo().name, f);
@@ -132,7 +132,7 @@ final class DocFieldProcessor extends Do
   }
 
   public Collection<DocFieldConsumerPerField> fields() {
-    Collection<DocFieldConsumerPerField> fields = new HashSet<DocFieldConsumerPerField>();
+    Collection<DocFieldConsumerPerField> fields = new HashSet<>();
     for(int i=0;i<fieldHash.length;i++) {
       DocFieldProcessorPerField field = fieldHash[i];
       while(field != null) {

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocInverter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocInverter.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocInverter.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocInverter.java Wed Mar 12 14:39:17 2014
@@ -41,8 +41,8 @@ final class DocInverter extends DocField
   @Override
   void flush(Map<String, DocFieldConsumerPerField> fieldsToFlush, SegmentWriteState state) throws IOException {
 
-    Map<String, InvertedDocConsumerPerField> childFieldsToFlush = new HashMap<String, InvertedDocConsumerPerField>();
-    Map<String, InvertedDocEndConsumerPerField> endChildFieldsToFlush = new HashMap<String, InvertedDocEndConsumerPerField>();
+    Map<String, InvertedDocConsumerPerField> childFieldsToFlush = new HashMap<>();
+    Map<String, InvertedDocEndConsumerPerField> endChildFieldsToFlush = new HashMap<>();
 
     for (Map.Entry<String, DocFieldConsumerPerField> fieldToFlush : fieldsToFlush.entrySet()) {
       DocInverterPerField perField = (DocInverterPerField) fieldToFlush.getValue();

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocTermOrds.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocTermOrds.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocTermOrds.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocTermOrds.java Wed Mar 12 14:39:17 2014
@@ -342,7 +342,7 @@ public class DocTermOrds {
         } catch (UnsupportedOperationException uoe) {
           // Reader cannot provide ord support, so we wrap
           // our own support by creating our own terms index:
-          indexedTerms = new ArrayList<BytesRef>();
+          indexedTerms = new ArrayList<>();
           indexedTermsBytes = new PagedBytes(15);
           //System.out.println("NO ORDS");
         }

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocValuesProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocValuesProcessor.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocValuesProcessor.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocValuesProcessor.java Wed Mar 12 14:39:17 2014
@@ -33,7 +33,7 @@ final class DocValuesProcessor extends S
   // TODO: somewhat wasteful we also keep a map here; would
   // be more efficient if we could "reuse" the map/hash
   // lookup DocFieldProcessor already did "above"
-  private final Map<String,DocValuesWriter> writers = new HashMap<String,DocValuesWriter>();
+  private final Map<String,DocValuesWriter> writers = new HashMap<>();
   private final Counter bytesUsed;
 
   public DocValuesProcessor(Counter bytesUsed) {

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java Wed Mar 12 14:39:17 2014
@@ -135,7 +135,7 @@ final class DocumentsWriter {
     this.perThreadPool = config.getIndexerThreadPool();
     flushPolicy = config.getFlushPolicy();
     this.writer = writer;
-    this.events = new ConcurrentLinkedQueue<Event>();
+    this.events = new ConcurrentLinkedQueue<>();
     flushControl = new DocumentsWriterFlushControl(this, config, writer.bufferedUpdatesStream);
   }
   
@@ -207,7 +207,7 @@ final class DocumentsWriter {
   synchronized void abort(IndexWriter writer) {
     assert !Thread.holdsLock(writer) : "IndexWriter lock should never be hold when aborting";
     boolean success = false;
-    final Set<String> newFilesSet = new HashSet<String>();
+    final Set<String> newFilesSet = new HashSet<>();
     try {
       deleteQueue.clear();
       if (infoStream.isEnabled("DW")) {
@@ -243,7 +243,7 @@ final class DocumentsWriter {
     try {
       deleteQueue.clear();
       final int limit = perThreadPool.getMaxThreadStates();
-      final Set<String> newFilesSet = new HashSet<String>();
+      final Set<String> newFilesSet = new HashSet<>();
       for (int i = 0; i < limit; i++) {
         final ThreadState perThread = perThreadPool.getThreadState(i);
         perThread.lock();

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterDeleteQueue.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterDeleteQueue.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterDeleteQueue.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterDeleteQueue.java Wed Mar 12 14:39:17 2014
@@ -93,7 +93,7 @@ final class DocumentsWriterDeleteQueue {
      * we use a sentinel instance as our initial tail. No slice will ever try to
      * apply this tail since the head is always omitted.
      */
-    tail = new Node<Object>(null); // sentinel
+    tail = new Node<>(null); // sentinel
     globalSlice = new DeleteSlice(tail);
   }
 

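The sentinel change above is a slightly less obvious diamond case: in new Node<>(null) the constructor argument carries no type information, so the compiler infers the type parameter entirely from the assignment target. A minimal sketch (this Node is a hypothetical stand-in for the private class in the patch):

    class SentinelExample {
      static class Node<T> {
        final T item;
        Node(T item) { this.item = item; }
      }

      // <Object> is inferred from the declared type of tail, even though
      // the argument (null) says nothing about T.
      private final Node<Object> tail = new Node<>(null);
    }
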
Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java Wed Mar 12 14:39:17 2014
@@ -51,10 +51,10 @@ final class DocumentsWriterFlushControl 
   private int numDocsSinceStalled = 0; // only with assert
   final AtomicBoolean flushDeletes = new AtomicBoolean(false);
   private boolean fullFlush = false;
-  private final Queue<DocumentsWriterPerThread> flushQueue = new LinkedList<DocumentsWriterPerThread>();
+  private final Queue<DocumentsWriterPerThread> flushQueue = new LinkedList<>();
   // only for safety reasons if a DWPT is close to the RAM limit
-  private final Queue<BlockedFlush> blockedFlushes = new LinkedList<BlockedFlush>();
-  private final IdentityHashMap<DocumentsWriterPerThread, Long> flushingWriters = new IdentityHashMap<DocumentsWriterPerThread, Long>();
+  private final Queue<BlockedFlush> blockedFlushes = new LinkedList<>();
+  private final IdentityHashMap<DocumentsWriterPerThread, Long> flushingWriters = new IdentityHashMap<>();
 
 
   double maxConfiguredRamBuffer = 0;
@@ -531,7 +531,7 @@ final class DocumentsWriterFlushControl 
     return true;
   }
 
-  private final List<DocumentsWriterPerThread> fullFlushBuffer = new ArrayList<DocumentsWriterPerThread>();
+  private final List<DocumentsWriterPerThread> fullFlushBuffer = new ArrayList<>();
 
   void addFlushableState(ThreadState perThread) {
     if (infoStream.isEnabled("DWFC")) {

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushQueue.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushQueue.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushQueue.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushQueue.java Wed Mar 12 14:39:17 2014
@@ -28,7 +28,7 @@ import org.apache.lucene.index.Documents
  * @lucene.internal 
  */
 class DocumentsWriterFlushQueue {
-  private final Queue<FlushTicket> queue = new LinkedList<FlushTicket>();
+  private final Queue<FlushTicket> queue = new LinkedList<>();
   // we track tickets separately since count must be present even before the ticket is
   // constructed ie. queue.size would not reflect it.
   private final AtomicInteger ticketCount = new AtomicInteger();

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java Wed Mar 12 14:39:17 2014
@@ -464,7 +464,7 @@ class DocumentsWriterPerThread {
     try {
       consumer.flush(flushState);
       pendingUpdates.terms.clear();
-      segmentInfo.setFiles(new HashSet<String>(directory.getCreatedFiles()));
+      segmentInfo.setFiles(new HashSet<>(directory.getCreatedFiles()));
 
       final SegmentCommitInfo segmentInfoPerCommit = new SegmentCommitInfo(segmentInfo, 0, -1L, -1L);
       if (infoStream.isEnabled("DWPT")) {
@@ -510,7 +510,7 @@ class DocumentsWriterPerThread {
     }
   }
   
-  private final Set<String> filesToDelete = new HashSet<String>(); 
+  private final Set<String> filesToDelete = new HashSet<>();
   
   public Set<String> pendingFilesToDelete() {
     return filesToDelete;

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterStallControl.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterStallControl.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterStallControl.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterStallControl.java Wed Mar 12 14:39:17 2014
@@ -42,7 +42,7 @@ final class DocumentsWriterStallControl 
   private volatile boolean stalled;
   private int numWaiting; // only with assert
   private boolean wasStalled; // only with assert
-  private final Map<Thread, Boolean> waiting = new IdentityHashMap<Thread, Boolean>(); // only with assert
+  private final Map<Thread, Boolean> waiting = new IdentityHashMap<>(); // only with assert
   
   /**
    * Update the stalled flag status. This method will set the stalled flag to

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/FieldInfo.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/FieldInfo.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/FieldInfo.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/FieldInfo.java Wed Mar 12 14:39:17 2014
@@ -324,7 +324,7 @@ public final class FieldInfo {
    */
   public String putAttribute(String key, String value) {
     if (attributes == null) {
-      attributes = new HashMap<String,String>();
+      attributes = new HashMap<>();
     }
     return attributes.put(key, value);
   }

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java Wed Mar 12 14:39:17 2014
@@ -41,8 +41,8 @@ public class FieldInfos implements Itera
   private final boolean hasNorms;
   private final boolean hasDocValues;
   
-  private final SortedMap<Integer,FieldInfo> byNumber = new TreeMap<Integer,FieldInfo>();
-  private final HashMap<String,FieldInfo> byName = new HashMap<String,FieldInfo>();
+  private final SortedMap<Integer,FieldInfo> byNumber = new TreeMap<>();
+  private final HashMap<String,FieldInfo> byName = new HashMap<>();
   private final Collection<FieldInfo> values; // for an unmodifiable iterator
   
   /**
@@ -174,9 +174,9 @@ public class FieldInfos implements Itera
     private int lowestUnassignedFieldNumber = -1;
     
     FieldNumbers() {
-      this.nameToNumber = new HashMap<String, Integer>();
-      this.numberToName = new HashMap<Integer, String>();
-      this.docValuesType = new HashMap<String,DocValuesType>();
+      this.nameToNumber = new HashMap<>();
+      this.numberToName = new HashMap<>();
+      this.docValuesType = new HashMap<>();
     }
     
     /**
@@ -250,7 +250,7 @@ public class FieldInfos implements Itera
   }
   
   static final class Builder {
-    private final HashMap<String,FieldInfo> byName = new HashMap<String,FieldInfo>();
+    private final HashMap<String,FieldInfo> byName = new HashMap<>();
     final FieldNumbers globalFieldNumbers;
 
     Builder() {

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/FreqProxFields.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/FreqProxFields.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/FreqProxFields.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/FreqProxFields.java Wed Mar 12 14:39:17 2014
@@ -35,7 +35,7 @@ import org.apache.lucene.util.BytesRef;
  *  PostingsFormat. */
 
 class FreqProxFields extends Fields {
-  final Map<String,FreqProxTermsWriterPerField> fields = new LinkedHashMap<String,FreqProxTermsWriterPerField>();
+  final Map<String,FreqProxTermsWriterPerField> fields = new LinkedHashMap<>();
 
   public FreqProxFields(List<FreqProxTermsWriterPerField> fieldList) {
     // NOTE: fields are already sorted by field name

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/FreqProxTermsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/FreqProxTermsWriter.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/FreqProxTermsWriter.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/FreqProxTermsWriter.java Wed Mar 12 14:39:17 2014
@@ -36,7 +36,7 @@ final class FreqProxTermsWriter extends 
     // flushed segment:
     if (state.segUpdates != null && state.segUpdates.terms.size() > 0) {
       Map<Term,Integer> segDeletes = state.segUpdates.terms;
-      List<Term> deleteTerms = new ArrayList<Term>(segDeletes.keySet());
+      List<Term> deleteTerms = new ArrayList<>(segDeletes.keySet());
       Collections.sort(deleteTerms);
       String lastField = null;
       TermsEnum termsEnum = null;
@@ -87,7 +87,7 @@ final class FreqProxTermsWriter extends 
 
     // Gather all FieldData's that have postings, across all
     // ThreadStates
-    List<FreqProxTermsWriterPerField> allFields = new ArrayList<FreqProxTermsWriterPerField>();
+    List<FreqProxTermsWriterPerField> allFields = new ArrayList<>();
 
     for (TermsHashConsumerPerField f : fieldsToFlush.values()) {
       final FreqProxTermsWriterPerField perField = (FreqProxTermsWriterPerField) f;

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/FrozenBufferedUpdates.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/FrozenBufferedUpdates.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/FrozenBufferedUpdates.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/FrozenBufferedUpdates.java Wed Mar 12 14:39:17 2014
@@ -83,7 +83,7 @@ class FrozenBufferedUpdates {
     // so that it maps to all fields it affects, sorted by their docUpto, and traverse
     // that Term only once, applying the update to all fields that still need to be
     // updated. 
-    List<NumericUpdate> allUpdates = new ArrayList<NumericUpdate>();
+    List<NumericUpdate> allUpdates = new ArrayList<>();
     int numericUpdatesSize = 0;
     for (LinkedHashMap<Term,NumericUpdate> fieldUpdates : deletes.numericUpdates.values()) {
       for (NumericUpdate update : fieldUpdates.values()) {

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/IndexFileDeleter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/IndexFileDeleter.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/IndexFileDeleter.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/IndexFileDeleter.java Wed Mar 12 14:39:17 2014
@@ -81,21 +81,21 @@ final class IndexFileDeleter implements 
   /* Reference count for all files in the index.
    * Counts how many existing commits reference a file.
    **/
-  private Map<String, RefCount> refCounts = new HashMap<String, RefCount>();
+  private Map<String, RefCount> refCounts = new HashMap<>();
 
   /* Holds all commits (segments_N) currently in the index.
    * This will have just 1 commit if you are using the
    * default delete policy (KeepOnlyLastCommitDeletionPolicy).
    * Other policies may leave commit points live for longer
    * in which case this list would be longer than 1: */
-  private List<CommitPoint> commits = new ArrayList<CommitPoint>();
+  private List<CommitPoint> commits = new ArrayList<>();
 
   /* Holds files we had incref'd from the previous
    * non-commit checkpoint: */
-  private final List<String> lastFiles = new ArrayList<String>();
+  private final List<String> lastFiles = new ArrayList<>();
 
   /* Commits that the IndexDeletionPolicy have decided to delete: */
-  private List<CommitPoint> commitsToDelete = new ArrayList<CommitPoint>();
+  private List<CommitPoint> commitsToDelete = new ArrayList<>();
 
   private final InfoStream infoStream;
   private Directory directory;
@@ -597,7 +597,7 @@ final class IndexFileDeleter implements 
           infoStream.message("IFD", "unable to remove file \"" + fileName + "\": " + e.toString() + "; Will re-try later.");
         }
         if (deletable == null) {
-          deletable = new ArrayList<String>();
+          deletable = new ArrayList<>();
         }
         deletable.add(fileName);                  // add to deletable
       }

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java Wed Mar 12 14:39:17 2014
@@ -235,7 +235,7 @@ public class IndexWriter implements Clos
   final IndexFileDeleter deleter;
 
   // used by forceMerge to note those needing merging
-  private Map<SegmentCommitInfo,Boolean> segmentsToMerge = new HashMap<SegmentCommitInfo,Boolean>();
+  private Map<SegmentCommitInfo,Boolean> segmentsToMerge = new HashMap<>();
   private int mergeMaxNumSegments;
 
   private Lock writeLock;
@@ -245,13 +245,13 @@ public class IndexWriter implements Clos
 
   // Holds all SegmentInfo instances currently involved in
   // merges
-  private HashSet<SegmentCommitInfo> mergingSegments = new HashSet<SegmentCommitInfo>();
+  private HashSet<SegmentCommitInfo> mergingSegments = new HashSet<>();
 
   private MergePolicy mergePolicy;
   private final MergeScheduler mergeScheduler;
-  private LinkedList<MergePolicy.OneMerge> pendingMerges = new LinkedList<MergePolicy.OneMerge>();
-  private Set<MergePolicy.OneMerge> runningMerges = new HashSet<MergePolicy.OneMerge>();
-  private List<MergePolicy.OneMerge> mergeExceptions = new ArrayList<MergePolicy.OneMerge>();
+  private LinkedList<MergePolicy.OneMerge> pendingMerges = new LinkedList<>();
+  private Set<MergePolicy.OneMerge> runningMerges = new HashSet<>();
+  private List<MergePolicy.OneMerge> mergeExceptions = new ArrayList<>();
   private long mergeGen;
   private boolean stopMerges;
 
@@ -422,7 +422,7 @@ public class IndexWriter implements Clos
 
   class ReaderPool {
     
-    private final Map<SegmentCommitInfo,ReadersAndUpdates> readerMap = new HashMap<SegmentCommitInfo,ReadersAndUpdates>();
+    private final Map<SegmentCommitInfo,ReadersAndUpdates> readerMap = new HashMap<>();
 
     // used only by asserts
     public synchronized boolean infoIsLive(SegmentCommitInfo info) {
@@ -603,7 +603,7 @@ public class IndexWriter implements Clos
     // Make sure that every segment appears only once in the
     // pool:
     private boolean noDups() {
-      Set<String> seen = new HashSet<String>();
+      Set<String> seen = new HashSet<>();
       for(SegmentCommitInfo info : readerMap.keySet()) {
         assert !seen.contains(info.info.name);
         seen.add(info.info.name);
@@ -2346,12 +2346,12 @@ public class IndexWriter implements Clos
   }
 
   private synchronized void resetMergeExceptions() {
-    mergeExceptions = new ArrayList<MergePolicy.OneMerge>();
+    mergeExceptions = new ArrayList<>();
     mergeGen++;
   }
 
   private void noDupDirs(Directory... dirs) {
-    HashSet<Directory> dups = new HashSet<Directory>();
+    HashSet<Directory> dups = new HashSet<>();
     for(int i=0;i<dirs.length;i++) {
       if (dups.contains(dirs[i]))
         throw new IllegalArgumentException("Directory " + dirs[i] + " appears more than once");
@@ -2365,7 +2365,7 @@ public class IndexWriter implements Clos
    *  to match with a call to {@link IOUtils#close} in a
    *  finally clause. */
   private List<Lock> acquireWriteLocks(Directory... dirs) throws IOException {
-    List<Lock> locks = new ArrayList<Lock>();
+    List<Lock> locks = new ArrayList<>();
     for(int i=0;i<dirs.length;i++) {
       boolean success = false;
       try {
@@ -2444,7 +2444,7 @@ public class IndexWriter implements Clos
 
       flush(false, true);
 
-      List<SegmentCommitInfo> infos = new ArrayList<SegmentCommitInfo>();
+      List<SegmentCommitInfo> infos = new ArrayList<>();
 
       boolean success = false;
       try {
@@ -2567,7 +2567,7 @@ public class IndexWriter implements Clos
       flush(false, true);
 
       String mergedName = newSegmentName();
-      final List<AtomicReader> mergeReaders = new ArrayList<AtomicReader>();
+      final List<AtomicReader> mergeReaders = new ArrayList<>();
       for (IndexReader indexReader : readers) {
         numDocs += indexReader.numDocs();
         for (AtomicReaderContext ctx : indexReader.leaves()) {
@@ -2606,7 +2606,7 @@ public class IndexWriter implements Clos
 
       SegmentCommitInfo infoPerCommit = new SegmentCommitInfo(info, 0, -1L, -1L);
 
-      info.setFiles(new HashSet<String>(trackingDir.getCreatedFiles()));
+      info.setFiles(new HashSet<>(trackingDir.getCreatedFiles()));
       trackingDir.getCreatedFiles().clear();
                                          
       setDiagnostics(info, SOURCE_ADDINDEXES_READERS);
@@ -2684,7 +2684,7 @@ public class IndexWriter implements Clos
                                           info.info.getDiagnostics());
     SegmentCommitInfo newInfoPerCommit = new SegmentCommitInfo(newInfo, info.getDelCount(), info.getDelGen(), info.getFieldInfosGen());
 
-    Set<String> segFiles = new HashSet<String>();
+    Set<String> segFiles = new HashSet<>();
 
     // Build up new segment's file names.  Must do this
     // before writing SegmentInfo:
@@ -2880,7 +2880,7 @@ public class IndexWriter implements Clos
    * contents after calling this method has no effect.
    */
   public final synchronized void setCommitData(Map<String,String> commitUserData) {
-    segmentInfos.setUserData(new HashMap<String,String>(commitUserData));
+    segmentInfos.setUserData(new HashMap<>(commitUserData));
     ++changeCount;
   }
   
@@ -3203,7 +3203,7 @@ public class IndexWriter implements Clos
     ReadersAndUpdates mergedDeletesAndUpdates = null;
     boolean initWritableLiveDocs = false;
     MergePolicy.DocMap docMap = null;
-    final Map<String,NumericFieldUpdates> mergedFieldUpdates = new HashMap<String,NumericFieldUpdates>();
+    final Map<String,NumericFieldUpdates> mergedFieldUpdates = new HashMap<>();
     
     for (int i = 0; i < sourceSegments.size(); i++) {
       SegmentCommitInfo info = sourceSegments.get(i);
@@ -3857,7 +3857,7 @@ public class IndexWriter implements Clos
     // names.
     final String mergeSegmentName = newSegmentName();
     SegmentInfo si = new SegmentInfo(directory, Constants.LUCENE_MAIN_VERSION, mergeSegmentName, -1, false, codec, null);
-    Map<String,String> details = new HashMap<String,String>();
+    Map<String,String> details = new HashMap<>();
     details.put("mergeMaxNumSegments", "" + merge.maxNumSegments);
     details.put("mergeFactor", Integer.toString(merge.segments.size()));
     setDiagnostics(si, SOURCE_MERGE, details);
@@ -3878,7 +3878,7 @@ public class IndexWriter implements Clos
   }
 
   private static void setDiagnostics(SegmentInfo info, String source, Map<String,String> details) {
-    Map<String,String> diagnostics = new HashMap<String,String>();
+    Map<String,String> diagnostics = new HashMap<>();
     diagnostics.put("source", source);
     diagnostics.put("lucene.version", Constants.LUCENE_VERSION);
     diagnostics.put("os", Constants.OS_NAME);
@@ -3972,7 +3972,7 @@ public class IndexWriter implements Clos
       infoStream.message("IW", "merging " + segString(merge.segments));
     }
 
-    merge.readers = new ArrayList<SegmentReader>();
+    merge.readers = new ArrayList<>();
 
     // This is try/finally to make sure merger's readers are
     // closed:
@@ -4069,7 +4069,7 @@ public class IndexWriter implements Clos
         }
       }
       assert mergeState.segmentInfo == merge.info.info;
-      merge.info.info.setFiles(new HashSet<String>(dirWrapper.getCreatedFiles()));
+      merge.info.info.setFiles(new HashSet<>(dirWrapper.getCreatedFiles()));
 
       // Record which codec was used to write the segment
 
@@ -4316,7 +4316,7 @@ public class IndexWriter implements Clos
   // For infoStream output
   synchronized SegmentInfos toLiveInfos(SegmentInfos sis) {
     final SegmentInfos newSIS = new SegmentInfos();
-    final Map<SegmentCommitInfo,SegmentCommitInfo> liveSIS = new HashMap<SegmentCommitInfo,SegmentCommitInfo>();        
+    final Map<SegmentCommitInfo,SegmentCommitInfo> liveSIS = new HashMap<>();
     for(SegmentCommitInfo info : segmentInfos) {
       liveSIS.put(info, info);
     }
@@ -4608,7 +4608,7 @@ public class IndexWriter implements Clos
     }
 
     // Replace all previous files with the CFS/CFE files:
-    Set<String> siFiles = new HashSet<String>();
+    Set<String> siFiles = new HashSet<>();
     siFiles.add(fileName);
     siFiles.add(IndexFileNames.segmentFileName(info.name, "", IndexFileNames.COMPOUND_FILE_ENTRIES_EXTENSION));
     info.setFiles(siFiles);
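
The change running through IndexWriter.java above, and through the rest of
this commit, is purely syntactic: explicit type arguments on constructor
calls are replaced by the Java 7 diamond operator (Project Coin, JSR 334).
A minimal standalone sketch, not taken from the Lucene sources, of what the
compiler infers:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class DiamondDemo {
      // Field declaration: <> picks up <String, List<Integer>> from the
      // declared type, exactly like the fields rewritten above.
      private Map<String, List<Integer>> byKey = new HashMap<>();

      public static void main(String[] args) {
        Map<String, List<Integer>> verbose =
            new HashMap<String, List<Integer>>();  // pre-Java-7 spelling
        Map<String, List<Integer>> inferred = new HashMap<>();

        // Plain assignment (no declaration) also supplies a target type,
        // which is why sites like "clone.writer = new SetOnce<>();" in
        // MergePolicy.java below compile as well.
        verbose = new HashMap<>();

        // Both spellings denote the same parameterized type; after type
        // erasure the generated bytecode is identical.
        List<Integer> values = new ArrayList<>();
        values.add(42);
        inferred.put("a", values);
        System.out.println(inferred);  // prints: {a=[42]}
      }
    }

Inference happens entirely at compile time, and at each of these sites the
declared target type pins it down, so the rewrite cannot change behavior.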

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java Wed Mar 12 14:39:17 2014
@@ -130,7 +130,7 @@ public final class IndexWriterConfig ext
 
   // indicates whether this config instance is already attached to a writer.
   // not final so that it can be cloned properly.
-  private SetOnce<IndexWriter> writer = new SetOnce<IndexWriter>();
+  private SetOnce<IndexWriter> writer = new SetOnce<>();
   
   /**
    * Sets the {@link IndexWriter} this config is attached to.

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/LogMergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/LogMergePolicy.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/LogMergePolicy.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/LogMergePolicy.java Wed Mar 12 14:39:17 2014
@@ -468,7 +468,7 @@ public abstract class LogMergePolicy ext
 
     // Compute levels, which is just log (base mergeFactor)
     // of the size of each segment
-    final List<SegmentInfoAndLevel> levels = new ArrayList<SegmentInfoAndLevel>();
+    final List<SegmentInfoAndLevel> levels = new ArrayList<>();
     final float norm = (float) Math.log(mergeFactor);
 
     final Collection<SegmentCommitInfo> mergingSegments = writer.get().getMergingSegments();
@@ -570,7 +570,7 @@ public abstract class LogMergePolicy ext
         } else if (!anyTooLarge) {
           if (spec == null)
             spec = new MergeSpecification();
-          final List<SegmentCommitInfo> mergeInfos = new ArrayList<SegmentCommitInfo>();
+          final List<SegmentCommitInfo> mergeInfos = new ArrayList<>();
           for(int i=start;i<end;i++) {
             mergeInfos.add(levels.get(i).info);
             assert infos.contains(levels.get(i).info);

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java Wed Mar 12 14:39:17 2014
@@ -122,7 +122,7 @@ public abstract class MergePolicy implem
       if (0 == segments.size())
         throw new RuntimeException("segments must include at least one segment");
       // clone the list, as the in list may be based off original SegmentInfos and may be modified
-      this.segments = new ArrayList<SegmentCommitInfo>(segments);
+      this.segments = new ArrayList<>(segments);
       int count = 0;
       for(SegmentCommitInfo info : segments) {
         count += info.info.getDocCount();
@@ -140,7 +140,7 @@ public abstract class MergePolicy implem
       if (readers == null) {
         throw new IllegalStateException("IndexWriter has not initialized readers from the segment infos yet");
       }
-      final List<AtomicReader> readers = new ArrayList<AtomicReader>(this.readers.size());
+      final List<AtomicReader> readers = new ArrayList<>(this.readers.size());
       for (AtomicReader reader : this.readers) {
         if (reader.numDocs() > 0) {
           readers.add(reader);
@@ -295,7 +295,7 @@ public abstract class MergePolicy implem
      * The subset of segments to be included in the primitive merge.
      */
 
-    public final List<OneMerge> merges = new ArrayList<OneMerge>();
+    public final List<OneMerge> merges = new ArrayList<>();
 
     /** Sole constructor.  Use {@link
      *  #add(MergePolicy.OneMerge)} to add merges. */
@@ -393,7 +393,7 @@ public abstract class MergePolicy implem
       // should not happen
       throw new RuntimeException(e);
     }
-    clone.writer = new SetOnce<IndexWriter>();
+    clone.writer = new SetOnce<>();
     return clone;
   }
 
@@ -412,7 +412,7 @@ public abstract class MergePolicy implem
    * defaults than the {@link MergePolicy}
    */
   protected MergePolicy(double defaultNoCFSRatio, long defaultMaxCFSSegmentSize) {
-    writer = new SetOnce<IndexWriter>();
+    writer = new SetOnce<>();
     this.noCFSRatio = defaultNoCFSRatio;
     this.maxCFSSegmentSize = defaultMaxCFSSegmentSize;
   }

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/MultiFields.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/MultiFields.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/MultiFields.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/MultiFields.java Wed Mar 12 14:39:17 2014
@@ -49,7 +49,7 @@ import org.apache.lucene.util.MergedIter
 public final class MultiFields extends Fields {
   private final Fields[] subs;
   private final ReaderSlice[] subSlices;
-  private final Map<String,Terms> terms = new ConcurrentHashMap<String,Terms>();
+  private final Map<String,Terms> terms = new ConcurrentHashMap<>();
 
   /** Returns a single {@link Fields} instance for this
    *  reader, merging fields/terms/docs/positions on the
@@ -69,8 +69,8 @@ public final class MultiFields extends F
         // already an atomic reader / reader with one leave
         return leaves.get(0).reader().fields();
       default:
-        final List<Fields> fields = new ArrayList<Fields>();
-        final List<ReaderSlice> slices = new ArrayList<ReaderSlice>();
+        final List<Fields> fields = new ArrayList<>();
+        final List<ReaderSlice> slices = new ArrayList<>();
         for (final AtomicReaderContext ctx : leaves) {
           final AtomicReader r = ctx.reader();
           final Fields f = r.fields();
@@ -203,7 +203,7 @@ public final class MultiFields extends F
     for(int i=0;i<subs.length;i++) {
       subIterators[i] = subs[i].iterator();
     }
-    return new MergedIterator<String>(subIterators);
+    return new MergedIterator<>(subIterators);
   }
 
   @Override
@@ -215,8 +215,8 @@ public final class MultiFields extends F
 
     // Lazy init: first time this field is requested, we
     // create & add to terms:
-    final List<Terms> subs2 = new ArrayList<Terms>();
-    final List<ReaderSlice> slices2 = new ArrayList<ReaderSlice>();
+    final List<Terms> subs2 = new ArrayList<>();
+    final List<ReaderSlice> slices2 = new ArrayList<>();
 
     // Gather all sub-readers that share this field
     for(int i=0;i<subs.length;i++) {
@@ -269,7 +269,7 @@ public final class MultiFields extends F
    *  will be unavailable.
    */
   public static Collection<String> getIndexedFields(IndexReader reader) {
-    final Collection<String> fields = new HashSet<String>();
+    final Collection<String> fields = new HashSet<>();
     for(final FieldInfo fieldInfo : getMergedFieldInfos(reader)) {
       if (fieldInfo.isIndexed()) {
         fields.add(fieldInfo.name);

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/MultiTerms.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/MultiTerms.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/MultiTerms.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/MultiTerms.java Wed Mar 12 14:39:17 2014
@@ -70,7 +70,7 @@ public final class MultiTerms extends Te
 
   @Override
   public TermsEnum intersect(CompiledAutomaton compiled, BytesRef startTerm) throws IOException {
-    final List<MultiTermsEnum.TermsEnumIndex> termsEnums = new ArrayList<MultiTermsEnum.TermsEnumIndex>();
+    final List<MultiTermsEnum.TermsEnumIndex> termsEnums = new ArrayList<>();
     for(int i=0;i<subs.length;i++) {
       final TermsEnum termsEnum = subs[i].intersect(compiled, startTerm);
       if (termsEnum != null) {
@@ -88,7 +88,7 @@ public final class MultiTerms extends Te
   @Override
   public TermsEnum iterator(TermsEnum reuse) throws IOException {
 
-    final List<MultiTermsEnum.TermsEnumIndex> termsEnums = new ArrayList<MultiTermsEnum.TermsEnumIndex>();
+    final List<MultiTermsEnum.TermsEnumIndex> termsEnums = new ArrayList<>();
     for(int i=0;i<subs.length;i++) {
       final TermsEnum termsEnum = subs[i].iterator(null);
       if (termsEnum != null) {

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/ParallelAtomicReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/ParallelAtomicReader.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/ParallelAtomicReader.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/ParallelAtomicReader.java Wed Mar 12 14:39:17 2014
@@ -56,8 +56,8 @@ public class ParallelAtomicReader extend
   private final boolean closeSubReaders;
   private final int maxDoc, numDocs;
   private final boolean hasDeletions;
-  private final SortedMap<String,AtomicReader> fieldToReader = new TreeMap<String,AtomicReader>();
-  private final SortedMap<String,AtomicReader> tvFieldToReader = new TreeMap<String,AtomicReader>();
+  private final SortedMap<String,AtomicReader> fieldToReader = new TreeMap<>();
+  private final SortedMap<String,AtomicReader> tvFieldToReader = new TreeMap<>();
   
   /** Create a ParallelAtomicReader based on the provided
    *  readers; auto-closes the given readers on {@link #close()}. */
@@ -151,7 +151,7 @@ public class ParallelAtomicReader extend
   
   // Single instance of this, per ParallelReader instance
   private final class ParallelFields extends Fields {
-    final Map<String,Terms> fields = new TreeMap<String,Terms>();
+    final Map<String,Terms> fields = new TreeMap<>();
     
     ParallelFields() {
     }

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/PersistentSnapshotDeletionPolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/PersistentSnapshotDeletionPolicy.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/PersistentSnapshotDeletionPolicy.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/PersistentSnapshotDeletionPolicy.java Wed Mar 12 14:39:17 2014
@@ -241,13 +241,13 @@ public class PersistentSnapshotDeletionP
   private synchronized void loadPriorSnapshots() throws IOException {
     long genLoaded = -1;
     IOException ioe = null;
-    List<String> snapshotFiles = new ArrayList<String>();
+    List<String> snapshotFiles = new ArrayList<>();
     for(String file : dir.listAll()) {
       if (file.startsWith(SNAPSHOTS_PREFIX)) {
         long gen = Long.parseLong(file.substring(SNAPSHOTS_PREFIX.length()));
         if (genLoaded == -1 || gen > genLoaded) {
           snapshotFiles.add(file);
-          Map<Long,Integer> m = new HashMap<Long,Integer>();    
+          Map<Long,Integer> m = new HashMap<>();
           IndexInput in = dir.openInput(file, IOContext.DEFAULT);
           try {
             CodecUtil.checkHeader(in, CODEC_NAME, VERSION_START, VERSION_START);

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java Wed Mar 12 14:39:17 2014
@@ -78,7 +78,7 @@ class ReadersAndUpdates {
   // updates on the merged segment too.
   private boolean isMerging = false;
   
-  private final Map<String,NumericFieldUpdates> mergingNumericUpdates = new HashMap<String,NumericFieldUpdates>();
+  private final Map<String,NumericFieldUpdates> mergingNumericUpdates = new HashMap<>();
   
   public ReadersAndUpdates(IndexWriter writer, SegmentCommitInfo info) {
     this.info = info;
@@ -448,7 +448,7 @@ class ReadersAndUpdates {
     
     // create a new map, keeping only the gens that are in use
     Map<Long,Set<String>> genUpdatesFiles = info.getUpdatesFiles();
-    Map<Long,Set<String>> newGenUpdatesFiles = new HashMap<Long,Set<String>>();
+    Map<Long,Set<String>> newGenUpdatesFiles = new HashMap<>();
     final long fieldInfosGen = info.getFieldInfosGen();
     for (FieldInfo fi : fieldInfos) {
       long dvGen = fi.getDocValuesGen();

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentCommitInfo.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentCommitInfo.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentCommitInfo.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentCommitInfo.java Wed Mar 12 14:39:17 2014
@@ -56,7 +56,7 @@ public class SegmentCommitInfo {
   private long nextWriteFieldInfosGen;
 
   // Track the per-generation updates files
-  private final Map<Long,Set<String>> genUpdatesFiles = new HashMap<Long,Set<String>>();
+  private final Map<Long,Set<String>> genUpdatesFiles = new HashMap<>();
   
   private volatile long sizeInBytes = -1;
 
@@ -147,7 +147,7 @@ public class SegmentCommitInfo {
   /** Returns all files in use by this segment. */
   public Collection<String> files() throws IOException {
     // Start from the wrapped info's files:
-    Collection<String> files = new HashSet<String>(info.files());
+    Collection<String> files = new HashSet<>(info.files());
 
     // TODO we could rely on TrackingDir.getCreatedFiles() (like we do for
     // updates) and then maybe even be able to remove LiveDocsFormat.files().
@@ -257,7 +257,7 @@ public class SegmentCommitInfo {
     
     // deep clone
     for (Entry<Long,Set<String>> e : genUpdatesFiles.entrySet()) {
-      other.genUpdatesFiles.put(e.getKey(), new HashSet<String>(e.getValue()));
+      other.genUpdatesFiles.put(e.getKey(), new HashSet<>(e.getValue()));
     }
     
     return other;
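
SegmentCommitInfo.java shows the other inference path: in
new HashSet<>(info.files()) and new HashSet<>(e.getValue()) the element
type can come from the constructor argument rather than from a declared
target, because a diamond allocation is resolved like a generic method
call. A standalone sketch; the setFiles method and file names here are
illustrative stand-ins, not Lucene's API:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    public class CopyConstructorDemo {
      // Hypothetical stand-in for a setter such as SegmentInfo.setFiles.
      static void setFiles(Set<String> files) {
        System.out.println(files.size() + " files");
      }

      public static void main(String[] args) {
        List<String> created = Arrays.asList("_0.cfs", "_0.cfe");

        // HashSet(Collection<? extends E>) fixes E = String from its
        // argument, so "new HashSet<>(...)" is legal under javac 7 even
        // in argument position, with no assignment target in sight.
        setFiles(new HashSet<>(created));  // prints: 2 files
      }
    }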

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java Wed Mar 12 14:39:17 2014
@@ -79,7 +79,7 @@ final class SegmentCoreReaders {
   final CloseableThreadLocal<Map<String,Object>> normsLocal = new CloseableThreadLocal<Map<String,Object>>() {
     @Override
     protected Map<String,Object> initialValue() {
-      return new HashMap<String,Object>();
+      return new HashMap<>();
     }
   };
 

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java Wed Mar 12 14:39:17 2014
@@ -35,7 +35,7 @@ import org.apache.lucene.util.RefCount;
  */
 final class SegmentDocValues {
 
-  private final Map<Long,RefCount<DocValuesProducer>> genDVProducers = new HashMap<Long,RefCount<DocValuesProducer>>();
+  private final Map<Long,RefCount<DocValuesProducer>> genDVProducers = new HashMap<>();
 
   private RefCount<DocValuesProducer> newDocValuesProducer(SegmentCommitInfo si, IOContext context, Directory dir,
       DocValuesFormat dvFormat, final Long gen, List<FieldInfo> infos) throws IOException {

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java Wed Mar 12 14:39:17 2014
@@ -139,9 +139,9 @@ public final class SegmentInfos implemen
                                // there was an IOException that had interrupted a commit
 
   /** Opaque Map&lt;String, String&gt; that user can specify during IndexWriter.commit */
-  public Map<String,String> userData = Collections.<String,String>emptyMap();
+  public Map<String,String> userData = Collections.emptyMap();
   
-  private List<SegmentCommitInfo> segments = new ArrayList<SegmentCommitInfo>();
+  private List<SegmentCommitInfo> segments = new ArrayList<>();
   
   /**
    * If non-null, information about loading segments_N files
@@ -355,7 +355,7 @@ public final class SegmentInfos implemen
           if (numGensUpdatesFiles == 0) {
             genUpdatesFiles = Collections.emptyMap();
           } else {
-            genUpdatesFiles = new HashMap<Long,Set<String>>(numGensUpdatesFiles);
+            genUpdatesFiles = new HashMap<>(numGensUpdatesFiles);
             for (int i = 0; i < numGensUpdatesFiles; i++) {
               genUpdatesFiles.put(input.readLong(), input.readStringSet());
             }
@@ -471,13 +471,13 @@ public final class SegmentInfos implemen
     try {
       final SegmentInfos sis = (SegmentInfos) super.clone();
       // deep clone, first recreate all collections:
-      sis.segments = new ArrayList<SegmentCommitInfo>(size());
+      sis.segments = new ArrayList<>(size());
       for(final SegmentCommitInfo info : this) {
         assert info.info.getCodec() != null;
         // dont directly access segments, use add method!!!
         sis.add(info.clone());
       }
-      sis.userData = new HashMap<String,String>(userData);
+      sis.userData = new HashMap<>(userData);
       return sis;
     } catch (CloneNotSupportedException e) {
       throw new RuntimeException("should not happen", e);
@@ -832,7 +832,7 @@ public final class SegmentInfos implemen
    *  The returned collection is recomputed on each
    *  invocation.  */
   public Collection<String> files(Directory dir, boolean includeSegmentsFile) throws IOException {
-    HashSet<String> files = new HashSet<String>();
+    HashSet<String> files = new HashSet<>();
     if (includeSegmentsFile) {
       final String segmentFileName = getSegmentsFileName();
       if (segmentFileName != null) {
@@ -978,7 +978,7 @@ public final class SegmentInfos implemen
   
   /** applies all changes caused by committing a merge to this SegmentInfos */
   void applyMergeChanges(MergePolicy.OneMerge merge, boolean dropSegment) {
-    final Set<SegmentCommitInfo> mergedAway = new HashSet<SegmentCommitInfo>(merge.segments);
+    final Set<SegmentCommitInfo> mergedAway = new HashSet<>(merge.segments);
     boolean inserted = false;
     int newSegIdx = 0;
     for (int segIdx = 0, cnt = segments.size(); segIdx < cnt; segIdx++) {
@@ -1010,7 +1010,7 @@ public final class SegmentInfos implemen
   }
 
   List<SegmentCommitInfo> createBackupSegmentInfos() {
-    final List<SegmentCommitInfo> list = new ArrayList<SegmentCommitInfo>(size());
+    final List<SegmentCommitInfo> list = new ArrayList<>(size());
     for(final SegmentCommitInfo info : this) {
       assert info.info.getCodec() != null;
       list.add(info.clone());
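
The SegmentInfos.java diff above also drops an explicit type witness on a
generic method call: Collections.<String,String>emptyMap() becomes
Collections.emptyMap(). In assignment context the target type has always
driven inference, so the witness was redundant there; it only matters in
argument position, where javac 7 cannot propagate the target type (Java 8's
generalized target typing, JEP 101, later removed that limitation too). A
standalone sketch:

    import java.util.Collections;
    import java.util.Map;

    public class WitnessDemo {
      static int size(Map<String, String> m) {
        return m.size();
      }

      public static void main(String[] args) {
        // Assignment context: both lines infer <String, String>, so the
        // witness removed in this commit bought nothing here.
        Map<String, String> witnessed = Collections.<String, String>emptyMap();
        Map<String, String> inferred = Collections.emptyMap();
        System.out.println(witnessed == inferred);  // true: the same singleton

        // Argument position: javac 7 infers Map<Object, Object> for a bare
        // emptyMap() here and rejects the call, so the witness stays;
        // javac 8 and later accept it either way.
        System.out.println(size(Collections.<String, String>emptyMap()));  // 0
      }
    }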

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java Wed Mar 12 14:39:17 2014
@@ -155,8 +155,8 @@ final class SegmentMerger {
         DocValuesType type = field.getDocValuesType();
         if (type != null) {
           if (type == DocValuesType.NUMERIC) {
-            List<NumericDocValues> toMerge = new ArrayList<NumericDocValues>();
-            List<Bits> docsWithField = new ArrayList<Bits>();
+            List<NumericDocValues> toMerge = new ArrayList<>();
+            List<Bits> docsWithField = new ArrayList<>();
             for (AtomicReader reader : mergeState.readers) {
               NumericDocValues values = reader.getNumericDocValues(field.name);
               Bits bits = reader.getDocsWithField(field.name);
@@ -169,8 +169,8 @@ final class SegmentMerger {
             }
             consumer.mergeNumericField(field, mergeState, toMerge, docsWithField);
           } else if (type == DocValuesType.BINARY) {
-            List<BinaryDocValues> toMerge = new ArrayList<BinaryDocValues>();
-            List<Bits> docsWithField = new ArrayList<Bits>();
+            List<BinaryDocValues> toMerge = new ArrayList<>();
+            List<Bits> docsWithField = new ArrayList<>();
             for (AtomicReader reader : mergeState.readers) {
               BinaryDocValues values = reader.getBinaryDocValues(field.name);
               Bits bits = reader.getDocsWithField(field.name);
@@ -183,7 +183,7 @@ final class SegmentMerger {
             }
             consumer.mergeBinaryField(field, mergeState, toMerge, docsWithField);
           } else if (type == DocValuesType.SORTED) {
-            List<SortedDocValues> toMerge = new ArrayList<SortedDocValues>();
+            List<SortedDocValues> toMerge = new ArrayList<>();
             for (AtomicReader reader : mergeState.readers) {
               SortedDocValues values = reader.getSortedDocValues(field.name);
               if (values == null) {
@@ -193,7 +193,7 @@ final class SegmentMerger {
             }
             consumer.mergeSortedField(field, mergeState, toMerge);
           } else if (type == DocValuesType.SORTED_SET) {
-            List<SortedSetDocValues> toMerge = new ArrayList<SortedSetDocValues>();
+            List<SortedSetDocValues> toMerge = new ArrayList<>();
             for (AtomicReader reader : mergeState.readers) {
               SortedSetDocValues values = reader.getSortedSetDocValues(field.name);
               if (values == null) {
@@ -223,8 +223,8 @@ final class SegmentMerger {
     try {
       for (FieldInfo field : mergeState.fieldInfos) {
         if (field.hasNorms()) {
-          List<NumericDocValues> toMerge = new ArrayList<NumericDocValues>();
-          List<Bits> docsWithField = new ArrayList<Bits>();
+          List<NumericDocValues> toMerge = new ArrayList<>();
+          List<Bits> docsWithField = new ArrayList<>();
           for (AtomicReader reader : mergeState.readers) {
             NumericDocValues norms = reader.getNormValues(field.name);
             if (norms == null) {
@@ -358,8 +358,8 @@ final class SegmentMerger {
 
   private void mergeTerms(SegmentWriteState segmentWriteState) throws IOException {
     
-    final List<Fields> fields = new ArrayList<Fields>();
-    final List<ReaderSlice> slices = new ArrayList<ReaderSlice>();
+    final List<Fields> fields = new ArrayList<>();
+    final List<ReaderSlice> slices = new ArrayList<>();
 
     int docBase = 0;
 

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java Wed Mar 12 14:39:17 2014
@@ -60,22 +60,22 @@ public final class SegmentReader extends
   final CloseableThreadLocal<Map<String,Object>> docValuesLocal = new CloseableThreadLocal<Map<String,Object>>() {
     @Override
     protected Map<String,Object> initialValue() {
-      return new HashMap<String,Object>();
+      return new HashMap<>();
     }
   };
 
   final CloseableThreadLocal<Map<String,Bits>> docsWithFieldLocal = new CloseableThreadLocal<Map<String,Bits>>() {
     @Override
     protected Map<String,Bits> initialValue() {
-      return new HashMap<String,Bits>();
+      return new HashMap<>();
     }
   };
 
-  final Map<String,DocValuesProducer> dvProducers = new HashMap<String,DocValuesProducer>();
+  final Map<String,DocValuesProducer> dvProducers = new HashMap<>();
   
   final FieldInfos fieldInfos;
 
-  private final List<Long> dvGens = new ArrayList<Long>();
+  private final List<Long> dvGens = new ArrayList<>();
   
   /**
    * Constructs a new SegmentReader with a new core.
@@ -221,7 +221,7 @@ public final class SegmentReader extends
   
   // returns a gen->List<FieldInfo> mapping. Fields without DV updates have gen=-1
   private Map<Long,List<FieldInfo>> getGenInfos() {
-    final Map<Long,List<FieldInfo>> genInfos = new HashMap<Long,List<FieldInfo>>();
+    final Map<Long,List<FieldInfo>> genInfos = new HashMap<>();
     for (FieldInfo fi : fieldInfos) {
       if (fi.getDocValuesType() == null) {
         continue;
@@ -229,7 +229,7 @@ public final class SegmentReader extends
       long gen = fi.getDocValuesGen();
       List<FieldInfo> infos = genInfos.get(gen);
       if (infos == null) {
-        infos = new ArrayList<FieldInfo>();
+        infos = new ArrayList<>();
         genInfos.put(gen, infos);
       }
       infos.add(fi);
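
Note what the SegmentCoreReaders.java and SegmentReader.java hunks leave
alone: the anonymous subclasses of CloseableThreadLocal keep their explicit
type arguments, and only the HashMap allocations inside them change. Java 7
does not permit the diamond on anonymous classes; that arrived with Java 9
(JEP 213). A standalone sketch using java.lang.ThreadLocal in place of
Lucene's CloseableThreadLocal:

    import java.util.HashMap;
    import java.util.Map;

    public class AnonymousDiamondDemo {
      // The anonymous subclass must spell out its type arguments on
      // Java 7/8; "new ThreadLocal<>() { ... }" compiles only from Java 9.
      private final ThreadLocal<Map<String, Object>> local =
          new ThreadLocal<Map<String, Object>>() {
            @Override
            protected Map<String, Object> initialValue() {
              return new HashMap<>();  // diamond is fine inside the body
            }
          };

      public static void main(String[] args) {
        AnonymousDiamondDemo demo = new AnonymousDiamondDemo();
        demo.local.get().put("key", 42);
        System.out.println(demo.local.get());  // prints: {key=42}
      }
    }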

Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java Wed Mar 12 14:39:17 2014
@@ -180,7 +180,7 @@ public final class SlowCompositeReaderWr
   
   // TODO: this could really be a weak map somewhere else on the coreCacheKey,
   // but do we really need to optimize slow-wrapper any more?
-  private final Map<String,OrdinalMap> cachedOrdMaps = new HashMap<String,OrdinalMap>();
+  private final Map<String,OrdinalMap> cachedOrdMaps = new HashMap<>();
 
   @Override
   public NumericDocValues getNormValues(String field) throws IOException {