Posted to commits@lucene.apache.org by jp...@apache.org on 2019/10/14 17:57:29 UTC

[lucene-solr] branch branch_8x updated (d82682c -> e5ca494)

This is an automated email from the ASF dual-hosted git repository.

jpountz pushed a change to branch branch_8x
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git.


    from d82682c  LUCENE-8746: Refactor EdgeTree  (#878)
     new da73a6a  LUCENE-8920: Disable direct addressing of arcs. (#950)
     new dad15ba  LUCENE-9001: Fix race condition in SetOnce (#931)
     new 93f9a09  LUCENE-8979: Code Cleanup: Use entryset for map iteration wherever possible. - part 2
     new 575f8a6  LUCENE-8994: Code Cleanup - Pass values to list constructor instead of empty constructor followed by addAll(). (#919)
     new e5ca494  LUCENE-9003: Compute numDocs() lazily. (#939)

The 5 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 lucene/CHANGES.txt                                 |  19 ++--
 .../analysis/query/QueryAutoStopWordAnalyzer.java  |   8 +-
 .../analysis/ja/dict/TokenInfoDictionary$fst.dat   | Bin 1698570 -> 1698570 bytes
 .../analysis/ko/dict/TokenInfoDictionary$fst.dat   | Bin 5641400 -> 5640903 bytes
 .../lucene/benchmark/byTask/utils/Config.java      |  19 ++--
 .../blocktreeords/OrdsBlockTreeTermsReader.java    |   3 +-
 .../codecs/bloom/BloomFilteringPostingsFormat.java |   3 +-
 .../lucene/codecs/memory/FSTOrdTermsReader.java    |   3 +-
 .../lucene/codecs/memory/FSTTermsReader.java       |   3 +-
 .../codecs/blocktree/BlockTreeTermsReader.java     |   3 +-
 .../apache/lucene/index/BaseCompositeReader.java   |  22 ++++-
 .../java/org/apache/lucene/index/IndexReader.java  |   7 +-
 .../org/apache/lucene/search/BooleanQuery.java     |  11 ++-
 .../src/java/org/apache/lucene/util/SetOnce.java   |  36 ++++---
 .../java/org/apache/lucene/util/fst/Builder.java   |   3 -
 .../src/java/org/apache/lucene/util/fst/FST.java   | 110 +++------------------
 .../lucene/index/TestFilterDirectoryReader.java    |  72 ++++++++++++++
 .../test/org/apache/lucene/util/TestSetOnce.java   |   9 ++
 .../monitor/MultipassTermFilteredPresearcher.java  |   4 +-
 .../lucene/monitor/TermFilteredPresearcher.java    |   4 +-
 .../builders/MultiPhraseQueryNodeBuilder.java      |  10 +-
 .../lucene/replicator/IndexReplicationHandler.java |   3 +-
 .../idversion/VersionBlockTreeTermsReader.java     |   3 +-
 .../lucene/spatial3d/geom/StandardObjects.java     |   4 +-
 .../search/suggest/document/ContextQuery.java      |   6 +-
 .../dependencies/GetMavenDependenciesTask.java     |  22 +++--
 .../solr/analytics/AnalyticsRequestManager.java    |   6 +-
 .../solr/handler/dataimport/RegexTransformer.java  |   3 +-
 .../solr/prometheus/collector/MetricSamples.java   |   7 +-
 .../solr/response/VelocityResponseWriter.java      |   5 +-
 .../client/solrj/embedded/JettySolrRunner.java     |  10 +-
 .../apache/solr/cloud/OverseerTaskProcessor.java   |   6 +-
 .../java/org/apache/solr/cloud/ZkController.java   |   3 +-
 .../java/org/apache/solr/cloud/ZkShardTerms.java   |   5 +-
 .../cloud/api/collections/DeleteReplicaCmd.java    |   5 +-
 .../api/collections/ReindexCollectionCmd.java      |   3 +-
 .../autoscaling/sim/SimNodeStateProvider.java      |   3 +-
 .../src/java/org/apache/solr/core/SolrCores.java   |  11 +--
 .../solr/handler/admin/CoreAdminHandler.java       |   6 +-
 .../handler/admin/SegmentsInfoRequestHandler.java  |   3 +-
 .../solr/handler/component/SearchHandler.java      |   3 +-
 .../solr/handler/component/TermsComponent.java     |  16 +--
 .../apache/solr/handler/loader/CSVLoaderBase.java  |   6 +-
 .../solr/response/PHPSerializedResponseWriter.java |  11 ++-
 .../src/java/org/apache/solr/rest/RestManager.java |   5 +-
 .../analysis/ManagedSynonymFilterFactory.java      |  23 +++--
 .../analysis/ManagedSynonymGraphFilterFactory.java |   6 +-
 .../solr/schema/FileExchangeRateProvider.java      |   6 +-
 .../TopGroupsShardRequestFactory.java              |   3 +-
 .../SearchGroupShardResponseProcessor.java         |  10 +-
 .../TopGroupsShardResponseProcessor.java           |  18 ++--
 .../java/org/apache/solr/servlet/HttpSolrCall.java |   3 +-
 .../AddSchemaFieldsUpdateProcessorFactory.java     |  14 +--
 .../CloneFieldUpdateProcessorFactory.java          |   4 +-
 .../java/org/apache/solr/util/SimplePostTool.java  |   6 +-
 .../src/java/org/apache/solr/util/SolrCLI.java     |   3 +-
 .../java/org/apache/solr/util/SolrPluginUtils.java |   8 +-
 .../client/solrj/impl/BaseCloudSolrClient.java     |   7 +-
 .../org/apache/solr/client/solrj/io/Tuple.java     |   3 +-
 .../client/solrj/io/eval/SetValueEvaluator.java    |   3 +-
 .../client/solrj/io/eval/TermVectorsEvaluator.java |   3 +-
 .../client/solrj/io/graph/ShortestPathStream.java  |   3 +-
 .../solr/client/solrj/io/ops/GroupOperation.java   |   6 +-
 .../client/solrj/io/stream/CloudSolrStream.java    |   6 +-
 .../client/solrj/io/stream/DeepRandomStream.java   |   6 +-
 .../solrj/io/stream/FeaturesSelectionStream.java   |   4 +-
 .../solrj/io/stream/SignificantTermsStream.java    |  10 +-
 .../solr/client/solrj/io/stream/StatsStream.java   |   6 +-
 .../client/solrj/io/stream/TextLogitStream.java    |   4 +-
 .../solr/client/solrj/io/stream/ZplotStream.java   |   6 +-
 .../solrj/response/schema/SchemaResponse.java      |   9 +-
 .../solr/cloud/AbstractFullDistribZkTestBase.java  |   3 +-
 72 files changed, 352 insertions(+), 335 deletions(-)


[lucene-solr] 01/05: LUCENE-8920: Disable direct addressing of arcs. (#950)

Posted by jp...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jpountz pushed a commit to branch branch_8x
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit da73a6aff511b85df306f8066c2d7d7b6453bc77
Author: Adrien Grand <jp...@gmail.com>
AuthorDate: Mon Oct 14 18:30:36 2019 +0200

    LUCENE-8920: Disable direct addressing of arcs. (#950)
---
 .../analysis/ja/dict/TokenInfoDictionary$fst.dat   | Bin 1698570 -> 1698570 bytes
 .../analysis/ko/dict/TokenInfoDictionary$fst.dat   | Bin 5641400 -> 5640903 bytes
 .../java/org/apache/lucene/util/fst/Builder.java   |   3 -
 .../src/java/org/apache/lucene/util/fst/FST.java   | 110 +++------------------
 4 files changed, 11 insertions(+), 102 deletions(-)

diff --git a/lucene/analysis/kuromoji/src/resources/org/apache/lucene/analysis/ja/dict/TokenInfoDictionary$fst.dat b/lucene/analysis/kuromoji/src/resources/org/apache/lucene/analysis/ja/dict/TokenInfoDictionary$fst.dat
index 9328c53..c06fd4a 100644
Binary files a/lucene/analysis/kuromoji/src/resources/org/apache/lucene/analysis/ja/dict/TokenInfoDictionary$fst.dat and b/lucene/analysis/kuromoji/src/resources/org/apache/lucene/analysis/ja/dict/TokenInfoDictionary$fst.dat differ
diff --git a/lucene/analysis/nori/src/resources/org/apache/lucene/analysis/ko/dict/TokenInfoDictionary$fst.dat b/lucene/analysis/nori/src/resources/org/apache/lucene/analysis/ko/dict/TokenInfoDictionary$fst.dat
index 4bacb9b..fa0cb32 100644
Binary files a/lucene/analysis/nori/src/resources/org/apache/lucene/analysis/ko/dict/TokenInfoDictionary$fst.dat and b/lucene/analysis/nori/src/resources/org/apache/lucene/analysis/ko/dict/TokenInfoDictionary$fst.dat differ
diff --git a/lucene/core/src/java/org/apache/lucene/util/fst/Builder.java b/lucene/core/src/java/org/apache/lucene/util/fst/Builder.java
index bb9a682..c54b144 100644
--- a/lucene/core/src/java/org/apache/lucene/util/fst/Builder.java
+++ b/lucene/core/src/java/org/apache/lucene/util/fst/Builder.java
@@ -50,9 +50,6 @@ import org.apache.lucene.util.fst.FST.INPUT_TYPE; // javadoc
 
 public class Builder<T> {
 
-  // The amount of Arc array oversizing used to enable direct addressing of Arcs by their labels
-  static final int DIRECT_ARC_LOAD_FACTOR = 4;
-
   private final NodeHash<T> dedupHash;
   final FST<T> fst;
   private final T NO_OUTPUT;
diff --git a/lucene/core/src/java/org/apache/lucene/util/fst/FST.java b/lucene/core/src/java/org/apache/lucene/util/fst/FST.java
index 26f5e51..e0692b4 100644
--- a/lucene/core/src/java/org/apache/lucene/util/fst/FST.java
+++ b/lucene/core/src/java/org/apache/lucene/util/fst/FST.java
@@ -88,8 +88,6 @@ public final class FST<T> implements Accountable {
   // this means either of these things in different contexts
   // in the midst of a direct array:
   private static final byte BIT_MISSING_ARC = 1 << 6;
-  // at the start of a direct array:
-  private static final byte ARCS_AS_ARRAY_WITH_GAPS = BIT_MISSING_ARC;
 
   /**
    * @see #shouldExpand(Builder, Builder.UnCompiledNode)
@@ -109,7 +107,7 @@ public final class FST<T> implements Accountable {
   // Increment version to change it
   private static final String FILE_FORMAT_NAME = "FST";
   private static final int VERSION_START = 6;
-  private static final int VERSION_CURRENT = 7;
+  private static final int VERSION_CURRENT = VERSION_START;
 
   // Never serialized; just used to represent the virtual
   // final node w/ no arcs:
@@ -645,35 +643,19 @@ public final class FST<T> implements Accountable {
       assert maxBytesPerArc > 0;
       // 2nd pass just "expands" all arcs to take up a fixed byte size
 
-      // If more than (1 / DIRECT_ARC_LOAD_FACTOR) of the "slots" would be occupied, write an arc
-      // array that may have holes in it so that we can address the arcs directly by label without
-      // binary search
-      int labelRange = nodeIn.arcs[nodeIn.numArcs - 1].label - nodeIn.arcs[0].label + 1;
-      boolean writeDirectly = labelRange > 0 && labelRange < Builder.DIRECT_ARC_LOAD_FACTOR * nodeIn.numArcs;
-
-      //System.out.println("write int @pos=" + (fixedArrayStart-4) + " numArcs=" + nodeIn.numArcs);
       // create the header
       // TODO: clean this up: or just rewind+reuse and deal with it
       byte header[] = new byte[MAX_HEADER_SIZE]; 
       ByteArrayDataOutput bad = new ByteArrayDataOutput(header);
       // write a "false" first arc:
-      if (writeDirectly) {
-        bad.writeByte(ARCS_AS_ARRAY_WITH_GAPS);
-        bad.writeVInt(labelRange);
-      } else {
-        bad.writeByte(ARCS_AS_ARRAY_PACKED);
-        bad.writeVInt(nodeIn.numArcs);
-      }
+      bad.writeByte(ARCS_AS_ARRAY_PACKED);
+      bad.writeVInt(nodeIn.numArcs);
       bad.writeVInt(maxBytesPerArc);
       int headerLen = bad.getPosition();
       
       final long fixedArrayStart = startAddress + headerLen;
 
-      if (writeDirectly) {
-        writeArrayWithGaps(builder, nodeIn, fixedArrayStart, maxBytesPerArc, labelRange);
-      } else {
-        writeArrayPacked(builder, nodeIn, fixedArrayStart, maxBytesPerArc);
-      }
+      writeArrayPacked(builder, nodeIn, fixedArrayStart, maxBytesPerArc);
       
       // now write the header
       builder.bytes.writeBytes(startAddress, header, 0, headerLen);
@@ -707,45 +689,7 @@ public final class FST<T> implements Accountable {
     }
   }
 
-  private void writeArrayWithGaps(Builder<T> builder, Builder.UnCompiledNode<T> nodeIn, long fixedArrayStart, int maxBytesPerArc, int labelRange) {
-    // expand the arcs in place, backwards
-    long srcPos = builder.bytes.getPosition();
-    long destPos = fixedArrayStart + labelRange * maxBytesPerArc;
-    // if destPos == srcPos it means all the arcs were the same length, and the array of them is *already* direct
-    assert destPos >= srcPos;
-    if (destPos > srcPos) {
-      builder.bytes.skipBytes((int) (destPos - srcPos));
-      int arcIdx = nodeIn.numArcs - 1;
-      int firstLabel = nodeIn.arcs[0].label;
-      int nextLabel = nodeIn.arcs[arcIdx].label;
-      for (int directArcIdx = labelRange - 1; directArcIdx >= 0; directArcIdx--) {
-        destPos -= maxBytesPerArc;
-        if (directArcIdx == nextLabel - firstLabel) {
-          int arcLen = builder.reusedBytesPerArc[arcIdx];
-          srcPos -= arcLen;
-          //System.out.println("  direct pack idx=" + directArcIdx + " arcIdx=" + arcIdx + " srcPos=" + srcPos + " destPos=" + destPos + " label=" + nextLabel);
-          if (srcPos != destPos) {
-            //System.out.println("  copy len=" + builder.reusedBytesPerArc[arcIdx]);
-            assert destPos > srcPos: "destPos=" + destPos + " srcPos=" + srcPos + " arcIdx=" + arcIdx + " maxBytesPerArc=" + maxBytesPerArc + " reusedBytesPerArc[arcIdx]=" + builder.reusedBytesPerArc[arcIdx] + " nodeIn.numArcs=" + nodeIn.numArcs;
-            builder.bytes.copyBytes(srcPos, destPos, arcLen);
-            if (arcIdx == 0) {
-              break;
-            }
-          }
-          --arcIdx;
-          nextLabel = nodeIn.arcs[arcIdx].label;
-        } else {
-          assert directArcIdx > arcIdx;
-          // mark this as a missing arc
-          //System.out.println("  direct pack idx=" + directArcIdx + " no arc");
-          builder.bytes.writeByte(destPos, BIT_MISSING_ARC);
-        }
-      }
-    }
-  }
-
-  /** Fills virtual 'start' arc, ie, an empty incoming arc to
-   *  the FST's start node */
+  /** Fills virtual 'start' arc, ie, an empty incoming arc to the FST's start node */
   public Arc<T> getFirstArc(Arc<T> arc) {
     T NO_OUTPUT = outputs.getNoOutput();
 
@@ -786,18 +730,13 @@ public final class FST<T> implements Accountable {
     } else {
       in.setPosition(follow.target);
       final byte b = in.readByte();
-      if (b == ARCS_AS_ARRAY_PACKED || b == ARCS_AS_ARRAY_WITH_GAPS) {
+      if (b == ARCS_AS_ARRAY_PACKED) {
         // array: jump straight to end
         arc.numArcs = in.readVInt();
         arc.bytesPerArc = in.readVInt();
         //System.out.println("  array numArcs=" + arc.numArcs + " bpa=" + arc.bytesPerArc);
         arc.posArcsStart = in.getPosition();
-        if (b == ARCS_AS_ARRAY_WITH_GAPS) {
-          arc.arcIdx = Integer.MIN_VALUE;
-          arc.nextArc = arc.posArcsStart - (arc.numArcs - 1) * arc.bytesPerArc;
-        } else {
-          arc.arcIdx = arc.numArcs - 2;
-        }
+        arc.arcIdx = arc.numArcs - 2;
       } else {
         arc.flags = b;
         // non-array: linear scan
@@ -868,7 +807,7 @@ public final class FST<T> implements Accountable {
     //System.out.println("   flags=" + arc.flags);
 
     byte flags = in.readByte();
-    if (flags == ARCS_AS_ARRAY_PACKED || flags == ARCS_AS_ARRAY_WITH_GAPS) {
+    if (flags == ARCS_AS_ARRAY_PACKED) {
       //System.out.println("  fixedArray");
       // this is first arc in a fixed-array
       arc.numArcs = in.readVInt();
@@ -901,7 +840,7 @@ public final class FST<T> implements Accountable {
     } else {
       in.setPosition(follow.target);
       byte flags = in.readByte();
-      return flags == ARCS_AS_ARRAY_PACKED || flags == ARCS_AS_ARRAY_WITH_GAPS;
+      return flags == ARCS_AS_ARRAY_PACKED;
     }
   }
 
@@ -931,7 +870,7 @@ public final class FST<T> implements Accountable {
       in.setPosition(pos);
 
       final byte flags = in.readByte();
-      if (flags == ARCS_AS_ARRAY_PACKED || flags == ARCS_AS_ARRAY_WITH_GAPS) {
+      if (flags == ARCS_AS_ARRAY_PACKED) {
         //System.out.println("    nextArc fixed array");
         in.readVInt();
 
@@ -1140,34 +1079,7 @@ public final class FST<T> implements Accountable {
     // System.out.println("fta label=" + (char) labelToMatch);
 
     byte flags = in.readByte();
-    if (flags == ARCS_AS_ARRAY_WITH_GAPS) {
-      arc.numArcs = in.readVInt();
-      arc.bytesPerArc = in.readVInt();
-      arc.posArcsStart = in.getPosition();
-
-      // Array is direct; address by label
-      in.skipBytes(1);
-      int firstLabel = readLabel(in);
-      int arcPos = labelToMatch - firstLabel;
-      if (arcPos == 0) {
-        arc.nextArc = arc.posArcsStart;
-      } else if (arcPos > 0) {
-        if (arcPos >= arc.numArcs) {
-          return null;
-        }
-        in.setPosition(arc.posArcsStart - arc.bytesPerArc * arcPos);
-        flags = in.readByte();
-        if (flag(flags, BIT_MISSING_ARC)) {
-          return null;
-        }
-        // point to flags that we just read
-        arc.nextArc = in.getPosition() + 1;
-      } else {
-        return null;
-      }
-      arc.arcIdx = Integer.MIN_VALUE;
-      return readNextRealArc(arc, in);
-    } else if (flags == ARCS_AS_ARRAY_PACKED) {
+    if (flags == ARCS_AS_ARRAY_PACKED) {
       arc.numArcs = in.readVInt();
       arc.bytesPerArc = in.readVInt();
       arc.posArcsStart = in.getPosition();


[lucene-solr] 04/05: LUCENE-8994: Code Cleanup - Pass values to list constructor instead of empty constructor followed by addAll(). (#919)

Posted by jp...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jpountz pushed a commit to branch branch_8x
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 575f8a6ad8f462b0a61f0ea0d092bfebab1afce2
Author: Koen De Groote <kd...@gmail.com>
AuthorDate: Mon Oct 14 18:45:47 2019 +0200

    LUCENE-8994: Code Cleanup - Pass values to list constructor instead of empty constructor followed by addAll(). (#919)
---
 lucene/CHANGES.txt                                            |  3 +++
 .../lucene/codecs/blocktreeords/OrdsBlockTreeTermsReader.java |  3 +--
 .../lucene/codecs/bloom/BloomFilteringPostingsFormat.java     |  3 +--
 .../org/apache/lucene/codecs/memory/FSTOrdTermsReader.java    |  3 +--
 .../java/org/apache/lucene/codecs/memory/FSTTermsReader.java  |  3 +--
 .../apache/lucene/codecs/blocktree/BlockTreeTermsReader.java  |  3 +--
 .../org/apache/lucene/replicator/IndexReplicationHandler.java |  3 +--
 .../lucene/codecs/idversion/VersionBlockTreeTermsReader.java  |  3 +--
 .../apache/lucene/dependencies/GetMavenDependenciesTask.java  |  3 +--
 .../org/apache/solr/handler/dataimport/RegexTransformer.java  |  3 +--
 solr/core/src/java/org/apache/solr/cloud/ZkController.java    |  3 +--
 .../solr/cloud/api/collections/ReindexCollectionCmd.java      |  3 +--
 .../solr/cloud/autoscaling/sim/SimNodeStateProvider.java      |  3 +--
 solr/core/src/java/org/apache/solr/core/SolrCores.java        | 11 +++++------
 .../apache/solr/handler/admin/SegmentsInfoRequestHandler.java |  3 +--
 .../java/org/apache/solr/handler/component/SearchHandler.java |  3 +--
 .../rest/schema/analysis/ManagedSynonymFilterFactory.java     |  8 +++-----
 .../schema/analysis/ManagedSynonymGraphFilterFactory.java     |  6 ++----
 solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java  |  3 +--
 solr/core/src/java/org/apache/solr/util/SolrCLI.java          |  3 +--
 .../apache/solr/client/solrj/impl/BaseCloudSolrClient.java    |  3 +--
 .../solrj/src/java/org/apache/solr/client/solrj/io/Tuple.java |  3 +--
 .../apache/solr/client/solrj/io/eval/SetValueEvaluator.java   |  3 +--
 .../solr/client/solrj/io/eval/TermVectorsEvaluator.java       |  3 +--
 .../apache/solr/client/solrj/io/graph/ShortestPathStream.java |  3 +--
 .../org/apache/solr/client/solrj/io/ops/GroupOperation.java   |  6 ++----
 .../solr/client/solrj/response/schema/SchemaResponse.java     |  9 +++------
 .../org/apache/solr/cloud/AbstractFullDistribZkTestBase.java  |  3 +--
 28 files changed, 40 insertions(+), 69 deletions(-)
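
The pattern this cleanup applies is mechanical: wherever a collection was
created empty and then immediately filled via addAll() or putAll(), the
source collection is now passed to the constructor instead. A minimal
illustration (not taken from the patch itself):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import java.util.Set;
    import java.util.TreeSet;

    public class CopyConstructorSketch {
      public static void main(String[] args) {
        List<String> source = Arrays.asList("b", "a", "c");

        // Before: empty constructor followed by addAll().
        List<String> before = new ArrayList<>();
        before.addAll(source);

        // After: pass the values straight to the constructor. Besides being
        // shorter, ArrayList can size its backing array once up front
        // instead of growing it during addAll().
        List<String> after = new ArrayList<>(source);

        // The same rewrite applies to the TreeSet/HashMap sites in the diff.
        Set<String> sorted = new TreeSet<>(source);

        System.out.println(before.equals(after)); // true
        System.out.println(sorted);               // [a, b, c]
      }
    }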

diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 251432a..16ad6a4 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -35,6 +35,8 @@ Other
 * LUCENE-8746: Refactor EdgeTree - Introduce a Component tree that represents the tree of components (e.g polygons).
   Edge tree is now just a tree of edges. (Ignacio Vera)
 
+* LUCENE-8994: Code Cleanup - Pass values to list constructor instead of empty constructor followed by addAll(). (Koen De Groote)
+
 Build
 
 * Upgrade forbiddenapis to version 2.7; upgrade Groovy to 2.4.17.  (Uwe Schindler)
@@ -171,6 +173,7 @@ Other
 * LUCENE-8999: LuceneTestCase.expectThrows now propogates assert/assumption failures up to the test
   w/o wrapping in a new assertion failure unless the caller has explicitly expected them (hossman)
 
+
 ======================= Lucene 8.2.0 =======================
 
 API Changes
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/OrdsBlockTreeTermsReader.java b/lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/OrdsBlockTreeTermsReader.java
index e07cee0..3350e46 100644
--- a/lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/OrdsBlockTreeTermsReader.java
+++ b/lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/OrdsBlockTreeTermsReader.java
@@ -237,8 +237,7 @@ public final class OrdsBlockTreeTermsReader extends FieldsProducer {
   
   @Override
   public Collection<Accountable> getChildResources() {
-    List<Accountable> resources = new ArrayList<>();
-    resources.addAll(Accountables.namedAccountables("field", fields));
+    List<Accountable> resources = new ArrayList<>(Accountables.namedAccountables("field", fields));
     resources.add(Accountables.namedAccountable("delegate", postingsReader));
     return Collections.unmodifiableList(resources);
   }
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/bloom/BloomFilteringPostingsFormat.java b/lucene/codecs/src/java/org/apache/lucene/codecs/bloom/BloomFilteringPostingsFormat.java
index b9a2399..0bb09f3 100644
--- a/lucene/codecs/src/java/org/apache/lucene/codecs/bloom/BloomFilteringPostingsFormat.java
+++ b/lucene/codecs/src/java/org/apache/lucene/codecs/bloom/BloomFilteringPostingsFormat.java
@@ -392,8 +392,7 @@ public final class BloomFilteringPostingsFormat extends PostingsFormat {
 
     @Override
     public Collection<Accountable> getChildResources() {
-      List<Accountable> resources = new ArrayList<>();
-      resources.addAll(Accountables.namedAccountables("field", bloomsByFieldName));
+      List<Accountable> resources = new ArrayList<>(Accountables.namedAccountables("field", bloomsByFieldName));
       if (delegateFieldsProducer != null) {
         resources.add(Accountables.namedAccountable("delegate", delegateFieldsProducer));
       }
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTOrdTermsReader.java b/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTOrdTermsReader.java
index 12110d9..0064c24 100644
--- a/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTOrdTermsReader.java
+++ b/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTOrdTermsReader.java
@@ -864,8 +864,7 @@ public class FSTOrdTermsReader extends FieldsProducer {
   
   @Override
   public Collection<Accountable> getChildResources() {
-    List<Accountable> resources = new ArrayList<>();
-    resources.addAll(Accountables.namedAccountables("field", fields));
+    List<Accountable> resources = new ArrayList<>(Accountables.namedAccountables("field", fields));
     resources.add(Accountables.namedAccountable("delegate", postingsReader));
     return Collections.unmodifiableList(resources);
   }
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsReader.java b/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsReader.java
index 43528ce..a79d21c 100644
--- a/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsReader.java
+++ b/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsReader.java
@@ -765,8 +765,7 @@ public class FSTTermsReader extends FieldsProducer {
   
   @Override
   public Collection<Accountable> getChildResources() {
-    List<Accountable> resources = new ArrayList<>();
-    resources.addAll(Accountables.namedAccountables("field", fields));
+    List<Accountable> resources = new ArrayList<>(Accountables.namedAccountables("field", fields));
     resources.add(Accountables.namedAccountable("delegate", postingsReader));
     return Collections.unmodifiableCollection(resources);
   }
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/blocktree/BlockTreeTermsReader.java b/lucene/core/src/java/org/apache/lucene/codecs/blocktree/BlockTreeTermsReader.java
index 0e9fe6b..3713452 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/blocktree/BlockTreeTermsReader.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/blocktree/BlockTreeTermsReader.java
@@ -336,8 +336,7 @@ public final class BlockTreeTermsReader extends FieldsProducer {
 
   @Override
   public Collection<Accountable> getChildResources() {
-    List<Accountable> resources = new ArrayList<>();
-    resources.addAll(Accountables.namedAccountables("field", fields));
+    List<Accountable> resources = new ArrayList<>(Accountables.namedAccountables("field", fields));
     resources.add(Accountables.namedAccountable("delegate", postingsReader));
     return Collections.unmodifiableList(resources);
   }
diff --git a/lucene/replicator/src/java/org/apache/lucene/replicator/IndexReplicationHandler.java b/lucene/replicator/src/java/org/apache/lucene/replicator/IndexReplicationHandler.java
index fbb4a26..b666db0 100644
--- a/lucene/replicator/src/java/org/apache/lucene/replicator/IndexReplicationHandler.java
+++ b/lucene/replicator/src/java/org/apache/lucene/replicator/IndexReplicationHandler.java
@@ -144,8 +144,7 @@ public class IndexReplicationHandler implements ReplicationHandler {
       // if there were any IO errors reading the expected commit point (i.e.
       // segments files mismatch), then ignore that commit either.
       if (commit != null && commit.getSegmentsFileName().equals(segmentsFile)) {
-        Set<String> commitFiles = new HashSet<>();
-        commitFiles.addAll(commit.getFileNames());
+        Set<String> commitFiles = new HashSet<>(commit.getFileNames());
         Matcher matcher = IndexFileNames.CODEC_FILE_PATTERN.matcher("");
         for (String file : dir.listAll()) {
           if (!commitFiles.contains(file)
diff --git a/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/VersionBlockTreeTermsReader.java b/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/VersionBlockTreeTermsReader.java
index b33b258..8001a22 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/VersionBlockTreeTermsReader.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/VersionBlockTreeTermsReader.java
@@ -233,8 +233,7 @@ public final class VersionBlockTreeTermsReader extends FieldsProducer {
   
   @Override
   public Collection<Accountable> getChildResources() {
-    List<Accountable> resources = new ArrayList<>();
-    resources.addAll(Accountables.namedAccountables("field", fields));
+    List<Accountable> resources = new ArrayList<>(Accountables.namedAccountables("field", fields));
     resources.add(Accountables.namedAccountable("delegate", postingsReader));
     return Collections.unmodifiableList(resources);
   }
diff --git a/lucene/tools/src/java/org/apache/lucene/dependencies/GetMavenDependenciesTask.java b/lucene/tools/src/java/org/apache/lucene/dependencies/GetMavenDependenciesTask.java
index df3668d..570016c 100644
--- a/lucene/tools/src/java/org/apache/lucene/dependencies/GetMavenDependenciesTask.java
+++ b/lucene/tools/src/java/org/apache/lucene/dependencies/GetMavenDependenciesTask.java
@@ -443,8 +443,7 @@ public class GetMavenDependenciesTask extends Task {
   private void appendAllInternalDependencies(StringBuilder builder) {
     for (Map.Entry<String, SortedSet<String>> entry : internalCompileScopeDependencies.entrySet()) {
       String artifactId = entry.getKey();
-      List<String> exclusions = new ArrayList<>();
-      exclusions.addAll(entry.getValue());
+      List<String> exclusions = new ArrayList<>(entry.getValue());
       SortedSet<ExternalDependency> extDeps = allExternalDependencies.get(artifactId);
       if (null != extDeps) {
         for (ExternalDependency externalDependency : extDeps) {
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RegexTransformer.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RegexTransformer.java
index 7a919de..719deca 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RegexTransformer.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RegexTransformer.java
@@ -131,8 +131,7 @@ public class RegexTransformer extends Transformer {
   @SuppressWarnings("unchecked")
   private List<String> readBySplit(String splitBy, String value) {
     String[] vals = value.split(splitBy);
-    List<String> l = new ArrayList<>();
-    l.addAll(Arrays.asList(vals));
+    List<String> l = new ArrayList<>(Arrays.asList(vals));
     return l;
   }
 
diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkController.java b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
index ecad280..6ff2d5f 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ZkController.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
@@ -2017,8 +2017,7 @@ public class ZkController implements Closeable {
     ZkNodeProps props = null;
     if (data != null) {
       props = ZkNodeProps.load(data);
-      Map<String, Object> newProps = new HashMap<>();
-      newProps.putAll(props.getProperties());
+      Map<String, Object> newProps = new HashMap<>(props.getProperties());
       newProps.put(CONFIGNAME_PROP, confSetName);
       props = new ZkNodeProps(newProps);
     } else {
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java
index da9bacb..6059d3b 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java
@@ -521,11 +521,10 @@ public class ReindexCollectionCmd implements OverseerCollectionMessageHandler.Cm
   private Map<String, Object> setReindexingState(String collection, State state, Map<String, Object> props) throws Exception {
     String path = ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection + REINDEXING_STATE_PATH;
     DistribStateManager stateManager = ocmh.cloudManager.getDistribStateManager();
-    Map<String, Object> copyProps = new HashMap<>();
     if (props == null) { // retrieve existing props, if any
       props = Utils.getJson(stateManager, path);
     }
-    copyProps.putAll(props);
+    Map<String, Object> copyProps = new HashMap<>(props);
     copyProps.put("state", state.toLower());
     if (stateManager.hasData(path)) {
       stateManager.setData(path, Utils.toJSON(copyProps), -1);
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java
index e1df6fd..3a9d775 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java
@@ -313,9 +313,8 @@ public class SimNodeStateProvider implements NodeStateProvider {
     if (tags.isEmpty()) {
       return new HashMap<>();
     }
-    Map<String, Object> result = new HashMap<>();
     Map<String, Object> metrics = getReplicaMetricsValues(node, tags.stream().filter(s -> s.startsWith("metrics:solr.core.")).collect(Collectors.toList()));
-    result.putAll(metrics);
+    Map<String, Object> result = new HashMap<>(metrics);
     Map<String, Object> values = nodeValues.get(node);
     if (values == null) {
       return result;
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCores.java b/solr/core/src/java/org/apache/solr/core/SolrCores.java
index 1347830..3fdb618 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCores.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCores.java
@@ -182,10 +182,9 @@ class SolrCores {
    */
 
   List<SolrCore> getCores() {
-    List<SolrCore> lst = new ArrayList<>();
 
     synchronized (modifyLock) {
-      lst.addAll(cores.values());
+      List<SolrCore> lst = new ArrayList<>(cores.values());
       return lst;
     }
   }
@@ -201,10 +200,10 @@ class SolrCores {
    * @return List of currently loaded cores.
    */
   Set<String> getLoadedCoreNames() {
-    Set<String> set = new TreeSet<>();
+    Set<String> set;
 
     synchronized (modifyLock) {
-      set.addAll(cores.keySet());
+      set = new TreeSet<>(cores.keySet());
       if (getTransientCacheHandler() != null) {
         set.addAll(getTransientCacheHandler().getLoadedCoreNames());
       }
@@ -239,9 +238,9 @@ class SolrCores {
    * @return all cores names, whether loaded or unloaded, transient or permanent.
    */
   public Collection<String> getAllCoreNames() {
-    Set<String> set = new TreeSet<>();
+    Set<String> set;
     synchronized (modifyLock) {
-      set.addAll(cores.keySet());
+      set = new TreeSet<>(cores.keySet());
       if (getTransientCacheHandler() != null) {
         set.addAll(getTransientCacheHandler().getAllCoreNames());
       }
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/SegmentsInfoRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/SegmentsInfoRequestHandler.java
index a7b044e..90ac7ce 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/SegmentsInfoRequestHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/SegmentsInfoRequestHandler.java
@@ -170,8 +170,7 @@ public class SegmentsInfoRequestHandler extends RequestHandlerBase {
       }
     }
     SimpleOrderedMap<Object> segmentInfo = null;
-    List<SegmentCommitInfo> sortable = new ArrayList<>();
-    sortable.addAll(infos.asList());
+    List<SegmentCommitInfo> sortable = new ArrayList<>(infos.asList());
     // Order by the number of live docs. The display is logarithmic so it is a little jumbled visually
     sortable.sort((s1, s2) ->
       (s2.info.maxDoc() - s2.getDelCount()) - (s1.info.maxDoc() - s1.getDelCount())
diff --git a/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java b/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java
index ecdc0ec..64b8c9a 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java
@@ -116,9 +116,8 @@ public class SearchHandler extends RequestHandlerBase implements SolrCoreAware ,
   public void inform(SolrCore core)
   {
     this.core = core;
-    Set<String> missing = new HashSet<>();
     List<String> c = (List<String>) initArgs.get(INIT_COMPONENTS);
-    missing.addAll(core.getSearchComponents().checkContains(c));
+    Set<String> missing = new HashSet<>(core.getSearchComponents().checkContains(c));
     List<String> first = (List<String>) initArgs.get(INIT_FIRST_COMPONENTS);
     missing.addAll(core.getSearchComponents().checkContains(first));
     List<String> last = (List<String>) initArgs.get(INIT_LAST_COMPONENTS);
diff --git a/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymFilterFactory.java b/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymFilterFactory.java
index 87878ce..fb4cc04 100644
--- a/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymFilterFactory.java
+++ b/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymFilterFactory.java
@@ -160,9 +160,8 @@ public class ManagedSynonymFilterFactory extends BaseManagedTokenFilterFactory {
                 "Invalid synonym file format! Expected a list of synonyms for "+key+
                 " but got "+mapping.getClass().getName());
           }
-                    
-          Set<String> sortedVals = new TreeSet<>();
-          sortedVals.addAll((List<String>) entry.getValue());
+
+          Set<String> sortedVals = new TreeSet<>((List<String>) entry.getValue());
           cpsm.mappings.put(key, sortedVals);        
         }
       }
@@ -195,8 +194,7 @@ public class ManagedSynonymFilterFactory extends BaseManagedTokenFilterFactory {
         if (cpsm == null)
           cpsm = new CasePreservedSynonymMappings();
 
-        Set<String> treeTerms = new TreeSet<>();
-        treeTerms.addAll(jsonList);
+        Set<String> treeTerms = new TreeSet<>(jsonList);
         cpsm.mappings.put(origTerm, treeTerms);
         madeChanges = true;
         // only add the cpsm to the synonymMappings if it has valid data
diff --git a/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymGraphFilterFactory.java b/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymGraphFilterFactory.java
index 6d7c1f5..b6472fe 100644
--- a/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymGraphFilterFactory.java
+++ b/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymGraphFilterFactory.java
@@ -156,8 +156,7 @@ public class ManagedSynonymGraphFilterFactory extends BaseManagedTokenFilterFact
                     " but got "+mapping.getClass().getName());
           }
 
-          Set<String> sortedVals = new TreeSet<>();
-          sortedVals.addAll((List<String>) entry.getValue());
+          Set<String> sortedVals = new TreeSet<>((List<String>) entry.getValue());
           cpsm.mappings.put(key, sortedVals);
         }
       }
@@ -190,8 +189,7 @@ public class ManagedSynonymGraphFilterFactory extends BaseManagedTokenFilterFact
         if (cpsm == null)
           cpsm = new CasePreservedSynonymMappings();
 
-        Set<String> treeTerms = new TreeSet<>();
-        treeTerms.addAll(jsonList);
+        Set<String> treeTerms = new TreeSet<>(jsonList);
         cpsm.mappings.put(origTerm, treeTerms);
         madeChanges = true;
         // only add the cpsm to the synonymMappings if it has valid data
diff --git a/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java b/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java
index 546a195..042c392 100644
--- a/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java
+++ b/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java
@@ -1012,8 +1012,7 @@ public class HttpSolrCall {
     Collections.shuffle(slices, random);
 
     for (Slice slice : slices) {
-      List<Replica> randomizedReplicas = new ArrayList<>();
-      randomizedReplicas.addAll(slice.getReplicas());
+      List<Replica> randomizedReplicas = new ArrayList<>(slice.getReplicas());
       Collections.shuffle(randomizedReplicas, random);
 
       for (Replica replica : randomizedReplicas) {
diff --git a/solr/core/src/java/org/apache/solr/util/SolrCLI.java b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
index a958316..1e7c691 100755
--- a/solr/core/src/java/org/apache/solr/util/SolrCLI.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
@@ -987,8 +987,7 @@ public class SolrCLI {
         cloudManager.saveSnapshot(targetDir, true, redact);
         System.err.println("- saved autoscaling snapshot to " + targetDir.getAbsolutePath());
       }
-      HashSet<String> liveNodes = new HashSet<>();
-      liveNodes.addAll(cloudManager.getClusterStateProvider().getLiveNodes());
+      HashSet<String> liveNodes = new HashSet<>(cloudManager.getClusterStateProvider().getLiveNodes());
       boolean withSuggestions = cli.hasOption("s");
       boolean withDiagnostics = cli.hasOption("d") || cli.hasOption("n");
       boolean withSortedNodes = cli.hasOption("n");
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseCloudSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseCloudSolrClient.java
index f00e20d..0461e67 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseCloudSolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseCloudSolrClient.java
@@ -587,8 +587,7 @@ public abstract class BaseCloudSolrClient extends SolrClient {
       }
       nonRoutableRequest.setParams(nonRoutableParams);
       nonRoutableRequest.setBasicAuthCredentials(request.getBasicAuthUser(), request.getBasicAuthPassword());
-      List<String> urlList = new ArrayList<>();
-      urlList.addAll(routes.keySet());
+      List<String> urlList = new ArrayList<>(routes.keySet());
       Collections.shuffle(urlList, rand);
       LBSolrClient.Req req = new LBSolrClient.Req(nonRoutableRequest, urlList);
       try {
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/Tuple.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/Tuple.java
index 1af5f08..56d86fe 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/Tuple.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/Tuple.java
@@ -194,8 +194,7 @@ public class Tuple implements Cloneable, MapWriter {
   }
 
   public Tuple clone() {
-    HashMap m = new HashMap();
-    m.putAll(fields);
+    HashMap m = new HashMap(fields);
     Tuple clone = new Tuple(m);
     return clone;
   }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SetValueEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SetValueEvaluator.java
index 9ffc32f..8ded259 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SetValueEvaluator.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SetValueEvaluator.java
@@ -47,8 +47,7 @@ public class SetValueEvaluator extends RecursiveObjectEvaluator implements ManyV
         value = ((String)value).replace("\"", "");
       }
       key = key.replace("\"", "");
-      Map map = new HashMap();
-      map.putAll(tuple.fields);
+      Map map = new HashMap(tuple.fields);
       map.put(key, value);
       return new Tuple(map);
     } else {
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TermVectorsEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TermVectorsEvaluator.java
index 7c09712..5d6dba9 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TermVectorsEvaluator.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TermVectorsEvaluator.java
@@ -87,7 +87,6 @@ public class TermVectorsEvaluator extends RecursiveObjectEvaluator implements Ma
 
       List<Tuple> tuples = (List<Tuple>) objects[0];
       TreeMap<String, Integer> docFreqs = new TreeMap();
-      List<String> features = new ArrayList();
       List<String> rowLabels = new ArrayList();
 
       for (Tuple tuple : tuples) {
@@ -148,7 +147,7 @@ public class TermVectorsEvaluator extends RecursiveObjectEvaluator implements Ma
       }
       int totalTerms = docFreqs.size();
       Set<String> keys = docFreqs.keySet();
-      features.addAll(keys);
+      List<String> features = new ArrayList(keys);
       double[][] docVec = new double[tuples.size()][];
       for (int t = 0; t < tuples.size(); t++) {
         Tuple tuple = tuples.get(t);
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/ShortestPathStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/ShortestPathStream.java
index c1dcceb..611417c 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/ShortestPathStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/ShortestPathStream.java
@@ -391,8 +391,7 @@ public class ShortestPathStream extends TupleStream implements Expressible {
             List<String> parents = v.get(p.peekFirst());
             if (parents != null) {
               for(String parent : parents) {
-                LinkedList newPath = new LinkedList();
-                newPath.addAll(p);
+                LinkedList newPath = new LinkedList(p);
                 newPath.addFirst(parent);
                 newPaths.add(newPath);
               }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/ops/GroupOperation.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/ops/GroupOperation.java
index a2bd8c9..3db76ec 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/ops/GroupOperation.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/ops/GroupOperation.java
@@ -106,17 +106,15 @@ public class GroupOperation implements ReduceOperation {
   }
 
   public Tuple reduce() {
-    Map map = new HashMap();
-    List<Map> list = new ArrayList();
     LinkedList ll = new LinkedList();
     while(priorityQueue.size() > 0) {
       ll.addFirst(priorityQueue.poll().getMap());
       //This will clear priority queue and so it will be ready for the next group.
     }
 
-    list.addAll(ll);
+    List<Map> list = new ArrayList(ll);
     Map groupHead = list.get(0);
-    map.putAll(groupHead);
+    Map map = new HashMap(groupHead);
     map.put("group", list);
     return new Tuple(map);
   }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/schema/SchemaResponse.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/schema/SchemaResponse.java
index 50a715c..eb9fdd5 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/schema/SchemaResponse.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/schema/SchemaResponse.java
@@ -169,8 +169,7 @@ public class SchemaResponse extends SolrResponseBase {
     List<Map<String, Object>> fieldsAttributes = new LinkedList<>();
     List<NamedList<Object>> fieldsResponse = (List<NamedList<Object>>) schemaNamedList.get("fields");
     for (NamedList<Object> fieldNamedList : fieldsResponse) {
-      Map<String, Object> fieldAttributes = new LinkedHashMap<>();
-      fieldAttributes.putAll(extractAttributeMap(fieldNamedList));
+      Map<String, Object> fieldAttributes = new LinkedHashMap<>(extractAttributeMap(fieldNamedList));
       fieldsAttributes.add(fieldAttributes);
     }
 
@@ -182,8 +181,7 @@ public class SchemaResponse extends SolrResponseBase {
     List<Map<String, Object>> dynamicFieldsAttributes = new LinkedList<>();
     List<NamedList<Object>> dynamicFieldsResponse = (List<NamedList<Object>>) schemaNamedList.get("dynamicFields");
     for (NamedList<Object> fieldNamedList : dynamicFieldsResponse) {
-      Map<String, Object> dynamicFieldAttributes = new LinkedHashMap<>();
-      dynamicFieldAttributes.putAll(extractAttributeMap(fieldNamedList));
+      Map<String, Object> dynamicFieldAttributes = new LinkedHashMap<>(extractAttributeMap(fieldNamedList));
       dynamicFieldsAttributes.add(dynamicFieldAttributes);
     }
 
@@ -195,8 +193,7 @@ public class SchemaResponse extends SolrResponseBase {
     List<Map<String, Object>> copyFieldsAttributes = new LinkedList<>();
     List<NamedList<Object>> copyFieldsResponse = (List<NamedList<Object>>) schemaNamedList.get("copyFields");
     for (NamedList<Object> copyFieldNamedList : copyFieldsResponse) {
-      Map<String, Object> copyFieldAttributes = new LinkedHashMap<>();
-      copyFieldAttributes.putAll(extractAttributeMap(copyFieldNamedList));
+      Map<String, Object> copyFieldAttributes = new LinkedHashMap<>(extractAttributeMap(copyFieldNamedList));
       copyFieldsAttributes.add(copyFieldAttributes);
     }
 
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
index d9de129..f2bd410 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
@@ -2158,8 +2158,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
 
     log.info("Took {} ms to see all replicas become active.", timer.getTime());
 
-    List<Replica> replicas = new ArrayList<>();
-    replicas.addAll(notLeaders.values());
+    List<Replica> replicas = new ArrayList<>(notLeaders.values());
     return replicas;
   }
 


[lucene-solr] 02/05: LUCENE-9001: Fix race condition in SetOnce (#931)

Posted by jp...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jpountz pushed a commit to branch branch_8x
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit dad15ba15e00df4fef024dae3b101b7969775ed6
Author: Przemko Robakowski <pr...@elastic.co>
AuthorDate: Mon Oct 14 18:33:46 2019 +0200

    LUCENE-9001: Fix race condition in SetOnce (#931)
---
 lucene/CHANGES.txt                                 |  4 +--
 .../src/java/org/apache/lucene/util/SetOnce.java   | 36 +++++++++++++++-------
 .../test/org/apache/lucene/util/TestSetOnce.java   |  9 ++++++
 3 files changed, 36 insertions(+), 13 deletions(-)

diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 0a986ab..4315a7d 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -25,8 +25,8 @@ Optimizations
   (Ignacio Vera, Adrien Grand)
 
 Bug Fixes
----------------------
-(No changes)
+
+* LUCENE-9001: Fix race condition in SetOnce. (Przemko Robakowski)
 
 Other
 ---------------------
diff --git a/lucene/core/src/java/org/apache/lucene/util/SetOnce.java b/lucene/core/src/java/org/apache/lucene/util/SetOnce.java
index 9be88ec..3c3f277 100644
--- a/lucene/core/src/java/org/apache/lucene/util/SetOnce.java
+++ b/lucene/core/src/java/org/apache/lucene/util/SetOnce.java
@@ -16,7 +16,7 @@
  */
 package org.apache.lucene.util;
 
-import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicReference;
 
 
 /**
@@ -36,16 +36,24 @@ public final class SetOnce<T> implements Cloneable {
       super("The object cannot be set twice!");
     }
   }
-  
-  private volatile T obj = null;
-  private final AtomicBoolean set;
+
+  /** Holding object and marking that it was already set */
+  private static final class Wrapper<T> {
+    private T object;
+
+    private Wrapper(T object) {
+      this.object = object;
+    }
+  }
+
+  private final AtomicReference<Wrapper<T>> set;
   
   /**
    * A default constructor which does not set the internal object, and allows
    * setting it by calling {@link #set(Object)}.
    */
   public SetOnce() {
-    set = new AtomicBoolean(false);
+    set = new AtomicReference<>();
   }
 
   /**
@@ -57,21 +65,27 @@ public final class SetOnce<T> implements Cloneable {
    * @see #set(Object)
    */
   public SetOnce(T obj) {
-    this.obj = obj;
-    set = new AtomicBoolean(true);
+    set = new AtomicReference<>(new Wrapper<>(obj));
   }
   
   /** Sets the given object. If the object has already been set, an exception is thrown. */
   public final void set(T obj) {
-    if (set.compareAndSet(false, true)) {
-      this.obj = obj;
-    } else {
+    if (!trySet(obj)) {
       throw new AlreadySetException();
     }
   }
+
+  /** Sets the given object if none was set before.
+   *
+   * @return true if object was set successfully, false otherwise
+   * */
+  public final boolean trySet(T obj) {
+    return set.compareAndSet(null, new Wrapper<>(obj));
+  }
   
   /** Returns the object set by {@link #set(Object)}. */
   public final T get() {
-    return obj;
+    Wrapper<T> wrapper = set.get();
+    return wrapper == null ? null : wrapper.object;
   }
 }
diff --git a/lucene/core/src/test/org/apache/lucene/util/TestSetOnce.java b/lucene/core/src/test/org/apache/lucene/util/TestSetOnce.java
index 0d93b88..ae12321 100644
--- a/lucene/core/src/test/org/apache/lucene/util/TestSetOnce.java
+++ b/lucene/core/src/test/org/apache/lucene/util/TestSetOnce.java
@@ -69,6 +69,15 @@ public class TestSetOnce extends LuceneTestCase {
     assertEquals(5, set.get().intValue());
     set.set(7);
   }
+
+  @Test
+  public void testTrySet() {
+    SetOnce<Integer> set = new SetOnce<>();
+    assertTrue(set.trySet(5));
+    assertEquals(5, set.get().intValue());
+    assertFalse(set.trySet(7));
+    assertEquals(5, set.get().intValue());
+  }
   
   @Test
   public void testSetMultiThreaded() throws Exception {
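
The race this commit fixes came from the old two-step publication: set()
first flipped an AtomicBoolean and then wrote the volatile obj field, so a
concurrent get() could still observe null after another thread's set() had
already succeeded (or after a second set() had already thrown). Packing the
value into a Wrapper held by a single AtomicReference makes flag and value
one atomic publication, and exposes the new trySet() exercised by the test
above. A usage sketch (assumes this patched lucene-core on the classpath;
the "node-*" values are illustrative):

    import org.apache.lucene.util.SetOnce;

    public class SetOnceDemo {
      public static void main(String[] args) {
        SetOnce<String> leader = new SetOnce<>();

        // trySet (new in this commit) reports failure instead of throwing.
        boolean won  = leader.trySet("node-1"); // true: first writer wins
        boolean lost = leader.trySet("node-2"); // false: already set
        System.out.println(won + " " + lost + " " + leader.get()); // true false node-1

        try {
          leader.set("node-3"); // set() still throws on a second attempt
        } catch (SetOnce.AlreadySetException e) {
          System.out.println("already set");
        }
      }
    }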


[lucene-solr] 05/05: LUCENE-9003: Compute numDocs() lazily. (#939)

Posted by jp...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jpountz pushed a commit to branch branch_8x
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit e5ca494769ce31a25fb27b2f4e1153632843dd3f
Author: Adrien Grand <jp...@gmail.com>
AuthorDate: Mon Oct 14 18:47:58 2019 +0200

    LUCENE-9003: Compute numDocs() lazily. (#939)
---
 lucene/CHANGES.txt                                 |  6 +-
 .../apache/lucene/index/BaseCompositeReader.java   | 22 +++++--
 .../java/org/apache/lucene/index/IndexReader.java  |  7 ++-
 .../lucene/index/TestFilterDirectoryReader.java    | 72 ++++++++++++++++++++++
 4 files changed, 100 insertions(+), 7 deletions(-)

diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 16ad6a4..5e6c6f4 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -18,12 +18,16 @@ Improvements
 (No changes)
 
 Optimizations
----------------------
 
 * LUCENE-8928: When building a kd-tree for dimensions n > 2, compute exact bounds for an inner node every N splits
   to improve the quality of the tree. N is defined by SPLITS_BEFORE_EXACT_BOUNDS which is set to 4.
   (Ignacio Vera, Adrien Grand)
 
+* BaseDirectoryReader no longer sums up the `LeafReader#numDocs` of its leaves
+  eagerly. This especially helps when creating views of readers that hide
+  documents, since computing the number of live documents is an expensive
+  operation. (Adrien Grand)
+
 Bug Fixes
 
 * LUCENE-9001: Fix race condition in SetOnce. (Przemko Robakowski)
diff --git a/lucene/core/src/java/org/apache/lucene/index/BaseCompositeReader.java b/lucene/core/src/java/org/apache/lucene/index/BaseCompositeReader.java
index 8a19fe1..f6aab56 100644
--- a/lucene/core/src/java/org/apache/lucene/index/BaseCompositeReader.java
+++ b/lucene/core/src/java/org/apache/lucene/index/BaseCompositeReader.java
@@ -50,7 +50,7 @@ public abstract class BaseCompositeReader<R extends IndexReader> extends Composi
   private final R[] subReaders;
   private final int[] starts;       // 1st docno for each reader
   private final int maxDoc;
-  private final int numDocs;
+  private int numDocs = -1;         // computed lazily
 
   /** List view solely for {@link #getSequentialSubReaders()},
    * for effectiveness the array is used internally. */
@@ -68,12 +68,11 @@ public abstract class BaseCompositeReader<R extends IndexReader> extends Composi
     this.subReaders = subReaders;
     this.subReadersList = Collections.unmodifiableList(Arrays.asList(subReaders));
     starts = new int[subReaders.length + 1];    // build starts array
-    long maxDoc = 0, numDocs = 0;
+    long maxDoc = 0;
     for (int i = 0; i < subReaders.length; i++) {
       starts[i] = (int) maxDoc;
       final IndexReader r = subReaders[i];
       maxDoc += r.maxDoc();      // compute maxDocs
-      numDocs += r.numDocs();    // compute numDocs
       r.registerParentReader(this);
     }
 
@@ -89,7 +88,6 @@ public abstract class BaseCompositeReader<R extends IndexReader> extends Composi
 
     this.maxDoc = Math.toIntExact(maxDoc);
     starts[subReaders.length] = this.maxDoc;
-    this.numDocs = Math.toIntExact(numDocs);
   }
 
   @Override
@@ -102,6 +100,22 @@ public abstract class BaseCompositeReader<R extends IndexReader> extends Composi
   @Override
   public final int numDocs() {
     // Don't call ensureOpen() here (it could affect performance)
+    // We want to compute numDocs() lazily so that creating a wrapper that hides
+    // some documents isn't slow at wrapping time, but on the first time that
+    // numDocs() is called. This can help as there are lots of use-cases of a
+    // reader that don't involve calling numDocs().
+    // However it's not crucial to make sure that we don't call numDocs() more
+    // than once on the sub readers, since they likely cache numDocs() anyway,
+    // hence the lack of synchronization.
+    int numDocs = this.numDocs;
+    if (numDocs == -1) {
+      numDocs = 0;
+      for (IndexReader r : subReaders) {
+        numDocs += r.numDocs();
+      }
+      assert numDocs >= 0;
+      this.numDocs = numDocs;
+    }
     return numDocs;
   }
 
diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexReader.java b/lucene/core/src/java/org/apache/lucene/index/IndexReader.java
index c87f5da..ff24144 100644
--- a/lucene/core/src/java/org/apache/lucene/index/IndexReader.java
+++ b/lucene/core/src/java/org/apache/lucene/index/IndexReader.java
@@ -301,7 +301,9 @@ public abstract class IndexReader implements Closeable {
     return vectors.terms(field);
   }
 
-  /** Returns the number of documents in this index. */
+  /** Returns the number of documents in this index.
+   *  <p><b>NOTE</b>: This operation may run in O(maxDoc). Implementations that
+   *  can't return this number in constant-time should cache it. */
   public abstract int numDocs();
 
   /** Returns one greater than the largest possible document number.
@@ -310,7 +312,8 @@ public abstract class IndexReader implements Closeable {
    */
   public abstract int maxDoc();
 
-  /** Returns the number of deleted documents. */
+  /** Returns the number of deleted documents.
+   *  <p><b>NOTE</b>: This operation may run in O(maxDoc). */
   public final int numDeletedDocs() {
     return maxDoc() - numDocs();
   }
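
A hedged illustration of the NOTE above: a delegating reader that cannot
compute numDocs() in constant time can cache it after the first call. The
class name below is hypothetical; FilterLeafReader and the two CacheHelper
overrides it requires are the same Lucene APIs used by the test further down.

    import org.apache.lucene.index.FilterLeafReader;
    import org.apache.lucene.index.LeafReader;

    // Hypothetical wrapper that caches the delegate's numDocs().
    class CachingNumDocsLeafReader extends FilterLeafReader {
      private int numDocs = -1;   // -1 means "not computed yet"

      CachingNumDocsLeafReader(LeafReader in) {
        super(in);
      }

      @Override
      public int numDocs() {
        int numDocs = this.numDocs;
        if (numDocs == -1) {
          numDocs = in.numDocs(); // possibly O(maxDoc) in the delegate
          this.numDocs = numDocs; // cached for subsequent calls
        }
        return numDocs;
      }

      @Override
      public CacheHelper getCoreCacheHelper() {
        return in.getCoreCacheHelper();
      }

      @Override
      public CacheHelper getReaderCacheHelper() {
        return in.getReaderCacheHelper();
      }
    }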
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestFilterDirectoryReader.java b/lucene/core/src/test/org/apache/lucene/index/TestFilterDirectoryReader.java
index 62a4294..2a00b29 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestFilterDirectoryReader.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestFilterDirectoryReader.java
@@ -21,6 +21,7 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
+import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.FilterDirectoryReader.SubReaderWrapper;
@@ -76,4 +77,75 @@ public class TestFilterDirectoryReader extends LuceneTestCase {
     dir.close();
   }
 
+  private static class NumDocsCountingSubReaderWrapper extends SubReaderWrapper {
+
+    private final AtomicLong numDocsCallCount;
+
+    NumDocsCountingSubReaderWrapper(AtomicLong numDocsCallCount) {
+      this.numDocsCallCount = numDocsCallCount;
+    }
+
+    @Override
+    public LeafReader wrap(LeafReader reader) {
+      return new FilterLeafReader(reader) {
+        @Override
+        public int numDocs() {
+          numDocsCallCount.incrementAndGet();
+          return super.numDocs();
+        }
+
+        @Override
+        public CacheHelper getCoreCacheHelper() {
+          return in.getCoreCacheHelper();
+        }
+
+        @Override
+        public CacheHelper getReaderCacheHelper() {
+          return in.getReaderCacheHelper();
+        }
+      };
+    }
+
+  }
+
+  private static class NumDocsCountingFilterDirectoryReader extends FilterDirectoryReader {
+
+    private final AtomicLong numDocsCallCount;
+
+    public NumDocsCountingFilterDirectoryReader(DirectoryReader in, AtomicLong numDocsCallCount) throws IOException {
+      super(in, new NumDocsCountingSubReaderWrapper(numDocsCallCount));
+      this.numDocsCallCount = numDocsCallCount;
+    }
+
+    @Override
+    protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
+      return new NumDocsCountingFilterDirectoryReader(in, numDocsCallCount);
+    }
+
+    @Override
+    public CacheHelper getReaderCacheHelper() {
+      return in.getReaderCacheHelper();
+    }
+
+  }
+
+  public void testFilterDirectoryReaderNumDocsIsLazy() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
+    w.addDocument(new Document());
+    DirectoryReader directoryReader = DirectoryReader.open(w);
+    w.close();
+
+    AtomicLong numDocsCallCount = new AtomicLong();
+    DirectoryReader directoryReaderWrapper = new NumDocsCountingFilterDirectoryReader(directoryReader, numDocsCallCount);
+    assertEquals(0L, numDocsCallCount.get());
+    assertEquals(1, directoryReaderWrapper.numDocs());
+    assertEquals(1L, numDocsCallCount.get()); // one segment, so called once
+    assertEquals(1, directoryReaderWrapper.numDocs());
+    assertEquals(1L, numDocsCallCount.get());
+
+    directoryReader.close();
+    dir.close();
+  }
+
 }


[lucene-solr] 03/05: LUCENE-8979: Code Cleanup: Use entryset for map iteration wherever possible. - part 2

Posted by jp...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jpountz pushed a commit to branch branch_8x
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 93f9a093cc72251a09ac7c27468241a1deb68d6e
Author: Koen De Groote <kd...@gmail.com>
AuthorDate: Mon Oct 14 18:36:19 2019 +0200

    LUCENE-8979: Code Cleanup: Use entryset for map iteration wherever possible. - part 2
---
 lucene/CHANGES.txt                                  |  6 +++---
 .../analysis/query/QueryAutoStopWordAnalyzer.java   |  8 +++++---
 .../lucene/benchmark/byTask/utils/Config.java       | 19 +++++++++++--------
 .../java/org/apache/lucene/search/BooleanQuery.java | 11 +++++++----
 .../monitor/MultipassTermFilteredPresearcher.java   |  4 ++--
 .../lucene/monitor/TermFilteredPresearcher.java     |  4 ++--
 .../builders/MultiPhraseQueryNodeBuilder.java       | 10 ++++------
 .../lucene/spatial3d/geom/StandardObjects.java      |  4 ++--
 .../search/suggest/document/ContextQuery.java       |  6 +++---
 .../dependencies/GetMavenDependenciesTask.java      | 21 ++++++++++++---------
 .../solr/analytics/AnalyticsRequestManager.java     |  6 +++---
 .../solr/prometheus/collector/MetricSamples.java    |  7 ++++---
 .../solr/response/VelocityResponseWriter.java       |  5 +++--
 .../solr/client/solrj/embedded/JettySolrRunner.java | 10 ++++------
 .../apache/solr/cloud/OverseerTaskProcessor.java    |  6 +++---
 .../java/org/apache/solr/cloud/ZkShardTerms.java    |  5 +++--
 .../cloud/api/collections/DeleteReplicaCmd.java     |  5 +++--
 .../apache/solr/handler/admin/CoreAdminHandler.java |  6 +++---
 .../solr/handler/component/TermsComponent.java      | 16 +++++++++-------
 .../apache/solr/handler/loader/CSVLoaderBase.java   |  6 +++---
 .../solr/response/PHPSerializedResponseWriter.java  | 11 +++++++----
 .../src/java/org/apache/solr/rest/RestManager.java  |  5 +++--
 .../analysis/ManagedSynonymFilterFactory.java       | 17 +++++++++--------
 .../solr/schema/FileExchangeRateProvider.java       |  6 +++---
 .../TopGroupsShardRequestFactory.java               |  3 +--
 .../SearchGroupShardResponseProcessor.java          | 10 ++++++----
 .../TopGroupsShardResponseProcessor.java            | 18 +++++++++---------
 .../AddSchemaFieldsUpdateProcessorFactory.java      | 14 ++++++++------
 .../processor/CloneFieldUpdateProcessorFactory.java |  4 ++--
 .../java/org/apache/solr/util/SimplePostTool.java   |  6 +++---
 .../java/org/apache/solr/util/SolrPluginUtils.java  |  8 ++++----
 .../solr/client/solrj/impl/BaseCloudSolrClient.java |  4 ++--
 .../client/solrj/io/stream/CloudSolrStream.java     |  6 +++---
 .../client/solrj/io/stream/DeepRandomStream.java    |  6 +++---
 .../solrj/io/stream/FeaturesSelectionStream.java    |  4 ++--
 .../solrj/io/stream/SignificantTermsStream.java     | 10 +++++-----
 .../solr/client/solrj/io/stream/StatsStream.java    |  6 +++---
 .../client/solrj/io/stream/TextLogitStream.java     |  4 ++--
 .../solr/client/solrj/io/stream/ZplotStream.java    |  6 +++---
 39 files changed, 167 insertions(+), 146 deletions(-)
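
The mechanical pattern applied throughout this commit: iterating over
keySet() and calling get(key) inside the loop does an extra hash lookup per
entry that entrySet() iteration avoids, since each entry already carries both
key and value. A standalone sketch (not taken from the commit):

    import java.util.HashMap;
    import java.util.Map;

    public class EntrySetDemo {
      public static void main(String[] args) {
        Map<String, Integer> counts = new HashMap<>();
        counts.put("a", 1);
        counts.put("b", 2);

        // Before: one get() lookup per key on top of the iteration itself.
        for (String key : counts.keySet()) {
          System.out.println(key + "=" + counts.get(key));
        }

        // After: entries expose key and value directly, no extra lookups.
        for (Map.Entry<String, Integer> entry : counts.entrySet()) {
          System.out.println(entry.getKey() + "=" + entry.getValue());
        }
      }
    }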

diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 4315a7d..251432a 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -29,8 +29,8 @@ Bug Fixes
 * LUCENE-9001: Fix race condition in SetOnce. (Przemko Robakowski)
 
 Other
----------------------
-(No changes)
+
+* LUCENE-8979: Code Cleanup: Use entryset for map iteration wherever possible. - Part 2 (Koen De Groote)
 
 * LUCENE-8746: Refactor EdgeTree - Introduce a Component tree that represents the tree of components (e.g polygons).
   Edge tree is now just a tree of edges. (Ignacio Vera)
@@ -161,7 +161,7 @@ Other
 
 * LUCENE-8758: QuadPrefixTree: removed levelS and levelN fields which weren't used. (Amish Shah)
 
-* LUCENE-8975: Code Cleanup: Use entryset for map iteration wherever possible.
+* LUCENE-8975: Code Cleanup: Use entryset for map iteration wherever possible. (Koen De Groote)
 
 * LUCENE-8993, LUCENE-8807: Changed all repository and download references in build files
   to HTTPS. (Uwe Schindler)
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/query/QueryAutoStopWordAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/query/QueryAutoStopWordAnalyzer.java
index e863206..01a199d 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/query/QueryAutoStopWordAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/query/QueryAutoStopWordAnalyzer.java
@@ -200,10 +200,12 @@ public final class QueryAutoStopWordAnalyzer extends AnalyzerWrapper {
    */
   public Term[] getStopWords() {
     List<Term> allStopWords = new ArrayList<>();
-    for (String fieldName : stopWordsPerField.keySet()) {
-      Set<String> stopWords = stopWordsPerField.get(fieldName);
+    for (Map.Entry<String, Set<String>> entry : stopWordsPerField.entrySet()) {
+      String field = entry.getKey();
+      Set<String> stopWords = entry.getValue();
+
       for (String text : stopWords) {
-        allStopWords.add(new Term(fieldName, text));
+        allStopWords.add(new Term(field, text));
       }
     }
     return allStopWords.toArray(new Term[allStopWords.size()]);
diff --git a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/utils/Config.java b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/utils/Config.java
index 07bb846..1c7a98d 100644
--- a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/utils/Config.java
+++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/utils/Config.java
@@ -25,6 +25,7 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.Properties;
 import java.util.StringTokenizer;
 
@@ -276,8 +277,9 @@ public class Config {
     // log changes in values
     if (valByRound.size() > 0) {
       sb.append(": ");
-      for (final String name : valByRound.keySet()) {
-        Object a = valByRound.get(name);
+      for (final Map.Entry<String, Object> entry : valByRound.entrySet()) {
+        final String name = entry.getKey();
+        Object a = entry.getValue();
         if (a instanceof int[]) {
           int ai[] = (int[]) a;
           int n1 = (roundNumber - 1) % ai.length;
@@ -388,9 +390,8 @@ public class Config {
       return "";
     }
     StringBuilder sb = new StringBuilder();
-    for (final String name : colForValByRound.keySet()) {
-      String colName = colForValByRound.get(name);
-      sb.append(" ").append(colName);
+    for (final String colName : colForValByRound.values()) {
+      sb.append(' ').append(colName);
     }
     return sb.toString();
   }
@@ -403,15 +404,17 @@ public class Config {
       return "";
     }
     StringBuilder sb = new StringBuilder();
-    for (final String name : colForValByRound.keySet()) {
-      String colName = colForValByRound.get(name);
+    for (final Map.Entry<String, String> entry : colForValByRound.entrySet()) {
+      String colName = entry.getValue();
       String template = " " + colName;
       if (roundNum < 0) {
         // just append blanks
         sb.append(Format.formatPaddLeft("-", template));
       } else {
+        String valByRoundName = entry.getKey();
+
         // append actual values, for that round
-        Object a = valByRound.get(name);
+        Object a = valByRound.get(valByRoundName);
         if (a instanceof int[]) {
           int ai[] = (int[]) a;
           int n = roundNum % ai.length;
diff --git a/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java b/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java
index 0484fe7..d72f138 100644
--- a/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java
@@ -510,10 +510,13 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
 
   @Override
   public void visit(QueryVisitor visitor) {
-    for (BooleanClause.Occur occur : clauseSets.keySet()) {
-      if (clauseSets.get(occur).size() > 0) {
-        QueryVisitor v = visitor.getSubVisitor(occur, this);
-        for (Query q : clauseSets.get(occur)) {
+    for (Map.Entry<Occur, Collection<Query>> entry : clauseSets.entrySet()) {
+      Occur clauseOccur = entry.getKey();
+      Collection<Query> clauseQueries = entry.getValue();
+
+      if (clauseQueries.size() > 0) {
+        QueryVisitor v = visitor.getSubVisitor(clauseOccur, this);
+        for (Query q : clauseQueries) {
           q.visit(v);
         }
       }
diff --git a/lucene/monitor/src/java/org/apache/lucene/monitor/MultipassTermFilteredPresearcher.java b/lucene/monitor/src/java/org/apache/lucene/monitor/MultipassTermFilteredPresearcher.java
index b155e13..8f25da9 100644
--- a/lucene/monitor/src/java/org/apache/lucene/monitor/MultipassTermFilteredPresearcher.java
+++ b/lucene/monitor/src/java/org/apache/lucene/monitor/MultipassTermFilteredPresearcher.java
@@ -113,8 +113,8 @@ public class MultipassTermFilteredPresearcher extends TermFilteredPresearcher {
     @Override
     public Query build() {
       Map<String, BytesRef[]> collectedTerms = new HashMap<>();
-      for (String field : terms.keySet()) {
-        collectedTerms.put(field, convertHash(terms.get(field)));
+      for (Map.Entry<String, BytesRefHash> entry : terms.entrySet()) {
+        collectedTerms.put(entry.getKey(), convertHash(entry.getValue()));
       }
       BooleanQuery.Builder parent = new BooleanQuery.Builder();
       for (int i = 0; i < passes; i++) {
diff --git a/lucene/monitor/src/java/org/apache/lucene/monitor/TermFilteredPresearcher.java b/lucene/monitor/src/java/org/apache/lucene/monitor/TermFilteredPresearcher.java
index a526f7e..a1f8ce1 100644
--- a/lucene/monitor/src/java/org/apache/lucene/monitor/TermFilteredPresearcher.java
+++ b/lucene/monitor/src/java/org/apache/lucene/monitor/TermFilteredPresearcher.java
@@ -225,8 +225,8 @@ public class TermFilteredPresearcher extends Presearcher {
       @Override
       public Query build() {
         BooleanQuery.Builder builder = new BooleanQuery.Builder();
-        for (String field : terms.keySet()) {
-          builder.add(new TermInSetQuery(field, terms.get(field)), BooleanClause.Occur.SHOULD);
+        for (Map.Entry<String, List<BytesRef>> entry : terms.entrySet()) {
+          builder.add(new TermInSetQuery(entry.getKey(), entry.getValue()), BooleanClause.Occur.SHOULD);
         }
         return builder.build();
       }
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/MultiPhraseQueryNodeBuilder.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/MultiPhraseQueryNodeBuilder.java
index 35debe9..6de6659 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/MultiPhraseQueryNodeBuilder.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/MultiPhraseQueryNodeBuilder.java
@@ -18,6 +18,7 @@ package org.apache.lucene.queryparser.flexible.standard.builders;
 
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Map;
 import java.util.TreeMap;
 
 import org.apache.lucene.index.Term;
@@ -67,12 +68,9 @@ public class MultiPhraseQueryNodeBuilder implements StandardQueryBuilder {
 
       }
 
-      for (int positionIncrement : positionTermMap.keySet()) {
-        List<Term> termList = positionTermMap.get(positionIncrement);
-
-        phraseQueryBuilder.add(termList.toArray(new Term[termList.size()]),
-            positionIncrement);
-
+      for (Map.Entry<Integer, List<Term>> entry : positionTermMap.entrySet()) {
+        List<Term> termList = entry.getValue();
+        phraseQueryBuilder.add(termList.toArray(new Term[termList.size()]), entry.getKey());
       }
 
     }
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/StandardObjects.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/StandardObjects.java
index 4e0acae..3283dbc 100644
--- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/StandardObjects.java
+++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/StandardObjects.java
@@ -77,8 +77,8 @@ class StandardObjects {
    classRegsitry.put(GeoExactCircle.class, 37);
    classRegsitry.put(GeoS2Shape.class, 38);
 
-   for (Class<?> clazz : classRegsitry.keySet()){
-     codeRegsitry.put(classRegsitry.get(clazz), clazz);
+   for (Map.Entry<Class<?>, Integer> entry : classRegsitry.entrySet()){
+     codeRegsitry.put(entry.getValue(), entry.getKey());
    }
   }
 }
diff --git a/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/ContextQuery.java b/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/ContextQuery.java
index f5062d2..54f1508 100644
--- a/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/ContextQuery.java
+++ b/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/ContextQuery.java
@@ -156,15 +156,15 @@ public class ContextQuery extends CompletionQuery implements Accountable {
   public String toString(String field) {
     StringBuilder buffer = new StringBuilder();
     BytesRefBuilder scratch = new BytesRefBuilder();
-    for (IntsRef context : contexts.keySet()) {
+    for (Map.Entry<IntsRef, ContextMetaData> entry : contexts.entrySet()) {
       if (buffer.length() != 0) {
         buffer.append(",");
       } else {
         buffer.append("contexts");
         buffer.append(":[");
       }
-      buffer.append(Util.toBytesRef(context, scratch).utf8ToString());
-      ContextMetaData metaData = contexts.get(context);
+      buffer.append(Util.toBytesRef(entry.getKey(), scratch).utf8ToString());
+      ContextMetaData metaData = entry.getValue();
       if (metaData.exact == false) {
         buffer.append("*");
       }
diff --git a/lucene/tools/src/java/org/apache/lucene/dependencies/GetMavenDependenciesTask.java b/lucene/tools/src/java/org/apache/lucene/dependencies/GetMavenDependenciesTask.java
index 610323f..df3668d 100644
--- a/lucene/tools/src/java/org/apache/lucene/dependencies/GetMavenDependenciesTask.java
+++ b/lucene/tools/src/java/org/apache/lucene/dependencies/GetMavenDependenciesTask.java
@@ -312,10 +312,10 @@ public class GetMavenDependenciesTask extends Task {
     // Delay adding shared compile-scope dependencies until after all have been processed,
     // so dependency sharing is limited to a depth of one.
     Map<String,SortedSet<ExternalDependency>> sharedDependencies = new HashMap<>();
-    for (String module : interModuleExternalCompileScopeDependencies.keySet()) {
+    for (Map.Entry<String, Set<String>> entry : interModuleExternalCompileScopeDependencies.entrySet()) {
       TreeSet<ExternalDependency> deps = new TreeSet<>();
-      sharedDependencies.put(module, deps);
-      Set<String> moduleDependencies = interModuleExternalCompileScopeDependencies.get(module);
+      sharedDependencies.put(entry.getKey(), deps);
+      Set<String> moduleDependencies = entry.getValue();
       if (null != moduleDependencies) {
         for (String otherArtifactId : moduleDependencies) {
           SortedSet<ExternalDependency> otherExtDeps = allExternalDependencies.get(otherArtifactId); 
@@ -329,13 +329,14 @@ public class GetMavenDependenciesTask extends Task {
         }
       }
     }
-    for (String module : interModuleExternalTestScopeDependencies.keySet()) {
+    for (Map.Entry<String, Set<String>> entry : interModuleExternalTestScopeDependencies.entrySet()) {
+      String module = entry.getKey();
       SortedSet<ExternalDependency> deps = sharedDependencies.get(module);
       if (null == deps) {
         deps = new TreeSet<>();
         sharedDependencies.put(module, deps);
       }
-      Set<String> moduleDependencies = interModuleExternalTestScopeDependencies.get(module);
+      Set<String> moduleDependencies = entry.getValue();
       if (null != moduleDependencies) {
         for (String otherArtifactId : moduleDependencies) {
           int testScopePos = otherArtifactId.indexOf(":test");
@@ -362,13 +363,14 @@ public class GetMavenDependenciesTask extends Task {
         }
       }
     }
-    for (String module : sharedDependencies.keySet()) {
+    for (Map.Entry<String, SortedSet<ExternalDependency>> entry : sharedDependencies.entrySet()) {
+      String module = entry.getKey();
       SortedSet<ExternalDependency> deps = allExternalDependencies.get(module);
       if (null == deps) {
         deps = new TreeSet<>();
         allExternalDependencies.put(module, deps);
       }
-      for (ExternalDependency dep : sharedDependencies.get(module)) {
+      for (ExternalDependency dep : entry.getValue()) {
         String dependencyCoordinate = dep.groupId + ":" + dep.artifactId;
         if (globalOptionalExternalDependencies.contains(dependencyCoordinate)
             || (perModuleOptionalExternalDependencies.containsKey(module)
@@ -439,9 +441,10 @@ public class GetMavenDependenciesTask extends Task {
    * dependencies.
    */
   private void appendAllInternalDependencies(StringBuilder builder) {
-    for (String artifactId : internalCompileScopeDependencies.keySet()) {
+    for (Map.Entry<String, SortedSet<String>> entry : internalCompileScopeDependencies.entrySet()) {
+      String artifactId = entry.getKey();
       List<String> exclusions = new ArrayList<>();
-      exclusions.addAll(internalCompileScopeDependencies.get(artifactId));
+      exclusions.addAll(entry.getValue());
       SortedSet<ExternalDependency> extDeps = allExternalDependencies.get(artifactId);
       if (null != extDeps) {
         for (ExternalDependency externalDependency : extDeps) {
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/AnalyticsRequestManager.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/AnalyticsRequestManager.java
index f1bfba8..f58bb6e 100644
--- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/AnalyticsRequestManager.java
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/AnalyticsRequestManager.java
@@ -179,9 +179,9 @@ public class AnalyticsRequestManager {
     ungroupedReductionManager.exportData();
 
     output.writeInt(groupingManagers.size());
-    for (String groupingName : groupingManagers.keySet()) {
-      output.writeUTF(groupingName);
-      groupingManagers.get(groupingName).exportShardData(output);
+    for (Map.Entry<String, AnalyticsGroupingManager> entry : groupingManagers.entrySet()) {
+      output.writeUTF(entry.getKey());
+      entry.getValue().exportShardData(output);
     }
   }
 
diff --git a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/MetricSamples.java b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/MetricSamples.java
index dce4c63..d71616c 100644
--- a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/MetricSamples.java
+++ b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/MetricSamples.java
@@ -53,13 +53,14 @@ public class MetricSamples {
   }
 
   public void addAll(MetricSamples other) {
-    for (String key : other.samplesByMetricName.keySet()) {
+    for (Map.Entry<String, Collector.MetricFamilySamples> entry : other.samplesByMetricName.entrySet()) {
+      String key = entry.getKey();
       if (this.samplesByMetricName.containsKey(key)) {
-        for (Collector.MetricFamilySamples.Sample sample : other.samplesByMetricName.get(key).samples) {
+        for (Collector.MetricFamilySamples.Sample sample : entry.getValue().samples) {
           addSampleIfMetricExists(key, sample);
         }
       } else {
-        this.samplesByMetricName.put(key, other.samplesByMetricName.get(key));
+        this.samplesByMetricName.put(key, entry.getValue());
       }
     }
   }
diff --git a/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java b/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java
index 828d29b..6279ee3 100644
--- a/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java
+++ b/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java
@@ -235,8 +235,9 @@ public class VelocityResponseWriter implements QueryResponseWriter, SolrCoreAwar
         </queryResponseWriter>
 */
     // Custom tools can override any of the built-in tools provided above, by registering one with the same name
-    for(String name : customTools.keySet()) {
-      Object customTool = SolrCore.createInstance(customTools.get(name), Object.class, "VrW custom tool: " + name, request.getCore(), request.getCore().getResourceLoader());
+    for(Map.Entry<String, String> entry : customTools.entrySet()) {
+      String name = entry.getKey();
+      Object customTool = SolrCore.createInstance(entry.getValue(), Object.class, "VrW custom tool: " + name, request.getCore(), request.getCore().getResourceLoader());
       if (customTool instanceof LocaleConfig) {
         ((LocaleConfig)customTool).configure(toolConfig);
       }
diff --git a/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java b/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
index dd8de23..c1e8bc2 100644
--- a/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
+++ b/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
@@ -371,14 +371,12 @@ public class JettySolrRunner {
 
         debugFilter = root.addFilter(DebugFilter.class, "*", EnumSet.of(DispatcherType.REQUEST) );
         extraFilters = new LinkedList<>();
-        for (Class<? extends Filter> filterClass : config.extraFilters.keySet()) {
-          extraFilters.add(root.addFilter(filterClass, config.extraFilters.get(filterClass),
-              EnumSet.of(DispatcherType.REQUEST)));
+        for (Map.Entry<Class<? extends Filter>, String> entry : config.extraFilters.entrySet()) {
+          extraFilters.add(root.addFilter(entry.getKey(), entry.getValue(), EnumSet.of(DispatcherType.REQUEST)));
         }
 
-        for (ServletHolder servletHolder : config.extraServlets.keySet()) {
-          String pathSpec = config.extraServlets.get(servletHolder);
-          root.addServlet(servletHolder, pathSpec);
+        for (Map.Entry<ServletHolder, String> entry : config.extraServlets.entrySet()) {
+          root.addServlet(entry.getKey(), entry.getValue());
         }
         dispatchFilter = root.getServletHandler().newFilterHolder(Source.EMBEDDED);
         dispatchFilter.setHeldClass(SolrDispatchFilter.class);
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java
index f45d37f..4afe7c4 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java
@@ -352,10 +352,10 @@ public class OverseerTaskProcessor implements Runnable, Closeable {
 
   private void cleanUpWorkQueue() throws KeeperException, InterruptedException {
     synchronized (completedTasks) {
-      for (String id : completedTasks.keySet()) {
-        workQueue.remove(completedTasks.get(id));
+      for (Map.Entry<String, QueueEvent> entry : completedTasks.entrySet()) {
+        workQueue.remove(entry.getValue());
         synchronized (runningZKTasks) {
-          runningZKTasks.remove(id);
+          runningZKTasks.remove(entry.getKey());
         }
       }
       completedTasks.clear();
diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkShardTerms.java b/solr/core/src/java/org/apache/solr/cloud/ZkShardTerms.java
index 01fe62b..206ccd2 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ZkShardTerms.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ZkShardTerms.java
@@ -482,9 +482,10 @@ public class ZkShardTerms implements AutoCloseable{
 
       HashMap<String, Long> newValues = new HashMap<>(values);
       long leaderTerm = newValues.get(leader);
-      for (String key : newValues.keySet()) {
+      for (Map.Entry<String, Long> entry : newValues.entrySet()) {
+        String key = entry.getKey();
         if (replicasNeedingRecovery.contains(key)) foundReplicasInLowerTerms = true;
-        if (Objects.equals(newValues.get(key), leaderTerm)) {
+        if (Objects.equals(entry.getValue(), leaderTerm)) {
           if(skipIncreaseTermOf(key, replicasNeedingRecovery)) {
             changed = true;
           } else {
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteReplicaCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteReplicaCmd.java
index a6b45b5..83f9141 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteReplicaCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteReplicaCmd.java
@@ -145,9 +145,10 @@ public class DeleteReplicaCmd implements Cmd {
       }
     }
 
-    for (Slice shardSlice: shardToReplicasMapping.keySet()) {
+    for (Map.Entry<Slice, Set<String>> entry : shardToReplicasMapping.entrySet()) {
+      Slice shardSlice = entry.getKey();
       String shardId = shardSlice.getName();
-      Set<String> replicas = shardToReplicasMapping.get(shardSlice);
+      Set<String> replicas = entry.getValue();
       //callDeleteReplica on all replicas
       for (String replica: replicas) {
         log.debug("Deleting replica {}  for shard {} based on count {}", replica, shardId, count);
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java
index 45cb063..cd6cd38 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java
@@ -242,10 +242,10 @@ public class CoreAdminHandler extends RequestHandlerBase implements PermissionNa
     Map<String, String> coreParams = new HashMap<>();
 
     // standard core create parameters
-    for (String param : paramToProp.keySet()) {
-      String value = params.get(param, null);
+    for (Map.Entry<String, String> entry : paramToProp.entrySet()) {
+      String value = params.get(entry.getKey(), null);
       if (StringUtils.isNotEmpty(value)) {
-        coreParams.put(paramToProp.get(param), value);
+        coreParams.put(entry.getValue(), value);
       }
     }
 
diff --git a/solr/core/src/java/org/apache/solr/handler/component/TermsComponent.java b/solr/core/src/java/org/apache/solr/handler/component/TermsComponent.java
index f5b03cf..7aeec0f 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/TermsComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/TermsComponent.java
@@ -23,6 +23,7 @@ import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Locale;
+import java.util.Map;
 import java.util.regex.Pattern;
 
 import org.apache.lucene.index.IndexReaderContext;
@@ -505,15 +506,16 @@ public class TermsComponent extends SearchComponent {
       TermsResponse termsResponse = new TermsResponse(terms);
 
       // loop though each field and add each term+freq to map
-      for (String key : fieldmap.keySet()) {
-        HashMap<String, TermsResponse.Term> termmap = fieldmap.get(key);
-        List<TermsResponse.Term> termlist = termsResponse.getTerms(key);
+      for (Map.Entry<String, HashMap<String, TermsResponse.Term>> entry : fieldmap.entrySet()) {
+        List<TermsResponse.Term> termlist = termsResponse.getTerms(entry.getKey());
 
         // skip this field if there are no terms
         if (termlist == null) {
           continue;
         }
 
+        HashMap<String, TermsResponse.Term> termmap = entry.getValue();
+
         // loop though each term
         for (TermsResponse.Term tc : termlist) {
           String term = tc.getTerm();
@@ -553,13 +555,13 @@ public class TermsComponent extends SearchComponent {
       }
 
       // loop through each field we want terms from
-      for (String key : fieldmap.keySet()) {
+      for (Map.Entry<String, HashMap<String, TermsResponse.Term>> entry : fieldmap.entrySet()) {
         NamedList<Object> fieldterms = new SimpleOrderedMap<>();
         TermsResponse.Term[] data = null;
         if (sort) {
-          data = getCountSorted(fieldmap.get(key));
+          data = getCountSorted(entry.getValue());
         } else {
-          data = getLexSorted(fieldmap.get(key));
+          data = getLexSorted(entry.getValue());
         }
 
         boolean includeTotalTermFreq = params.getBool(TermsParams.TERMS_TTF, false);
@@ -576,7 +578,7 @@ public class TermsComponent extends SearchComponent {
           }
         }
 
-        response.add(key, fieldterms);
+        response.add(entry.getKey(), fieldterms);
       }
 
       return response;
diff --git a/solr/core/src/java/org/apache/solr/handler/loader/CSVLoaderBase.java b/solr/core/src/java/org/apache/solr/handler/loader/CSVLoaderBase.java
index b503fa3..f5bc2c8 100644
--- a/solr/core/src/java/org/apache/solr/handler/loader/CSVLoaderBase.java
+++ b/solr/core/src/java/org/apache/solr/handler/loader/CSVLoaderBase.java
@@ -30,6 +30,7 @@ import org.apache.solr.internal.csv.CSVStrategy;
 import org.apache.solr.internal.csv.CSVParser;
 import org.apache.commons.io.IOUtils;
 
+import java.util.Map;
 import java.util.regex.Pattern;
 import java.util.List;
 import java.util.HashMap;
@@ -380,9 +381,8 @@ abstract class CSVLoaderBase extends ContentStreamLoader {
     }
 
     // add any literals
-    for (String fname : literals.keySet()) {
-      String val = literals.get(fname);
-      doc.addField(fname, val);
+    for (Map.Entry<String, String> entry : literals.entrySet()) {
+      doc.addField(entry.getKey(), entry.getValue());
     }
     if (rowId != null){
       doc.addField(rowId, line + rowIdOffset);
diff --git a/solr/core/src/java/org/apache/solr/response/PHPSerializedResponseWriter.java b/solr/core/src/java/org/apache/solr/response/PHPSerializedResponseWriter.java
index 05208be..7e090b8 100644
--- a/solr/core/src/java/org/apache/solr/response/PHPSerializedResponseWriter.java
+++ b/solr/core/src/java/org/apache/solr/response/PHPSerializedResponseWriter.java
@@ -22,6 +22,7 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
+import java.util.Map;
 
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.BytesRef;
@@ -137,16 +138,18 @@ class PHPSerializedWriter extends JSONWriter {
     }
 
     writeMapOpener(single.size() + multi.size());
-    for(String fname: single.keySet()){
-      Object val = single.get(fname);
+    for(Map.Entry<String, Object> entry : single.entrySet()){
+      String fname = entry.getKey();
+      Object val = entry.getValue();
       writeKey(fname, true);
       writeVal(fname, val);
     }
     
-    for(String fname: multi.keySet()){
+    for(Map.Entry<String, Object> entry : multi.entrySet()){
+      String fname = entry.getKey();
       writeKey(fname, true);
 
-      Object val = multi.get(fname);
+      Object val = entry.getValue();
       if (!(val instanceof Collection)) {
         // should never be reached if multivalued fields are stored as a Collection
         // so I'm assuming a size of 1 just to wrap the single value
diff --git a/solr/core/src/java/org/apache/solr/rest/RestManager.java b/solr/core/src/java/org/apache/solr/rest/RestManager.java
index abefc68..13717fb 100644
--- a/solr/core/src/java/org/apache/solr/rest/RestManager.java
+++ b/solr/core/src/java/org/apache/solr/rest/RestManager.java
@@ -768,11 +768,12 @@ public class RestManager {
     }      
     
     int numAttached = 0;
-    for (String resourceId : managed.keySet()) {
+    for (Map.Entry<String, ManagedResource> entry : managed.entrySet()) {
+      String resourceId = entry.getKey();
       if (resourceId.startsWith(routerPath)) {
         // the way restlet works is you attach a path w/o the routerPath
         String path = resourceId.substring(routerPath.length());
-        attachManagedResource(managed.get(resourceId), path, router);
+        attachManagedResource(entry.getValue(), path, router);
         ++numAttached;
       }
     }
diff --git a/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymFilterFactory.java b/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymFilterFactory.java
index 8d77c53..87878ce 100644
--- a/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymFilterFactory.java
+++ b/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymFilterFactory.java
@@ -142,7 +142,8 @@ public class ManagedSynonymFilterFactory extends BaseManagedTokenFilterFactory {
       synonymMappings = new TreeMap<>();
       if (managedData != null) {
         Map<String,Object> storedSyns = (Map<String,Object>)managedData;
-        for (String key : storedSyns.keySet()) {
+        for (Map.Entry<String, Object> entry : storedSyns.entrySet()) {
+          String key = entry.getKey();
 
           String caseKey = applyCaseSetting(ignoreCase, key);
           CasePreservedSynonymMappings cpsm = synonymMappings.get(caseKey);
@@ -153,7 +154,7 @@ public class ManagedSynonymFilterFactory extends BaseManagedTokenFilterFactory {
           
          // given the nature of our JSON parsing solution, we really have
           // no guarantees on what is in the file
-          Object mapping = storedSyns.get(key);
+          Object mapping = entry.getValue();
           if (!(mapping instanceof List)) {
             throw new SolrException(ErrorCode.SERVER_ERROR, 
                 "Invalid synonym file format! Expected a list of synonyms for "+key+
@@ -161,7 +162,7 @@ public class ManagedSynonymFilterFactory extends BaseManagedTokenFilterFactory {
           }
                     
           Set<String> sortedVals = new TreeSet<>();
-          sortedVals.addAll((List<String>)storedSyns.get(key));          
+          sortedVals.addAll((List<String>) entry.getValue());
           cpsm.mappings.put(key, sortedVals);        
         }
       }
@@ -269,8 +270,8 @@ public class ManagedSynonymFilterFactory extends BaseManagedTokenFilterFactory {
     protected Map<String,Set<String>> getStoredView() {
       Map<String,Set<String>> storedView = new TreeMap<>();
       for (CasePreservedSynonymMappings cpsm : synonymMappings.values()) {
-        for (String key : cpsm.mappings.keySet()) {
-          storedView.put(key, cpsm.mappings.get(key));
+        for (Map.Entry<String, Set<String>> entry : cpsm.mappings.entrySet()) {
+          storedView.put(entry.getKey(), entry.getValue());
         }
       }
       return storedView;
@@ -366,10 +367,10 @@ public class ManagedSynonymFilterFactory extends BaseManagedTokenFilterFactory {
     public void parse(Reader in) throws IOException, ParseException {
       boolean ignoreCase = synonymManager.getIgnoreCase();
       for (CasePreservedSynonymMappings cpsm : synonymManager.synonymMappings.values()) {
-        for (String term : cpsm.mappings.keySet()) {
-          for (String mapping : cpsm.mappings.get(term)) {
+        for (Map.Entry<String, Set<String>> entry : cpsm.mappings.entrySet()) {
+          for (String mapping : entry.getValue()) {
             // apply the case setting to match the behavior of the SynonymMap builder
-            CharsRef casedTerm = analyze(synonymManager.applyCaseSetting(ignoreCase, term), new CharsRefBuilder());
+            CharsRef casedTerm = analyze(synonymManager.applyCaseSetting(ignoreCase, entry.getKey()), new CharsRefBuilder());
             CharsRef casedMapping = analyze(synonymManager.applyCaseSetting(ignoreCase, mapping), new CharsRefBuilder());
             add(casedTerm, casedMapping, false);
           }          
diff --git a/solr/core/src/java/org/apache/solr/schema/FileExchangeRateProvider.java b/solr/core/src/java/org/apache/solr/schema/FileExchangeRateProvider.java
index fe3f2fd..dabf688 100644
--- a/solr/core/src/java/org/apache/solr/schema/FileExchangeRateProvider.java
+++ b/solr/core/src/java/org/apache/solr/schema/FileExchangeRateProvider.java
@@ -148,9 +148,9 @@ public class FileExchangeRateProvider implements ExchangeRateProvider {
   @Override
   public Set<String> listAvailableCurrencies() {
     Set<String> currencies = new HashSet<>();
-    for(String from : rates.keySet()) {
-      currencies.add(from);
-      for(String to : rates.get(from).keySet()) {
+    for(Map.Entry<String, Map<String, Double>> entry : rates.entrySet()) {
+      currencies.add(entry.getKey());
+      for(String to : entry.getValue().keySet()) {
         currencies.add(to);
       }
     }
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/requestfactory/TopGroupsShardRequestFactory.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/requestfactory/TopGroupsShardRequestFactory.java
index 57776d9..13b81b7 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/requestfactory/TopGroupsShardRequestFactory.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/requestfactory/TopGroupsShardRequestFactory.java
@@ -65,8 +65,7 @@ public class TopGroupsShardRequestFactory implements ShardRequestFactory {
   private ShardRequest[] createRequestForSpecificShards(ResponseBuilder rb) {
     // Determine all unique shards to query for TopGroups
     Set<String> uniqueShards = new HashSet<>();
-    for (String command : rb.searchGroupToShards.keySet()) {
-      Map<SearchGroup<BytesRef>, Set<String>> groupsToShard = rb.searchGroupToShards.get(command);
+    for (Map<SearchGroup<BytesRef>, Set<String>> groupsToShard : rb.searchGroupToShards.values()) {
       for (Set<String> shards : groupsToShard.values()) {
         uniqueShards.addAll(shards);
       }
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java
index 71c34b8..016da1d 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java
@@ -107,7 +107,8 @@ public class SearchGroupShardResponseProcessor implements ShardResponseProcessor
       @SuppressWarnings("unchecked")
       NamedList<NamedList> firstPhaseResult = (NamedList<NamedList>) srsp.getSolrResponse().getResponse().get("firstPhase");
       final Map<String, SearchGroupsFieldCommandResult> result = serializer.transformToNative(firstPhaseResult, groupSort, withinGroupSort, srsp.getShard());
-      for (String field : commandSearchGroups.keySet()) {
+      for (Map.Entry<String, List<Collection<SearchGroup<BytesRef>>>> entry : commandSearchGroups.entrySet()) {
+        String field = entry.getKey();
         final SearchGroupsFieldCommandResult firstPhaseCommandResult = result.get(field);
 
         final Integer groupCount = firstPhaseCommandResult.getGroupCount();
@@ -122,7 +123,7 @@ public class SearchGroupShardResponseProcessor implements ShardResponseProcessor
           continue;
         }
 
-        commandSearchGroups.get(field).add(searchGroups);
+        entry.getValue().add(searchGroups);
         for (SearchGroup<BytesRef> searchGroup : searchGroups) {
           Map<SearchGroup<BytesRef>, Set<String>> map = tempSearchGroupToShards.get(field);
           Set<String> shards = map.get(searchGroup);
@@ -137,8 +138,9 @@ public class SearchGroupShardResponseProcessor implements ShardResponseProcessor
     }
     rb.totalHitCount = hitCountDuringFirstPhase;
     rb.firstPhaseElapsedTime = maxElapsedTime;
-    for (String groupField : commandSearchGroups.keySet()) {
-      List<Collection<SearchGroup<BytesRef>>> topGroups = commandSearchGroups.get(groupField);
+    for (Map.Entry<String, List<Collection<SearchGroup<BytesRef>>>> entry : commandSearchGroups.entrySet()) {
+      String groupField = entry.getKey();
+      List<Collection<SearchGroup<BytesRef>>> topGroups = entry.getValue();
       Collection<SearchGroup<BytesRef>> mergedTopGroups = SearchGroup.merge(topGroups, groupSortSpec.getOffset(), groupSortSpec.getCount(), groupSort);
       if (mergedTopGroups == null) {
         continue;
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java
index 1525bcf..5a0cd62 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java
@@ -125,8 +125,8 @@ public class TopGroupsShardResponseProcessor implements ShardResponseProcessor {
       Map<String, ?> result = serializer.transformToNative(secondPhaseResult, groupSort, withinGroupSort, srsp.getShard());
       int numFound = 0;
       float maxScore = Float.NaN;
-      for (String field : commandTopGroups.keySet()) {
-        TopGroups<BytesRef> topGroups = (TopGroups<BytesRef>) result.get(field);
+      for (Map.Entry<String, List<TopGroups<BytesRef>>> entry : commandTopGroups.entrySet()) {
+        TopGroups<BytesRef> topGroups = (TopGroups<BytesRef>) result.get(entry.getKey());
         if (topGroups == null) {
           continue;
         }
@@ -134,7 +134,7 @@ public class TopGroupsShardResponseProcessor implements ShardResponseProcessor {
           numFound += topGroups.totalHitCount;
           if (Float.isNaN(maxScore) || topGroups.maxScore > maxScore) maxScore = topGroups.maxScore;
         }
-        commandTopGroups.get(field).add(topGroups);
+        entry.getValue().add(topGroups);
       }
       for (String query : queries) {
         QueryCommandResult queryCommandResult = (QueryCommandResult) result.get(query);
@@ -150,8 +150,8 @@ public class TopGroupsShardResponseProcessor implements ShardResponseProcessor {
         individualShardInfo.add("maxScore", maxScore);
       }
     }
-    for (String groupField : commandTopGroups.keySet()) {
-      List<TopGroups<BytesRef>> topGroups = commandTopGroups.get(groupField);
+    for (Map.Entry<String, List<TopGroups<BytesRef>>> entry : commandTopGroups.entrySet()) {
+      List<TopGroups<BytesRef>> topGroups = entry.getValue();
       if (topGroups.isEmpty()) {
         continue;
       }
@@ -164,7 +164,7 @@ public class TopGroupsShardResponseProcessor implements ShardResponseProcessor {
           docsPerGroup += subTopGroups.totalGroupedHitCount;
         }
       }
-      rb.mergedTopGroups.put(groupField, TopGroups.merge(topGroups.toArray(topGroupsArr), groupSort, withinGroupSort, groupOffsetDefault, docsPerGroup, TopGroups.ScoreMergeMode.None));
+      rb.mergedTopGroups.put(entry.getKey(), TopGroups.merge(topGroups.toArray(topGroupsArr), groupSort, withinGroupSort, groupOffsetDefault, docsPerGroup, TopGroups.ScoreMergeMode.None));
     }
 
     // calculate topN and start for group.query
@@ -177,8 +177,8 @@ public class TopGroupsShardResponseProcessor implements ShardResponseProcessor {
       topN = limit >= 0? limit: Integer.MAX_VALUE;
     }
 
-    for (String query : commandTopDocs.keySet()) {
-      List<QueryCommandResult> queryCommandResults = commandTopDocs.get(query);
+    for (Map.Entry<String, List<QueryCommandResult>> entry : commandTopDocs.entrySet()) {
+      List<QueryCommandResult> queryCommandResults = entry.getValue();
       List<TopDocs> topDocs = new ArrayList<>(queryCommandResults.size());
       int mergedMatches = 0;
       float maxScore = Float.NaN;
@@ -202,7 +202,7 @@ public class TopGroupsShardResponseProcessor implements ShardResponseProcessor {
         mergedTopDocs = TopDocs.merge(
             withinGroupSort, start, topN, topDocs.toArray(new TopFieldDocs[topDocs.size()]), true);
       }
-      rb.mergedQueryCommandResults.put(query, new QueryCommandResult(mergedTopDocs, mergedMatches, maxScore));
+      rb.mergedQueryCommandResults.put(entry.getKey(), new QueryCommandResult(mergedTopDocs, mergedMatches, maxScore));
     }
     fillResultIds(rb);
   }
diff --git a/solr/core/src/java/org/apache/solr/update/processor/AddSchemaFieldsUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/AddSchemaFieldsUpdateProcessorFactory.java
index 131d72c..6d102d7 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/AddSchemaFieldsUpdateProcessorFactory.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/AddSchemaFieldsUpdateProcessorFactory.java
@@ -424,11 +424,12 @@ public class AddSchemaFieldsUpdateProcessorFactory extends UpdateRequestProcesso
           builder.append("]");
           builder.append("\nCopyFields to be added to the schema: [");
           isFirst = true;
-          for (String fieldName : newCopyFields.keySet()) {
+          for (Map.Entry<String, Map<Integer, List<CopyFieldDef>>> entry : newCopyFields.entrySet()) {
+            String fieldName = entry.getKey();
             builder.append(isFirst ? "" : ",");
             isFirst = false;
             builder.append("source=").append(fieldName).append("{");
-            for (List<CopyFieldDef> copyFieldDefList : newCopyFields.get(fieldName).values()) {
+            for (List<CopyFieldDef> copyFieldDefList : entry.getValue().values()) {
               for (CopyFieldDef copyFieldDef : copyFieldDefList) {
                 builder.append("{dest=").append(copyFieldDef.getDest(fieldName));
                 builder.append(", maxChars=").append(copyFieldDef.getMaxChars()).append("}");
@@ -445,10 +446,11 @@ public class AddSchemaFieldsUpdateProcessorFactory extends UpdateRequestProcesso
           try {
             IndexSchema newSchema = oldSchema.addFields(newFields, Collections.emptyMap(), false);
             // Add copyFields
-            for (String srcField : newCopyFields.keySet()) {
-              for (Integer maxChars : newCopyFields.get(srcField).keySet()) {
-                newSchema = newSchema.addCopyFields(srcField, 
-                  newCopyFields.get(srcField).get(maxChars).stream().map(f -> f.getDest(srcField)).collect(Collectors.toList()), 
+            for (Map.Entry<String, Map<Integer, List<CopyFieldDef>>> entry : newCopyFields.entrySet()) {
+              String srcField = entry.getKey();
+              for (Integer maxChars : entry.getValue().keySet()) {
+                newSchema = newSchema.addCopyFields(srcField,
+                    entry.getValue().get(maxChars).stream().map(f -> f.getDest(srcField)).collect(Collectors.toList()),
                   maxChars);
               }
             }
diff --git a/solr/core/src/java/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactory.java
index ba20c9a..f0f3308 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactory.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactory.java
@@ -458,8 +458,8 @@ public class CloneFieldUpdateProcessorFactory
           destMap.put(resolvedDest, destField);
         }
 
-        for (String dest : destMap.keySet()) {
-          doc.put(dest, destMap.get(dest));
+        for (Map.Entry<String, SolrInputField> entry : destMap.entrySet()) {
+          doc.put(entry.getKey(), entry.getValue());
         }
         super.processAdd(cmd);
       }
diff --git a/solr/core/src/java/org/apache/solr/util/SimplePostTool.java b/solr/core/src/java/org/apache/solr/util/SimplePostTool.java
index 1b0cfcf..d48c413 100644
--- a/solr/core/src/java/org/apache/solr/util/SimplePostTool.java
+++ b/solr/core/src/java/org/apache/solr/util/SimplePostTool.java
@@ -709,9 +709,9 @@ public class SimplePostTool {
    * @return true if this is a supported content type
    */
   protected boolean typeSupported(String type) {
-    for(String key : mimeMap.keySet()) {
-      if(mimeMap.get(key).equals(type)) {
-        if(fileTypes.contains(key))
+    for(Map.Entry<String, String> entry : mimeMap.entrySet()) {
+      if(entry.getValue().equals(type)) {
+        if(fileTypes.contains(entry.getKey()))
           return true;
       }
     }
diff --git a/solr/core/src/java/org/apache/solr/util/SolrPluginUtils.java b/solr/core/src/java/org/apache/solr/util/SolrPluginUtils.java
index 2807ad6..c8f7f73 100644
--- a/solr/core/src/java/org/apache/solr/util/SolrPluginUtils.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrPluginUtils.java
@@ -900,12 +900,12 @@ public class SolrPluginUtils {
         Alias a = aliases.get(field);
 
         List<Query> disjuncts = new ArrayList<>();
-        for (String f : a.fields.keySet()) {
+        for (Map.Entry<String, Float> entry : a.fields.entrySet()) {
 
-          Query sub = getFieldQuery(f,queryText,quoted, false);
+          Query sub = getFieldQuery(entry.getKey(),queryText,quoted, false);
           if (null != sub) {
-            if (null != a.fields.get(f)) {
-              sub = new BoostQuery(sub, a.fields.get(f));
+            if (null != entry.getValue()) {
+              sub = new BoostQuery(sub, entry.getValue());
             }
             disjuncts.add(sub);
           }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseCloudSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseCloudSolrClient.java
index 82645fd..f00e20d 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseCloudSolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseCloudSolrClient.java
@@ -746,8 +746,8 @@ public abstract class BaseCloudSolrClient extends SolrClient {
         throw toThrow;
       }
     }
-    for (String updateType: versions.keySet()) {
-      condensed.add(updateType, versions.get(updateType));
+    for (Map.Entry<String, NamedList> entry : versions.entrySet()) {
+      condensed.add(entry.getKey(), entry.getValue());
     }
     condensed.add("responseHeader", cheader);
     return condensed;
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java
index f871473..8464cf3 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java
@@ -359,9 +359,9 @@ public class CloudSolrStream extends TupleStream implements Expressible {
     }
 
     // Check collection case insensitive
-    for(String collectionMapKey : collectionsMap.keySet()) {
-      if(collectionMapKey.equalsIgnoreCase(collectionName)) {
-        return collectionsMap.get(collectionMapKey).getActiveSlicesArr();
+    for(Entry<String, DocCollection> entry : collectionsMap.entrySet()) {
+      if(entry.getKey().equalsIgnoreCase(collectionName)) {
+        return entry.getValue().getActiveSlicesArr();
       }
     }
 
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DeepRandomStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DeepRandomStream.java
index e750bcc..d2ef18c 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DeepRandomStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DeepRandomStream.java
@@ -298,9 +298,9 @@ public class DeepRandomStream extends TupleStream implements Expressible {
     }
 
     // Check collection case insensitive
-    for(String collectionMapKey : collectionsMap.keySet()) {
-      if(collectionMapKey.equalsIgnoreCase(collectionName)) {
-        return collectionsMap.get(collectionMapKey).getActiveSlicesArr();
+    for(Entry<String, DocCollection> entry : collectionsMap.entrySet()) {
+      if(entry.getKey().equalsIgnoreCase(collectionName)) {
+        return entry.getValue().getActiveSlicesArr();
       }
     }
 
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FeaturesSelectionStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FeaturesSelectionStream.java
index 3212dc6..22b807f 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FeaturesSelectionStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FeaturesSelectionStream.java
@@ -424,8 +424,8 @@ public class FeaturesSelectionStream extends TupleStream implements Expressible{
       params.add(DISTRIB, "false");
       params.add("fq","{!igain}");
 
-      for(String key : paramsMap.keySet()) {
-        params.add(key, paramsMap.get(key));
+      for(Map.Entry<String, String> entry : paramsMap.entrySet()) {
+        params.add(entry.getKey(), entry.getValue());
       }
 
       params.add("outcome", outcome);
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
index fbe893d..80981e8 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
@@ -315,10 +315,10 @@ public class SignificantTermsStream extends TupleStream implements Expressible{
 
         List<Map> maps = new ArrayList();
 
-        for(String term : mergeFreqs.keySet() ) {
-          int[] freqs = mergeFreqs.get(term);
+        for(Map.Entry<String, int[]> entry : mergeFreqs.entrySet()) {
+          int[] freqs = entry.getValue();
           Map map = new HashMap();
-          map.put("term", term);
+          map.put("term", entry.getKey());
           map.put("background", freqs[0]);
           map.put("foreground", freqs[1]);
 
@@ -389,8 +389,8 @@ public class SignificantTermsStream extends TupleStream implements Expressible{
       params.add(DISTRIB, "false");
       params.add("fq","{!significantTerms}");
 
-      for(String key : paramsMap.keySet()) {
-        params.add(key, paramsMap.get(key));
+      for(Map.Entry<String, String> entry : paramsMap.entrySet()) {
+        params.add(entry.getKey(), entry.getValue());
       }
 
       params.add("minDocFreq", Float.toString(minDocFreq));
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StatsStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StatsStream.java
index 6c6df59..1bb1546 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StatsStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StatsStream.java
@@ -302,16 +302,16 @@ public class StatsStream extends TupleStream implements Expressible  {
       }
     }
 
-    for(String field : m.keySet()) {
+    for(Entry<String, List<String>> entry : m.entrySet()) {
       StringBuilder buf = new StringBuilder();
-      List<String> stats = m.get(field);
+      List<String> stats = entry.getValue();
       buf.append("{!");
 
       for(String stat : stats) {
         buf.append(stat).append("=").append("true ");
       }
 
-      buf.append("}").append(field);
+      buf.append("}").append(entry.getKey());
       params.add("stats.field", buf.toString());
     }
   }
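
This hunk assembles one {!stat=true ...}field local-params string per
map entry. A standalone sketch of the string assembly (the field name
and stats are invented; the {!...} syntax is copied from the hunk):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class StatsFieldParams {
      public static void main(String[] args) {
        Map<String, List<String>> m = new HashMap<>();
        m.put("price", Arrays.asList("min", "max"));
        for (Map.Entry<String, List<String>> entry : m.entrySet()) {
          StringBuilder buf = new StringBuilder("{!");
          for (String stat : entry.getValue()) {
            buf.append(stat).append("=").append("true ");
          }
          buf.append("}").append(entry.getKey());
          System.out.println(buf); // {!min=true max=true }price
        }
      }
    }
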
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java
index dd9be6a..a493f5a 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java
@@ -627,8 +627,8 @@ public class TextLogitStream extends TupleStream implements Expressible {
       params.add("terms", TextLogitStream.toString(terms));
       params.add("idfs", TextLogitStream.toString(idfs));
 
-      for(String key : paramsMap.keySet()) {
-        params.add(key, paramsMap.get(key));
+      for(Entry<String, String> entry : paramsMap.entrySet()) {
+        params.add(entry.getKey(), entry.getValue());
       }
 
       if(weights != null) {
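
TextLogitStream repeats the paramsMap copy seen in
FeaturesSelectionStream. The inverse case is worth stating: when a loop
needs only the keys, keySet() wastes nothing and this cleanup does not
apply. A trivial sketch (names invented):

    import java.util.HashMap;
    import java.util.Map;

    public class KeysOnly {
      public static void main(String[] args) {
        Map<String, String> paramsMap = new HashMap<>();
        paramsMap.put("q", "*:*");
        // Only the keys are used, so keySet() costs no extra lookups.
        for (String key : paramsMap.keySet()) {
          System.out.println(key);
        }
      }
    }
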
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ZplotStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ZplotStream.java
index 66d9867..3b001cd 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ZplotStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ZplotStream.java
@@ -197,9 +197,9 @@ public class ZplotStream extends TupleStream implements Expressible {
       //Handle the vectors
       for (int i = 0; i < numTuples; i++) {
         Tuple tuple = new Tuple(new HashMap());
-        for (String key : evaluated.keySet()) {
-          List l = (List) evaluated.get(key);
-          tuple.put(key, l.get(i));
+        for (Map.Entry<String, Object> entry : evaluated.entrySet()) {
+          List l = (List) entry.getValue();
+          tuple.put(entry.getKey(), l.get(i));
         }
 
         outTuples.add(tuple);
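
The ZplotStream hunk pivots column vectors into row tuples: for row i,
each entry contributes its key as the field name and the i-th element
of its value list as the cell. A minimal sketch of the pivot, with
plain maps standing in for Tuple (the data is invented):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class ColumnPivot {
      public static void main(String[] args) {
        Map<String, List<Integer>> evaluated = new HashMap<>();
        evaluated.put("x", Arrays.asList(1, 2, 3));
        evaluated.put("y", Arrays.asList(4, 5, 6));
        List<Map<String, Object>> outTuples = new ArrayList<>();
        for (int i = 0; i < 3; i++) {
          Map<String, Object> tuple = new HashMap<>();
          for (Map.Entry<String, List<Integer>> entry : evaluated.entrySet()) {
            tuple.put(entry.getKey(), entry.getValue().get(i));
          }
          outTuples.add(tuple);
        }
        System.out.println(outTuples); // e.g. [{x=1, y=4}, {x=2, y=5}, {x=3, y=6}]
      }
    }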