Posted to commits@lucene.apache.org by er...@apache.org on 2018/02/26 06:22:56 UTC

[1/3] lucene-solr:branch_7x: SOLR-10809: Get precommit lint warnings out of Solr core

Repository: lucene-solr
Updated Branches:
  refs/heads/branch_7x a96ac1300 -> 94b7f7dcb


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/solrj/src/test/org/apache/solr/common/util/ContentStreamTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/common/util/ContentStreamTest.java b/solr/solrj/src/test/org/apache/solr/common/util/ContentStreamTest.java
index 8ecc29c..58996e4 100644
--- a/solr/solrj/src/test/org/apache/solr/common/util/ContentStreamTest.java
+++ b/solr/solrj/src/test/org/apache/solr/common/util/ContentStreamTest.java
@@ -45,14 +45,16 @@ public class ContentStreamTest extends SolrTestCaseJ4
 
   public void testFileStream() throws IOException 
   {
-    InputStream is = new SolrResourceLoader().openResource( "solrj/README" );
-    assertNotNull( is );
-    File file = new File(createTempDir().toFile(), "README");
-    FileOutputStream os = new FileOutputStream(file);
-    IOUtils.copy(is, os);
-    os.close();
-    is.close();
-    
+    File file = null;
+    try (SolrResourceLoader loader = new SolrResourceLoader();
+         InputStream is = loader.openResource( "solrj/README" )) {
+      assertNotNull(is);
+      file = new File(createTempDir().toFile(), "README");
+      try (FileOutputStream os = new FileOutputStream(file)) {
+        IOUtils.copy(is, os);
+      }
+    }
+
     ContentStreamBase stream = new ContentStreamBase.FileStream(file);
     InputStream s = stream.getStream();
     FileInputStream fis = new FileInputStream(file);
@@ -74,13 +76,18 @@ public class ContentStreamTest extends SolrTestCaseJ4
 
   public void testURLStream() throws IOException 
   {
-    InputStream is = new SolrResourceLoader().openResource( "solrj/README" );
-    assertNotNull( is );
-    File file = new File(createTempDir().toFile(), "README");
-    FileOutputStream os = new FileOutputStream(file);
-    IOUtils.copy(is, os);
-    os.close();
-    is.close();
+    File file = null;
+    FileOutputStream os = null;
+
+    try (SolrResourceLoader loader = new SolrResourceLoader();
+         InputStream is = loader.openResource( "solrj/README" )) {
+      assertNotNull(is);
+      file = new File(createTempDir().toFile(), "README");
+      os = new FileOutputStream(file);
+      IOUtils.copy(is, os);
+      os.close();
+      is.close();
+    }
     
     ContentStreamBase stream = new ContentStreamBase.URLStream(new URL(file
         .toURI().toASCIIString()));


[3/3] lucene-solr:branch_7x: SOLR-10809: Get precommit lint warnings out of Solr core

Posted by er...@apache.org.
SOLR-10809: Get precommit lint warnings out of Solr core

(cherry picked from commit 6164643)


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/94b7f7dc
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/94b7f7dc
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/94b7f7dc

Branch: refs/heads/branch_7x
Commit: 94b7f7dcb2cf35ea4cac7bd142421f4e0ca739fa
Parents: a96ac13
Author: Erick Erickson <er...@apache.org>
Authored: Sun Feb 25 22:13:26 2018 -0800
Committer: Erick Erickson <er...@apache.org>
Committed: Sun Feb 25 22:14:54 2018 -0800

----------------------------------------------------------------------
 solr/CHANGES.txt                                |   2 +
 .../org/apache/solr/core/BlobRepository.java    |   4 +-
 .../org/apache/solr/core/CoreContainer.java     |   1 +
 .../apache/solr/core/HdfsDirectoryFactory.java  |   2 +
 .../handler/AnalysisRequestHandlerBase.java     |  19 +-
 .../org/apache/solr/handler/BlobHandler.java    |   6 +-
 .../apache/solr/handler/ReplicationHandler.java |   4 +-
 .../org/apache/solr/handler/sql/SolrTable.java  |   6 +-
 .../solr/highlight/DefaultSolrHighlighter.java  | 124 ++--
 .../solr/response/SmileResponseWriter.java      |   4 +-
 .../schema/OpenExchangeRatesOrgProvider.java    |  11 +-
 .../solr/search/CollapsingQParserPlugin.java    |   4 +-
 .../src/java/org/apache/solr/search/DocSet.java |   4 +-
 .../java/org/apache/solr/search/DocSetBase.java |   6 -
 .../solr/search/facet/UnInvertedField.java      |  12 +-
 .../solr/search/function/OrdFieldSource.java    |   2 +-
 .../search/function/ReverseOrdFieldSource.java  |   2 +-
 .../apache/solr/servlet/SolrDispatchFilter.java |  42 +-
 .../apache/solr/update/CdcrTransactionLog.java  |   6 +-
 .../apache/solr/update/HdfsTransactionLog.java  |  10 +-
 .../org/apache/solr/update/TransactionLog.java  |  12 +-
 .../org/apache/solr/util/SimplePostTool.java    |  20 +-
 .../org/apache/solr/util/SolrLogLayout.java     |   7 +-
 .../org/apache/solr/cloud/OverseerTest.java     | 123 ++--
 .../AutoAddReplicasPlanActionTest.java          |  13 +-
 .../solr/cloud/autoscaling/TestPolicyCloud.java |  91 +--
 .../solr/core/HdfsDirectoryFactoryTest.java     |  41 +-
 .../handler/admin/SecurityConfHandlerTest.java  |  34 +-
 .../DistributedDebugComponentTest.java          |  13 +-
 .../apache/solr/highlight/HighlighterTest.java  |  33 +-
 .../response/TestJavabinTupleStreamParser.java  |  17 +-
 .../solr/schema/TestSortableTextField.java      |   2 +-
 .../security/TestPKIAuthenticationPlugin.java   |   4 +-
 .../TestRuleBasedAuthorizationPlugin.java       |  11 +-
 .../TestSha256AuthenticationProvider.java       |  31 +-
 .../solr/spelling/SimpleQueryConverter.java     |   5 +-
 .../uninverting/TestFieldCacheVsDocValues.java  | 163 ++---
 .../solr/update/SolrCmdDistributorTest.java     | 618 ++++++++++---------
 .../solr/update/SolrIndexSplitterTest.java      |   6 +-
 .../RecordingUpdateProcessorFactory.java        |   1 +
 .../client/solrj/io/stream/DaemonStream.java    |   4 +-
 .../client/solrj/io/stream/FacetStream.java     |   4 +-
 .../io/stream/FeaturesSelectionStream.java      |   6 +-
 .../client/solrj/io/stream/TextLogitStream.java |   6 +-
 .../client/solrj/io/stream/TopicStream.java     |   5 +-
 .../solrj/io/stream/StreamExpressionTest.java   |   1 -
 .../stream/StreamExpressionToExpessionTest.java | 311 +++++-----
 .../StreamExpressionToExplanationTest.java      | 197 +++---
 .../client/solrj/io/stream/StreamingTest.java   |   1 +
 .../solrj/response/NoOpResponseParserTest.java  |   5 +-
 .../solrj/response/QueryResponseTest.java       |  41 +-
 .../solrj/response/TestClusteringResponse.java  |  15 +-
 .../solr/common/util/ContentStreamTest.java     |  37 +-
 53 files changed, 1107 insertions(+), 1042 deletions(-)
----------------------------------------------------------------------
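The bulk of this patch converts manually closed streams to try-with-resources so the "resource" precommit lint stops flagging potential leaks. A minimal before/after sketch of the pattern, assuming Commons IO's IOUtils.copy as used in the tests below (the src/dst names are illustrative, not from the patch):

    import java.io.*;
    import org.apache.commons.io.IOUtils;

    // Before: 'is' and 'os' leak if IOUtils.copy throws before the
    // explicit close() calls run, which is what the lint flags.
    static void copyOld(File src, File dst) throws IOException {
      InputStream is = new FileInputStream(src);
      OutputStream os = new FileOutputStream(dst);
      IOUtils.copy(is, os);
      os.close();
      is.close();
    }

    // After: both streams are closed automatically, in reverse
    // declaration order, even when copy() throws mid-stream.
    static void copyNew(File src, File dst) throws IOException {
      try (InputStream is = new FileInputStream(src);
           OutputStream os = new FileOutputStream(dst)) {
        IOUtils.copy(is, os);
      }
    }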


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 7491c7f..f0c4f40 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -300,6 +300,8 @@ Other Changes
 
 * SOLR-12027: Increase thread lingering timeout to 80s. (Mikhail Khludnev)
 
+* SOLR-10809: Get precommit lint warnings out of Solr core (Erick Erickson)
+
 ==================  7.2.1 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/java/org/apache/solr/core/BlobRepository.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/BlobRepository.java b/solr/core/src/java/org/apache/solr/core/BlobRepository.java
index e4d5e04..48dd70a 100644
--- a/solr/core/src/java/org/apache/solr/core/BlobRepository.java
+++ b/solr/core/src/java/org/apache/solr/core/BlobRepository.java
@@ -164,7 +164,9 @@ public class BlobRepository {
       if (statusCode != 200) {
         throw new SolrException(SolrException.ErrorCode.NOT_FOUND, "no such blob or version available: " + key);
       }
-      b = SimplePostTool.inputStreamToByteArray(entity.getEntity().getContent());
+      try (InputStream is = entity.getEntity().getContent()) {
+        b = SimplePostTool.inputStreamToByteArray(is);
+      }
     } catch (Exception e) {
       if (e instanceof SolrException) {
         throw (SolrException) e;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/java/org/apache/solr/core/CoreContainer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index 38f1ad2..567f32b 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -1018,6 +1018,7 @@ public class CoreContainer {
    *
    * @return the newly created core
    */
+  @SuppressWarnings("resource")
   private SolrCore createFromDescriptor(CoreDescriptor dcore, boolean publishState, boolean newCollection) {
 
     if (isShutDown) {

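Not every warning can be fixed by closing: createFromDescriptor above hands the new SolrCore to the container, which closes it later, so the method suppresses the lint instead. A hedged sketch of that design choice, with hypothetical Widget/Registry names (not Solr API):

    // Hypothetical example: the resource deliberately escapes the
    // method, so try-with-resources would close it prematurely.
    // Suppressing the lint documents the ownership transfer.
    @SuppressWarnings("resource")
    Widget createWidget(Config cfg) {
      Widget w = new Widget(cfg);  // owned by the registry from here on
      registry.register(w);        // registry closes it at shutdown
      return w;
    }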
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java b/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java
index 761745b..4938689 100644
--- a/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java
+++ b/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java
@@ -189,6 +189,7 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory implements Sol
   }
 
   @Override
+  @SuppressWarnings("resource")
   protected Directory create(String path, LockFactory lockFactory, DirContext dirContext) throws IOException {
     assert params != null : "init must be called before create";
     LOG.info("creating directory factory for path {}", path);
@@ -203,6 +204,7 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory implements Sol
     boolean blockCacheReadEnabled = getConfig(BLOCKCACHE_READ_ENABLED, true);
     
     final HdfsDirectory hdfsDir;
+
     final Directory dir;
     if (blockCacheEnabled && dirContext != DirContext.META_DATA) {
       int numberOfBlocksPerBank = getConfig(NUMBEROFBLOCKSPERBANK, 16384);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java b/solr/core/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java
index 579e5ef..54fdf99 100644
--- a/solr/core/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java
+++ b/solr/core/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java
@@ -117,9 +117,12 @@ public abstract class AnalysisRequestHandlerBase extends RequestHandlerBase {
     if (0 < cfiltfacs.length) {
       String source = value;
       for(CharFilterFactory cfiltfac : cfiltfacs ){
-        Reader reader = new StringReader(source);
-        reader = cfiltfac.create(reader);
-        source = writeCharStream(namedList, reader);
+        try (Reader sreader = new StringReader(source);
+             Reader reader = cfiltfac.create(sreader)) {
+          source = writeCharStream(namedList, reader);
+        } catch (IOException e) {
+          // do nothing.
+        }
       }
     }
 
@@ -139,9 +142,19 @@ public abstract class AnalysisRequestHandlerBase extends RequestHandlerBase {
       tokenStream = tokenFilterFactory.create(listBasedTokenStream);
       tokens = analyzeTokenStream(tokenStream);
       namedList.add(tokenStream.getClass().getName(), convertTokensToNamedLists(tokens, context));
+      try {
+        listBasedTokenStream.close();
+      } catch (IOException e) {
+        // do nothing;
+      }
       listBasedTokenStream = new ListBasedTokenStream(listBasedTokenStream, tokens);
     }
 
+    try {
+      listBasedTokenStream.close();
+    } catch (IOException e) {
+      // do nothing.
+    }
     return namedList;
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/java/org/apache/solr/handler/BlobHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/BlobHandler.java b/solr/core/src/java/org/apache/solr/handler/BlobHandler.java
index e3097fc..30301c0 100644
--- a/solr/core/src/java/org/apache/solr/handler/BlobHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/BlobHandler.java
@@ -17,6 +17,7 @@
 package org.apache.solr.handler;
 
 import java.io.IOException;
+import java.io.InputStream;
 import java.io.OutputStream;
 import java.lang.invoke.MethodHandles;
 import java.math.BigInteger;
@@ -107,7 +108,10 @@ public class BlobHandler extends RequestHandlerBase implements PluginInfoInitial
 
 
       for (ContentStream stream : req.getContentStreams()) {
-        ByteBuffer payload = SimplePostTool.inputStreamToByteArray(stream.getStream(), maxSize);
+        ByteBuffer payload;
+        try (InputStream is = stream.getStream()) {
+          payload = SimplePostTool.inputStreamToByteArray(is, maxSize);
+        }
         MessageDigest m = MessageDigest.getInstance("MD5");
         m.update(payload.array(), payload.position(), payload.limit());
         String md5 = new BigInteger(1, m.digest()).toString(16);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
index 0101559..43ec573 100644
--- a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
@@ -1173,7 +1173,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
   }
 
   @Override
-  @SuppressWarnings("unchecked")
+  @SuppressWarnings({"unchecked", "resource"})
   public void inform(SolrCore core) {
     this.core = core;
     registerCloseHook();
@@ -1256,7 +1256,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
         replicateOnStart = true;
         RefCounted<SolrIndexSearcher> s = core.getNewestSearcher(false);
         try {
-          DirectoryReader reader = s==null ? null : s.get().getIndexReader();
+          DirectoryReader reader = (s == null) ? null : s.get().getIndexReader();
           if (reader!=null && reader.getIndexCommit() != null && reader.getIndexCommit().getGeneration() != 1L) {
             try {
               if(replicateOnOptimize){

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java b/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
index c0b35cf..46b09d2 100644
--- a/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
+++ b/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
@@ -514,7 +514,8 @@ class SolrTable extends AbstractQueryableTable implements TranslatableTable {
       // Do the rollups in parallel
       // Maintain the sort of the Tuples coming from the workers.
       StreamComparator comp = bucketSortComp(buckets, sortDirection);
-      ParallelStream parallelStream = new ParallelStream(zk, collection, tupleStream, numWorkers, comp);
+      @SuppressWarnings("resource")
+      final ParallelStream parallelStream = new ParallelStream(zk, collection, tupleStream, numWorkers, comp);
 
 
       parallelStream.setStreamFactory(factory);
@@ -740,7 +741,8 @@ class SolrTable extends AbstractQueryableTable implements TranslatableTable {
     if(numWorkers > 1) {
       // Do the unique in parallel
       // Maintain the sort of the Tuples coming from the workers.
-      ParallelStream parallelStream = new ParallelStream(zkHost, collection, tupleStream, numWorkers, comp);
+      @SuppressWarnings("resource")
+      final ParallelStream parallelStream = new ParallelStream(zkHost, collection, tupleStream, numWorkers, comp);
 
       StreamFactory factory = new StreamFactory()
           .withFunctionName("search", CloudSolrStream.class)

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java b/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java
index 1029c7d..b8fbb25 100644
--- a/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java
+++ b/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java
@@ -573,81 +573,77 @@ public class DefaultSolrHighlighter extends SolrHighlighter implements PluginInf
     final TokenStream tvStream =
         TokenSources.getTermVectorTokenStreamOrNull(fieldName, tvFields, maxCharsToAnalyze - 1);
     //  We need to wrap in OffsetWindowTokenFilter if multi-valued
-    final OffsetWindowTokenFilter tvWindowStream;
-    if (tvStream != null && fieldValues.size() > 1) {
-      tvWindowStream = new OffsetWindowTokenFilter(tvStream);
-    } else {
-      tvWindowStream = null;
-    }
+    try (OffsetWindowTokenFilter tvWindowStream = (tvStream != null && fieldValues.size() > 1) ? new OffsetWindowTokenFilter(tvStream) : null) {
 
-    for (String thisText : fieldValues) {
-      if (mvToMatch <= 0 || maxCharsToAnalyze <= 0) {
-        break;
-      }
+      for (String thisText : fieldValues) {
+        if (mvToMatch <= 0 || maxCharsToAnalyze <= 0) {
+          break;
+        }
 
-      TokenStream tstream;
-      if (tvWindowStream != null) {
-        // if we have a multi-valued field with term vectors, then get the next offset window
-        tstream = tvWindowStream.advanceToNextWindowOfLength(thisText.length());
-      } else if (tvStream != null) {
-        tstream = tvStream; // single-valued with term vectors
-      } else {
-        // fall back to analyzer
-        tstream = createAnalyzerTStream(schemaField, thisText);
-      }
+        TokenStream tstream;
+        if (tvWindowStream != null) {
+          // if we have a multi-valued field with term vectors, then get the next offset window
+          tstream = tvWindowStream.advanceToNextWindowOfLength(thisText.length());
+        } else if (tvStream != null) {
+          tstream = tvStream; // single-valued with term vectors
+        } else {
+          // fall back to analyzer
+          tstream = createAnalyzerTStream(schemaField, thisText);
+        }
 
-      Highlighter highlighter;
-      if (params.getFieldBool(fieldName, HighlightParams.USE_PHRASE_HIGHLIGHTER, true)) {
-        // We're going to call getPhraseHighlighter and it might consume the tokenStream. If it does, the tokenStream
-        // needs to implement reset() efficiently.
-
-        //If the tokenStream is right from the term vectors, then CachingTokenFilter is unnecessary.
-        //  It should be okay if OffsetLimit won't get applied in this case.
-        final TokenStream tempTokenStream;
-        if (tstream != tvStream) {
-          if (maxCharsToAnalyze >= thisText.length()) {
-            tempTokenStream = new CachingTokenFilter(tstream);
+        Highlighter highlighter;
+        if (params.getFieldBool(fieldName, HighlightParams.USE_PHRASE_HIGHLIGHTER, true)) {
+          // We're going to call getPhraseHighlighter and it might consume the tokenStream. If it does, the tokenStream
+          // needs to implement reset() efficiently.
+
+          //If the tokenStream is right from the term vectors, then CachingTokenFilter is unnecessary.
+          //  It should be okay if OffsetLimit won't get applied in this case.
+          final TokenStream tempTokenStream;
+          if (tstream != tvStream) {
+            if (maxCharsToAnalyze >= thisText.length()) {
+              tempTokenStream = new CachingTokenFilter(tstream);
+            } else {
+              tempTokenStream = new CachingTokenFilter(new OffsetLimitTokenFilter(tstream, maxCharsToAnalyze));
+            }
           } else {
-            tempTokenStream = new CachingTokenFilter(new OffsetLimitTokenFilter(tstream, maxCharsToAnalyze));
+            tempTokenStream = tstream;
           }
-        } else {
-          tempTokenStream = tstream;
-        }
 
-        // get highlighter
-        highlighter = getPhraseHighlighter(query, fieldName, req, tempTokenStream);
+          // get highlighter
+          highlighter = getPhraseHighlighter(query, fieldName, req, tempTokenStream);
 
-        // if the CachingTokenFilter was consumed then use it going forward.
-        if (tempTokenStream instanceof CachingTokenFilter && ((CachingTokenFilter) tempTokenStream).isCached()) {
-          tstream = tempTokenStream;
+          // if the CachingTokenFilter was consumed then use it going forward.
+          if (tempTokenStream instanceof CachingTokenFilter && ((CachingTokenFilter) tempTokenStream).isCached()) {
+            tstream = tempTokenStream;
+          }
+          //tstream.reset(); not needed; getBestTextFragments will reset it.
+        } else {
+          // use "the old way"
+          highlighter = getHighlighter(query, fieldName, req);
         }
-        //tstream.reset(); not needed; getBestTextFragments will reset it.
-      } else {
-        // use "the old way"
-        highlighter = getHighlighter(query, fieldName, req);
-      }
 
-      highlighter.setMaxDocCharsToAnalyze(maxCharsToAnalyze);
-      maxCharsToAnalyze -= thisText.length();
-
-      // Highlight!
-      try {
-        TextFragment[] bestTextFragments =
-            highlighter.getBestTextFragments(tstream, thisText, mergeContiguousFragments, numFragments);
-        for (TextFragment bestTextFragment : bestTextFragments) {
-          if (bestTextFragment == null)//can happen via mergeContiguousFragments
-            continue;
-          // normally we want a score (must be highlighted), but if preserveMulti then we return a snippet regardless.
-          if (bestTextFragment.getScore() > 0 || preserveMulti) {
-            frags.add(bestTextFragment);
-            if (bestTextFragment.getScore() > 0)
-              --mvToMatch; // note: limits fragments (for multi-valued fields), not quite the number of values
+        highlighter.setMaxDocCharsToAnalyze(maxCharsToAnalyze);
+        maxCharsToAnalyze -= thisText.length();
+
+        // Highlight!
+        try {
+          TextFragment[] bestTextFragments =
+              highlighter.getBestTextFragments(tstream, thisText, mergeContiguousFragments, numFragments);
+          for (TextFragment bestTextFragment : bestTextFragments) {
+            if (bestTextFragment == null)//can happen via mergeContiguousFragments
+              continue;
+            // normally we want a score (must be highlighted), but if preserveMulti then we return a snippet regardless.
+            if (bestTextFragment.getScore() > 0 || preserveMulti) {
+              frags.add(bestTextFragment);
+              if (bestTextFragment.getScore() > 0)
+                --mvToMatch; // note: limits fragments (for multi-valued fields), not quite the number of values
+            }
           }
+        } catch (InvalidTokenOffsetsException e) {
+          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
         }
-      } catch (InvalidTokenOffsetsException e) {
-        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
-      }
-    }//end field value loop
+      }//end field value loop
+    }
 
     // Put the fragments onto the Solr response (docSummaries)
     if (frags.size() > 0) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/java/org/apache/solr/response/SmileResponseWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/response/SmileResponseWriter.java b/solr/core/src/java/org/apache/solr/response/SmileResponseWriter.java
index 663b72a..0bd0658 100644
--- a/solr/core/src/java/org/apache/solr/response/SmileResponseWriter.java
+++ b/solr/core/src/java/org/apache/solr/response/SmileResponseWriter.java
@@ -30,7 +30,9 @@ public class SmileResponseWriter extends BinaryResponseWriter {
 
   @Override
   public void write(OutputStream out, SolrQueryRequest request, SolrQueryResponse response) throws IOException {
-    new SmileWriter(out, request, response).writeResponse();
+    try (SmileWriter sw = new SmileWriter(out, request, response)) {
+      sw.writeResponse();
+    }
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/java/org/apache/solr/schema/OpenExchangeRatesOrgProvider.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/OpenExchangeRatesOrgProvider.java b/solr/core/src/java/org/apache/solr/schema/OpenExchangeRatesOrgProvider.java
index 2b6cbf6..08e8d13 100644
--- a/solr/core/src/java/org/apache/solr/schema/OpenExchangeRatesOrgProvider.java
+++ b/solr/core/src/java/org/apache/solr/schema/OpenExchangeRatesOrgProvider.java
@@ -136,6 +136,7 @@ public class OpenExchangeRatesOrgProvider implements ExchangeRateProvider {
   }
 
   @Override
+  @SuppressWarnings("resource")
   public boolean reload() throws SolrException {
     InputStream ratesJsonStream = null;
     try {
@@ -151,10 +152,12 @@ public class OpenExchangeRatesOrgProvider implements ExchangeRateProvider {
     } catch (Exception e) {
       throw new SolrException(ErrorCode.SERVER_ERROR, "Error reloading exchange rates", e);
     } finally {
-      if (ratesJsonStream != null) try {
-        ratesJsonStream.close();
-      } catch (IOException e) {
-        throw new SolrException(ErrorCode.SERVER_ERROR, "Error closing stream", e);
+      if (ratesJsonStream != null) {
+        try {
+          ratesJsonStream.close();
+        } catch (IOException e) {
+          throw new SolrException(ErrorCode.SERVER_ERROR, "Error closing stream", e);
+        }
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java
index ed7b3362..559ee8b 100644
--- a/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java
@@ -1268,7 +1268,9 @@ public class CollapsingQParserPlugin extends QParserPlugin {
 
           Map<String, UninvertingReader.Type> mapping = new HashMap();
           mapping.put(collapseField, UninvertingReader.Type.SORTED);
-          UninvertingReader uninvertingReader = new UninvertingReader(new ReaderWrapper(searcher.getSlowAtomicReader(), collapseField), mapping);
+          @SuppressWarnings("resource") final UninvertingReader uninvertingReader =
+              new UninvertingReader(new ReaderWrapper(searcher.getSlowAtomicReader(), collapseField), mapping);
+
           docValuesProducer = new EmptyDocValuesProducer() {
               @Override
               public SortedDocValues getSorted(FieldInfo ignored) throws IOException {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/java/org/apache/solr/search/DocSet.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/DocSet.java b/solr/core/src/java/org/apache/solr/search/DocSet.java
index 172cce7..cd08ce9 100644
--- a/solr/core/src/java/org/apache/solr/search/DocSet.java
+++ b/solr/core/src/java/org/apache/solr/search/DocSet.java
@@ -16,8 +16,6 @@
  */
 package org.apache.solr.search;
 
-import java.io.Closeable;
-
 import org.apache.lucene.util.Accountable;
 import org.apache.solr.common.SolrException;
 
@@ -31,7 +29,7 @@ import org.apache.solr.common.SolrException;
  *
  * @since solr 0.9
  */
-public interface DocSet extends Closeable, Accountable, Cloneable /* extends Collection<Integer> */ {
+public interface DocSet extends Accountable, Cloneable /* extends Collection<Integer> */ {
   
   /**
    * Adds the specified document if it is not currently in the DocSet

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/java/org/apache/solr/search/DocSetBase.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/DocSetBase.java b/solr/core/src/java/org/apache/solr/search/DocSetBase.java
index 465c208..90a3aad 100644
--- a/solr/core/src/java/org/apache/solr/search/DocSetBase.java
+++ b/solr/core/src/java/org/apache/solr/search/DocSetBase.java
@@ -16,7 +16,6 @@
  */
 package org.apache.solr.search;
 
-import java.io.IOException;
 import java.util.Objects;
 
 import org.apache.lucene.index.LeafReader;
@@ -264,9 +263,4 @@ abstract class DocSetBase implements DocSet {
     }
   }
 
-
-  /** FUTURE: for off-heap */
-  @Override
-  public void close() throws IOException {
-  }
 }

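The DocSet change above takes the opposite tack: rather than closing anything, the interface stops extending Closeable (its close() was an empty placeholder for future off-heap use), so the lint no longer treats DocSet locals as resources; the UnInvertedField hunk below drops its now-unneeded try-with-resources accordingly. A sketch of the call-site effect (process() is a hypothetical consumer):

    // Before (DocSet extends Closeable): the lint wants a close.
    try (DocSet intersection = searcher.getDocSet(query, docs)) {
      process(intersection);
    }

    // After (Accountable/Cloneable only): nothing to close.
    DocSet intersection = searcher.getDocSet(query, docs);
    process(intersection);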
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/java/org/apache/solr/search/facet/UnInvertedField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/UnInvertedField.java b/solr/core/src/java/org/apache/solr/search/facet/UnInvertedField.java
index c9f3b0b..9c395b7 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/UnInvertedField.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/UnInvertedField.java
@@ -426,13 +426,11 @@ public class UnInvertedField extends DocTermOrds {
     for (TopTerm tt : bigTerms.values()) {
       if (tt.termNum >= startTermIndex && tt.termNum < endTermIndex) {
         // handle the biggest terms
-        try ( DocSet intersection = searcher.getDocSet(tt.termQuery, docs); )
-        {
-          int collected = processor.collectFirstPhase(intersection, tt.termNum - startTermIndex);
-          countAcc.incrementCount(tt.termNum - startTermIndex, collected);
-          if (collected > 0) {
-            uniqueTerms++;
-          }
+        DocSet intersection = searcher.getDocSet(tt.termQuery, docs);
+        int collected = processor.collectFirstPhase(intersection, tt.termNum - startTermIndex);
+        countAcc.incrementCount(tt.termNum - startTermIndex, collected);
+        if (collected > 0) {
+          uniqueTerms++;
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/java/org/apache/solr/search/function/OrdFieldSource.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/function/OrdFieldSource.java b/solr/core/src/java/org/apache/solr/search/function/OrdFieldSource.java
index 7cd8142..9681995 100644
--- a/solr/core/src/java/org/apache/solr/search/function/OrdFieldSource.java
+++ b/solr/core/src/java/org/apache/solr/search/function/OrdFieldSource.java
@@ -76,7 +76,7 @@ public class OrdFieldSource extends ValueSource {
     final LeafReader r;
     Object o = context.get("searcher");
     if (o instanceof SolrIndexSearcher) {
-      SolrIndexSearcher is = (SolrIndexSearcher) o;
+      @SuppressWarnings("resource") final SolrIndexSearcher is = (SolrIndexSearcher) o;
       SchemaField sf = is.getSchema().getFieldOrNull(field);
       if (sf != null && sf.getType().isPointField()) {
         throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/java/org/apache/solr/search/function/ReverseOrdFieldSource.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/function/ReverseOrdFieldSource.java b/solr/core/src/java/org/apache/solr/search/function/ReverseOrdFieldSource.java
index 0ada4d5..d75dca3 100644
--- a/solr/core/src/java/org/apache/solr/search/function/ReverseOrdFieldSource.java
+++ b/solr/core/src/java/org/apache/solr/search/function/ReverseOrdFieldSource.java
@@ -76,7 +76,7 @@ public class ReverseOrdFieldSource extends ValueSource {
     final LeafReader r;
     Object o = context.get("searcher");
     if (o instanceof SolrIndexSearcher) {
-      SolrIndexSearcher is = (SolrIndexSearcher) o;
+      @SuppressWarnings("resource")  final SolrIndexSearcher is = (SolrIndexSearcher) o;
       SchemaField sf = is.getSchema().getFieldOrNull(field);
       if (sf != null && sf.getType().isPointField()) {
         throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
index 4d944d2..edf616e 100644
--- a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
+++ b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
@@ -270,29 +270,33 @@ public class SolrDispatchFilter extends BaseSolrFilter {
    * @return the NodeConfig
    */
   public static NodeConfig loadNodeConfig(Path solrHome, Properties nodeProperties) {
+    NodeConfig cfg = null;
+    try (SolrResourceLoader loader = new SolrResourceLoader(solrHome, SolrDispatchFilter.class.getClassLoader(), nodeProperties)) {
+      if (!StringUtils.isEmpty(System.getProperty("solr.solrxml.location"))) {
+        log.warn("Solr property solr.solrxml.location is no longer supported. " +
+            "Will automatically load solr.xml from ZooKeeper if it exists");
+      }
 
-    SolrResourceLoader loader = new SolrResourceLoader(solrHome, SolrDispatchFilter.class.getClassLoader(), nodeProperties);
-    if (!StringUtils.isEmpty(System.getProperty("solr.solrxml.location"))) {
-      log.warn("Solr property solr.solrxml.location is no longer supported. " +
-               "Will automatically load solr.xml from ZooKeeper if it exists");
-    }
-
-    String zkHost = System.getProperty("zkHost");
-    if (!StringUtils.isEmpty(zkHost)) {
-      int startUpZkTimeOut = Integer.getInteger("waitForZk", 30);
-      startUpZkTimeOut *= 1000;
-      try (SolrZkClient zkClient = new SolrZkClient(zkHost, startUpZkTimeOut)) {
-        if (zkClient.exists("/solr.xml", true)) {
-          log.info("solr.xml found in ZooKeeper. Loading...");
-          byte[] data = zkClient.getData("/solr.xml", null, null, true);
-          return SolrXmlConfig.fromInputStream(loader, new ByteArrayInputStream(data));
+      String zkHost = System.getProperty("zkHost");
+      if (!StringUtils.isEmpty(zkHost)) {
+        int startUpZkTimeOut = Integer.getInteger("waitForZk", 30);
+        startUpZkTimeOut *= 1000;
+        try (SolrZkClient zkClient = new SolrZkClient(zkHost, startUpZkTimeOut)) {
+          if (zkClient.exists("/solr.xml", true)) {
+            log.info("solr.xml found in ZooKeeper. Loading...");
+            byte[] data = zkClient.getData("/solr.xml", null, null, true);
+            return SolrXmlConfig.fromInputStream(loader, new ByteArrayInputStream(data));
+          }
+        } catch (Exception e) {
+          throw new SolrException(ErrorCode.SERVER_ERROR, "Error occurred while loading solr.xml from zookeeper", e);
         }
-      } catch (Exception e) {
-        throw new SolrException(ErrorCode.SERVER_ERROR, "Error occurred while loading solr.xml from zookeeper", e);
+        log.info("Loading solr.xml from SolrHome (not found in ZooKeeper)");
       }
-      log.info("Loading solr.xml from SolrHome (not found in ZooKeeper)");
+      cfg = SolrXmlConfig.fromSolrHome(loader, loader.getInstancePath());
+    } catch (IOException e) {
+      // do nothing.
     }
-    return SolrXmlConfig.fromSolrHome(loader, loader.getInstancePath());
+    return cfg;
   }
   
   public CoreContainer getCores() {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/java/org/apache/solr/update/CdcrTransactionLog.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/CdcrTransactionLog.java b/solr/core/src/java/org/apache/solr/update/CdcrTransactionLog.java
index f959e14..3534f62 100644
--- a/solr/core/src/java/org/apache/solr/update/CdcrTransactionLog.java
+++ b/solr/core/src/java/org/apache/solr/update/CdcrTransactionLog.java
@@ -97,9 +97,9 @@ public class CdcrTransactionLog extends TransactionLog {
         // and 4 bytes for the number of records
         long pos = size - 4 - END_MESSAGE.length() - 1 - 4;
         if (pos < 0) return 0;
-
-        ChannelFastInputStream is = new ChannelFastInputStream(channel, pos);
-        return is.readInt();
+        try (ChannelFastInputStream is = new ChannelFastInputStream(channel, pos)) {
+          return is.readInt();
+        }
       }
     } catch (IOException e) {
       log.error("Error while reading number of records in tlog " + this, e);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java b/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java
index c478935..0f89016 100644
--- a/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java
+++ b/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java
@@ -188,8 +188,9 @@ public class HdfsTransactionLog extends TransactionLog {
     fis = fis != null ? fis : new FSDataFastInputStream(fs.open(tlogFile), 0);
     Map header = null;
     try {
-      LogCodec codec = new LogCodec(resolver);
-      header = (Map) codec.unmarshal(fis);
+      try (LogCodec codec = new LogCodec(resolver)) {
+        header = (Map) codec.unmarshal(fis);
+      }
       
       fis.readInt(); // skip size
     } finally {
@@ -258,8 +259,9 @@ public class HdfsTransactionLog extends TransactionLog {
           pos);
       try {
         dis.seek(pos);
-        LogCodec codec = new LogCodec(resolver);
-        return codec.readVal(new FastInputStream(dis));
+        try (LogCodec codec = new LogCodec(resolver)) {
+          return codec.readVal(new FastInputStream(dis));
+        }
       } finally {
         dis.close();
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/java/org/apache/solr/update/TransactionLog.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/TransactionLog.java b/solr/core/src/java/org/apache/solr/update/TransactionLog.java
index 7daf44f..35d722b 100644
--- a/solr/core/src/java/org/apache/solr/update/TransactionLog.java
+++ b/solr/core/src/java/org/apache/solr/update/TransactionLog.java
@@ -225,7 +225,7 @@ public class TransactionLog implements Closeable {
     byte[] buf = new byte[ END_MESSAGE.length() ];
     long pos = size - END_MESSAGE.length() - 4;
     if (pos < 0) return false;
-    ChannelFastInputStream is = new ChannelFastInputStream(channel, pos);
+    @SuppressWarnings("resource") final ChannelFastInputStream is = new ChannelFastInputStream(channel, pos);
     is.read(buf);
     for (int i=0; i<buf.length; i++) {
       if (buf[i] != END_MESSAGE.charAt(i)) return false;
@@ -257,7 +257,7 @@ public class TransactionLog implements Closeable {
   }
 
   public long writeData(Object o) {
-    LogCodec codec = new LogCodec(resolver);
+    @SuppressWarnings("resource") final LogCodec codec = new LogCodec(resolver);
     try {
       long pos = fos.size();   // if we had flushed, this should be equal to channel.position()
       codec.init(fos);
@@ -272,7 +272,7 @@ public class TransactionLog implements Closeable {
   private void readHeader(FastInputStream fis) throws IOException {
     // read existing header
     fis = fis != null ? fis : new ChannelFastInputStream(channel, 0);
-    LogCodec codec = new LogCodec(resolver);
+    @SuppressWarnings("resource") final LogCodec codec = new LogCodec(resolver);
     Map header = (Map)codec.unmarshal(fis);
 
     fis.readInt(); // skip size
@@ -507,6 +507,7 @@ public class TransactionLog implements Closeable {
 
 
   /* This method is thread safe */
+
   public Object lookup(long pos) {
     // A negative position can result from a log replay (which does not re-log, but does
     // update the version map.  This is OK since the node won't be ACTIVE when this happens.
@@ -526,8 +527,9 @@ public class TransactionLog implements Closeable {
       }
 
       ChannelFastInputStream fis = new ChannelFastInputStream(channel, pos);
-      LogCodec codec = new LogCodec(resolver);
-      return codec.readVal(fis);
+      try (LogCodec codec = new LogCodec(resolver)) {
+        return codec.readVal(fis);
+      }
     } catch (IOException e) {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/java/org/apache/solr/util/SimplePostTool.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/SimplePostTool.java b/solr/core/src/java/org/apache/solr/util/SimplePostTool.java
index 327361e..a1d71a5 100644
--- a/solr/core/src/java/org/apache/solr/util/SimplePostTool.java
+++ b/solr/core/src/java/org/apache/solr/util/SimplePostTool.java
@@ -652,17 +652,17 @@ public class SimplePostTool {
    * @throws IOException If there is a low-level I/O error.
    */
   public static ByteBuffer inputStreamToByteArray(InputStream is, long maxSize) throws IOException {
-    BAOS bos =  new BAOS();
-    long sz = 0;
-    int next = is.read();
-    while (next > -1) {
-      if(++sz > maxSize) throw new BufferOverflowException();
-      bos.write(next);
-      next = is.read();
+    try (BAOS bos = new BAOS()) {
+      long sz = 0;
+      int next = is.read();
+      while (next > -1) {
+        if (++sz > maxSize) throw new BufferOverflowException();
+        bos.write(next);
+        next = is.read();
+      }
+      bos.flush();
+      return bos.getByteBuffer();
     }
-    bos.flush();
-    is.close();
-    return bos.getByteBuffer();
   }
 
   /**

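Note the ownership change above: inputStreamToByteArray used to close its argument as a side effect; after this patch the caller owns the stream, which is why the BlobRepository and BlobHandler hunks earlier wrap their calls in try-with-resources. The call-site pattern, mirroring the BlobHandler hunk:

    // Caller now opens and closes the stream; the helper only reads.
    ByteBuffer payload;
    try (InputStream is = stream.getStream()) {
      payload = SimplePostTool.inputStreamToByteArray(is, maxSize);
    }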
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/java/org/apache/solr/util/SolrLogLayout.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/SolrLogLayout.java b/solr/core/src/java/org/apache/solr/util/SolrLogLayout.java
index a60ada8..735cde2 100644
--- a/solr/core/src/java/org/apache/solr/util/SolrLogLayout.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrLogLayout.java
@@ -146,8 +146,11 @@ public class SolrLogLayout extends Layout {
      ***/
     
     SolrRequestInfo requestInfo = SolrRequestInfo.getRequestInfo();
-    SolrQueryRequest req = requestInfo == null ? null : requestInfo.getReq();
-    SolrCore core = req == null ? null : req.getCore();
+
+    SolrCore core;
+    try (SolrQueryRequest req = (requestInfo == null) ? null : requestInfo.getReq()) {
+      core = (req == null) ? null : req.getCore();
+    }
     ZkController zkController = null;
     CoreInfo info = null;
     

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java
index 3b46922..59cd8f7 100644
--- a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java
@@ -297,34 +297,34 @@ public class OverseerTest extends SolrTestCaseJ4 {
 
       overseerClient = electNewOverseer(server.getZkAddress());
 
-      ZkStateReader reader = new ZkStateReader(zkClient);
-      reader.createClusterStateWatchersAndUpdate();
+      try (ZkStateReader reader = new ZkStateReader(zkClient)) {
+        reader.createClusterStateWatchersAndUpdate();
 
-      zkController = new MockZKController(server.getZkAddress(), "127.0.0.1");
+        zkController = new MockZKController(server.getZkAddress(), "127.0.0.1");
 
-      final int numShards=6;
+        final int numShards = 6;
 
-      ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(),
-          "name", COLLECTION,
-          ZkStateReader.REPLICATION_FACTOR, "1",
-          ZkStateReader.NUM_SHARDS_PROP, "3",
-          "createNodeSet", "");
-      ZkDistributedQueue q = Overseer.getStateUpdateQueue(zkClient);
-      q.offer(Utils.toJSON(m));
+        ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(),
+            "name", COLLECTION,
+            ZkStateReader.REPLICATION_FACTOR, "1",
+            ZkStateReader.NUM_SHARDS_PROP, "3",
+            "createNodeSet", "");
+        ZkDistributedQueue q = Overseer.getStateUpdateQueue(zkClient);
+        q.offer(Utils.toJSON(m));
 
-      for (int i = 0; i < numShards; i++) {
-        assertNotNull("shard got no id?", zkController.publishState(COLLECTION, "core" + (i+1), "node" + (i+1), "shard"+((i%3)+1), Replica.State.ACTIVE, 3));
+        for (int i = 0; i < numShards; i++) {
+          assertNotNull("shard got no id?", zkController.publishState(COLLECTION, "core" + (i + 1), "node" + (i + 1), "shard" + ((i % 3) + 1), Replica.State.ACTIVE, 3));
+        }
+        final Map<String, Replica> rmap = reader.getClusterState().getCollection(COLLECTION).getSlice("shard1").getReplicasMap();
+        assertEquals(rmap.toString(), 2, rmap.size());
+        assertEquals(rmap.toString(), 2, reader.getClusterState().getCollection(COLLECTION).getSlice("shard2").getReplicasMap().size());
+        assertEquals(rmap.toString(), 2, reader.getClusterState().getCollection(COLLECTION).getSlice("shard3").getReplicasMap().size());
+
+        //make sure leaders are in cloud state
+        assertNotNull(reader.getLeaderUrl(COLLECTION, "shard1", 15000));
+        assertNotNull(reader.getLeaderUrl(COLLECTION, "shard2", 15000));
+        assertNotNull(reader.getLeaderUrl(COLLECTION, "shard3", 15000));
       }
-      final Map<String,Replica> rmap = reader.getClusterState().getCollection(COLLECTION).getSlice("shard1").getReplicasMap();
-      assertEquals(rmap.toString(), 2, rmap.size());
-      assertEquals(rmap.toString(), 2, reader.getClusterState().getCollection(COLLECTION).getSlice("shard2").getReplicasMap().size());
-      assertEquals(rmap.toString(), 2, reader.getClusterState().getCollection(COLLECTION).getSlice("shard3").getReplicasMap().size());
-
-      //make sure leaders are in cloud state
-      assertNotNull(reader.getLeaderUrl(COLLECTION, "shard1", 15000));
-      assertNotNull(reader.getLeaderUrl(COLLECTION, "shard2", 15000));
-      assertNotNull(reader.getLeaderUrl(COLLECTION, "shard3", 15000));
-
     } finally {
       close(zkClient);
       if (zkController != null) {
@@ -355,47 +355,48 @@ public class OverseerTest extends SolrTestCaseJ4 {
 
       overseerClient = electNewOverseer(server.getZkAddress());
 
-      ZkStateReader reader = new ZkStateReader(zkClient);
-      reader.createClusterStateWatchersAndUpdate();
-      
-      zkController = new MockZKController(server.getZkAddress(), "127.0.0.1");
+      try (ZkStateReader reader = new ZkStateReader(zkClient)) {
+        reader.createClusterStateWatchersAndUpdate();
 
-      final int numShards=3;
-      zkController.createCollection(COLLECTION, 3);
-      for (int i = 0; i < numShards; i++) {
-        assertNotNull("shard got no id?", zkController.publishState(COLLECTION, "core" + (i+1),
-            "node" + (i+1), "shard"+((i%3)+1) , Replica.State.ACTIVE, 3));
-      }
+        zkController = new MockZKController(server.getZkAddress(), "127.0.0.1");
 
-      assertEquals(1, reader.getClusterState().getCollection(COLLECTION).getSlice("shard1").getReplicasMap().size());
-      assertEquals(1, reader.getClusterState().getCollection(COLLECTION).getSlice("shard2").getReplicasMap().size());
-      assertEquals(1, reader.getClusterState().getCollection(COLLECTION).getSlice("shard3").getReplicasMap().size());
-      
-      //make sure leaders are in cloud state
-      assertNotNull(reader.getLeaderUrl(COLLECTION, "shard1", 15000));
-      assertNotNull(reader.getLeaderUrl(COLLECTION, "shard2", 15000));
-      assertNotNull(reader.getLeaderUrl(COLLECTION, "shard3", 15000));
-      
-      // publish a bad queue item
-      String emptyCollectionName = "";
-      zkController.publishState(emptyCollectionName, "core0", "node0", "shard1",  Replica.State.ACTIVE, 1);
-      zkController.publishState(emptyCollectionName, "core0", "node0", "shard1", null, 1);
-
-      zkController.createCollection("collection2", 3);
-      // make sure the Overseer is still processing items
-      for (int i = 0; i < numShards; i++) {
-        assertNotNull("shard got no id?", zkController.publishState("collection2",
-            "core" + (i + 1), "node" + (i + 1),"shard"+((i%3)+1), Replica.State.ACTIVE, 3));
-      }
+        final int numShards = 3;
+        zkController.createCollection(COLLECTION, 3);
+        for (int i = 0; i < numShards; i++) {
+          assertNotNull("shard got no id?", zkController.publishState(COLLECTION, "core" + (i + 1),
+              "node" + (i + 1), "shard" + ((i % 3) + 1), Replica.State.ACTIVE, 3));
+        }
 
-      assertEquals(1, reader.getClusterState().getCollection("collection2").getSlice("shard1").getReplicasMap().size());
-      assertEquals(1, reader.getClusterState().getCollection("collection2").getSlice("shard2").getReplicasMap().size());
-      assertEquals(1, reader.getClusterState().getCollection("collection2").getSlice("shard3").getReplicasMap().size());
-      
-      //make sure leaders are in cloud state
-      assertNotNull(reader.getLeaderUrl("collection2", "shard1", 15000));
-      assertNotNull(reader.getLeaderUrl("collection2", "shard2", 15000));
-      assertNotNull(reader.getLeaderUrl("collection2", "shard3", 15000));
+        assertEquals(1, reader.getClusterState().getCollection(COLLECTION).getSlice("shard1").getReplicasMap().size());
+        assertEquals(1, reader.getClusterState().getCollection(COLLECTION).getSlice("shard2").getReplicasMap().size());
+        assertEquals(1, reader.getClusterState().getCollection(COLLECTION).getSlice("shard3").getReplicasMap().size());
+
+        //make sure leaders are in cloud state
+        assertNotNull(reader.getLeaderUrl(COLLECTION, "shard1", 15000));
+        assertNotNull(reader.getLeaderUrl(COLLECTION, "shard2", 15000));
+        assertNotNull(reader.getLeaderUrl(COLLECTION, "shard3", 15000));
+
+        // publish a bad queue item
+        String emptyCollectionName = "";
+        zkController.publishState(emptyCollectionName, "core0", "node0", "shard1", Replica.State.ACTIVE, 1);
+        zkController.publishState(emptyCollectionName, "core0", "node0", "shard1", null, 1);
+
+        zkController.createCollection("collection2", 3);
+        // make sure the Overseer is still processing items
+        for (int i = 0; i < numShards; i++) {
+          assertNotNull("shard got no id?", zkController.publishState("collection2",
+              "core" + (i + 1), "node" + (i + 1), "shard" + ((i % 3) + 1), Replica.State.ACTIVE, 3));
+        }
+
+        assertEquals(1, reader.getClusterState().getCollection("collection2").getSlice("shard1").getReplicasMap().size());
+        assertEquals(1, reader.getClusterState().getCollection("collection2").getSlice("shard2").getReplicasMap().size());
+        assertEquals(1, reader.getClusterState().getCollection("collection2").getSlice("shard3").getReplicasMap().size());
+
+        //make sure leaders are in cloud state
+        assertNotNull(reader.getLeaderUrl("collection2", "shard1", 15000));
+        assertNotNull(reader.getLeaderUrl("collection2", "shard2", 15000));
+        assertNotNull(reader.getLeaderUrl("collection2", "shard3", 15000));
+      }
       
     } finally {
       close(zkClient);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanActionTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanActionTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanActionTest.java
index d31bba5..9f86393 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanActionTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanActionTest.java
@@ -152,12 +152,13 @@ public class AutoAddReplicasPlanActionTest extends SolrCloudTestCase{
 
   @SuppressForbidden(reason = "Needs currentTimeMillis to create unique id")
   private List<SolrRequest> getOperations(JettySolrRunner actionJetty, String lostNodeName) throws Exception {
-    AutoAddReplicasPlanAction action = new AutoAddReplicasPlanAction();
-    TriggerEvent lostNode = new NodeLostTrigger.NodeLostEvent(TriggerEventType.NODELOST, ".auto_add_replicas", Collections.singletonList(System.currentTimeMillis()), Collections.singletonList(lostNodeName));
-    ActionContext context = new ActionContext(actionJetty.getCoreContainer().getZkController().getSolrCloudManager(), null, new HashMap<>());
-    action.process(lostNode, context);
-    List<SolrRequest> operations = (List) context.getProperty("operations");
-    return operations;
+    try (AutoAddReplicasPlanAction action = new AutoAddReplicasPlanAction()) {
+      TriggerEvent lostNode = new NodeLostTrigger.NodeLostEvent(TriggerEventType.NODELOST, ".auto_add_replicas", Collections.singletonList(System.currentTimeMillis()), Collections.singletonList(lostNodeName));
+      ActionContext context = new ActionContext(actionJetty.getCoreContainer().getZkController().getSolrCloudManager(), null, new HashMap<>());
+      action.process(lostNode, context);
+      List<SolrRequest> operations = (List) context.getProperty("operations");
+      return operations;
+    }
   }
 
   private void assertOperations(String collection, List<SolrRequest> operations, String lostNodeName,

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/test/org/apache/solr/cloud/autoscaling/TestPolicyCloud.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/TestPolicyCloud.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/TestPolicyCloud.java
index 9637a32..7c432ef 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/TestPolicyCloud.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/TestPolicyCloud.java
@@ -223,16 +223,18 @@ public class TestPolicyCloud extends SolrCloudTestCase {
         .process(cluster.getSolrClient());
     DocCollection collection = getCollectionState("metricsTest");
     DistributedQueueFactory queueFactory = new ZkDistributedQueueFactory(cluster.getZkClient());
-    SolrCloudManager provider = new SolrClientCloudManager(queueFactory, solrClient);
-    List<String> tags = Arrays.asList("metrics:solr.node:ADMIN./admin/authorization.clientErrors:count",
-        "metrics:solr.jvm:buffers.direct.Count");
-    Map<String, Object> val = provider.getNodeStateProvider().getNodeValues(collection .getReplicas().get(0).getNodeName(), tags);
-    for (String tag : tags) {
-      assertNotNull( "missing : "+ tag , val.get(tag));
+    try (SolrCloudManager provider = new SolrClientCloudManager(queueFactory, solrClient)) {
+      List<String> tags = Arrays.asList("metrics:solr.node:ADMIN./admin/authorization.clientErrors:count",
+          "metrics:solr.jvm:buffers.direct.Count");
+      Map<String, Object> val = provider.getNodeStateProvider().getNodeValues(collection.getReplicas().get(0).getNodeName(), tags);
+      for (String tag : tags) {
+        assertNotNull("missing : " + tag, val.get(tag));
+      }
+      val = provider.getNodeStateProvider().getNodeValues(collection.getReplicas().get(0).getNodeName(), Collections.singleton("diskType"));
+
+      Set<String> diskTypes = ImmutableSet.of("rotational", "ssd");
+      assertTrue(diskTypes.contains(val.get("diskType")));
     }
-    val = provider.getNodeStateProvider().getNodeValues(collection.getReplicas().get(0).getNodeName(), Collections.singleton("diskType"));
-    Set<String> diskTypes = ImmutableSet.of("rotational", "ssd");
-    assertTrue(diskTypes.contains(val.get("diskType")));
   }
 
   public void testCreateCollectionAddShardWithReplicaTypeUsingPolicy() throws Exception {
@@ -325,42 +327,43 @@ public class TestPolicyCloud extends SolrCloudTestCase {
         .process(cluster.getSolrClient());
     DocCollection rulesCollection = getCollectionState("policiesTest");
 
-    SolrCloudManager cloudManager = new SolrClientCloudManager(new ZkDistributedQueueFactory(cluster.getZkClient()), cluster.getSolrClient());
-    Map<String, Object> val = cloudManager.getNodeStateProvider().getNodeValues(rulesCollection.getReplicas().get(0).getNodeName(), Arrays.asList(
-        "freedisk",
-        "cores",
-        "heapUsage",
-        "sysLoadAvg"));
-    assertNotNull(val.get("freedisk"));
-    assertNotNull(val.get("heapUsage"));
-    assertNotNull(val.get("sysLoadAvg"));
-    assertTrue(((Number) val.get("cores")).intValue() > 0);
-    assertTrue("freedisk value is " + ((Number) val.get("freedisk")).doubleValue(),  Double.compare(((Number) val.get("freedisk")).doubleValue(), 0.0d) > 0);
-    assertTrue("heapUsage value is " + ((Number) val.get("heapUsage")).doubleValue(), Double.compare(((Number) val.get("heapUsage")).doubleValue(), 0.0d) > 0);
-    if (!Constants.WINDOWS)  {
-      // the system load average metrics is not available on windows platform
-      assertTrue("sysLoadAvg value is " + ((Number) val.get("sysLoadAvg")).doubleValue(), Double.compare(((Number) val.get("sysLoadAvg")).doubleValue(), 0.0d) > 0);
-    }
-    String overseerNode = OverseerTaskProcessor.getLeaderNode(cluster.getZkClient());
-    cluster.getSolrClient().request(CollectionAdminRequest.addRole(overseerNode, "overseer"));
-    for (int i = 0; i < 10; i++) {
-      Map<String, Object> data = Utils.getJson(cluster.getZkClient(), ZkStateReader.ROLES, true);
-      if (i >= 9 && data.isEmpty()) {
-        throw new RuntimeException("NO overseer node created");
+    try (SolrCloudManager cloudManager = new SolrClientCloudManager(new ZkDistributedQueueFactory(cluster.getZkClient()), cluster.getSolrClient())) {
+      Map<String, Object> val = cloudManager.getNodeStateProvider().getNodeValues(rulesCollection.getReplicas().get(0).getNodeName(), Arrays.asList(
+          "freedisk",
+          "cores",
+          "heapUsage",
+          "sysLoadAvg"));
+      assertNotNull(val.get("freedisk"));
+      assertNotNull(val.get("heapUsage"));
+      assertNotNull(val.get("sysLoadAvg"));
+      assertTrue(((Number) val.get("cores")).intValue() > 0);
+      assertTrue("freedisk value is " + ((Number) val.get("freedisk")).doubleValue(), Double.compare(((Number) val.get("freedisk")).doubleValue(), 0.0d) > 0);
+      assertTrue("heapUsage value is " + ((Number) val.get("heapUsage")).doubleValue(), Double.compare(((Number) val.get("heapUsage")).doubleValue(), 0.0d) > 0);
+      if (!Constants.WINDOWS) {
+        // the system load average metric is not available on the Windows platform
+        assertTrue("sysLoadAvg value is " + ((Number) val.get("sysLoadAvg")).doubleValue(), Double.compare(((Number) val.get("sysLoadAvg")).doubleValue(), 0.0d) > 0);
+      }
+      String overseerNode = OverseerTaskProcessor.getLeaderNode(cluster.getZkClient());
+      cluster.getSolrClient().request(CollectionAdminRequest.addRole(overseerNode, "overseer"));
+      for (int i = 0; i < 10; i++) {
+        Map<String, Object> data = Utils.getJson(cluster.getZkClient(), ZkStateReader.ROLES, true);
+        if (i >= 9 && data.isEmpty()) {
+          throw new RuntimeException("NO overseer node created");
+        }
+        Thread.sleep(100);
       }
-      Thread.sleep(100);
+      val = cloudManager.getNodeStateProvider().getNodeValues(overseerNode, Arrays.asList(
+          "nodeRole",
+          "ip_1", "ip_2", "ip_3", "ip_4",
+          "sysprop.java.version",
+          "sysprop.java.vendor"));
+      assertEquals("overseer", val.get("nodeRole"));
+      assertNotNull(val.get("ip_1"));
+      assertNotNull(val.get("ip_2"));
+      assertNotNull(val.get("ip_3"));
+      assertNotNull(val.get("ip_4"));
+      assertNotNull(val.get("sysprop.java.version"));
+      assertNotNull(val.get("sysprop.java.vendor"));
     }
-    val = cloudManager.getNodeStateProvider().getNodeValues(overseerNode, Arrays.asList(
-        "nodeRole",
-        "ip_1", "ip_2", "ip_3", "ip_4",
-        "sysprop.java.version",
-        "sysprop.java.vendor"));
-    assertEquals("overseer", val.get("nodeRole"));
-    assertNotNull(val.get("ip_1"));
-    assertNotNull(val.get("ip_2"));
-    assertNotNull(val.get("ip_3"));
-    assertNotNull(val.get("ip_4"));
-    assertNotNull(val.get("sysprop.java.version"));
-    assertNotNull(val.get("sysprop.java.vendor"));
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java b/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java
index 2a4dcc0..ed523c4 100644
--- a/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java
+++ b/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java
@@ -146,32 +146,33 @@ public class HdfsDirectoryFactoryTest extends SolrTestCaseJ4 {
   @Test
   public void testCleanupOldIndexDirectories() throws Exception {
 
-    HdfsDirectoryFactory hdfsFactory = new HdfsDirectoryFactory();
+    try (HdfsDirectoryFactory hdfsFactory = new HdfsDirectoryFactory()) {
 
-    System.setProperty("solr.hdfs.home", HdfsTestUtil.getURI(dfsCluster) + "/solr1");
-    hdfsFactory.init(new NamedList<>());
-    String dataHome = hdfsFactory.getDataHome(new MockCoreDescriptor());
-    assertTrue(dataHome.endsWith("/solr1/mock/data"));
-    System.clearProperty("solr.hdfs.home");
+      System.setProperty("solr.hdfs.home", HdfsTestUtil.getURI(dfsCluster) + "/solr1");
+      hdfsFactory.init(new NamedList<>());
+      String dataHome = hdfsFactory.getDataHome(new MockCoreDescriptor());
+      assertTrue(dataHome.endsWith("/solr1/mock/data"));
+      System.clearProperty("solr.hdfs.home");
 
-    FileSystem hdfs = dfsCluster.getFileSystem();
+      FileSystem hdfs = dfsCluster.getFileSystem();
 
-    org.apache.hadoop.fs.Path dataHomePath = new org.apache.hadoop.fs.Path(dataHome);
-    org.apache.hadoop.fs.Path currentIndexDirPath = new org.apache.hadoop.fs.Path(dataHomePath, "index");
-    assertTrue(!hdfs.isDirectory(currentIndexDirPath));
-    hdfs.mkdirs(currentIndexDirPath);
-    assertTrue(hdfs.isDirectory(currentIndexDirPath));
+      org.apache.hadoop.fs.Path dataHomePath = new org.apache.hadoop.fs.Path(dataHome);
+      org.apache.hadoop.fs.Path currentIndexDirPath = new org.apache.hadoop.fs.Path(dataHomePath, "index");
+      assertTrue(!hdfs.isDirectory(currentIndexDirPath));
+      hdfs.mkdirs(currentIndexDirPath);
+      assertTrue(hdfs.isDirectory(currentIndexDirPath));
 
-    String timestamp1 = new SimpleDateFormat(SnapShooter.DATE_FMT, Locale.ROOT).format(new Date());
-    org.apache.hadoop.fs.Path oldIndexDirPath = new org.apache.hadoop.fs.Path(dataHomePath, "index."+timestamp1);
-    assertTrue(!hdfs.isDirectory(oldIndexDirPath));
-    hdfs.mkdirs(oldIndexDirPath);
-    assertTrue(hdfs.isDirectory(oldIndexDirPath));
+      String timestamp1 = new SimpleDateFormat(SnapShooter.DATE_FMT, Locale.ROOT).format(new Date());
+      org.apache.hadoop.fs.Path oldIndexDirPath = new org.apache.hadoop.fs.Path(dataHomePath, "index." + timestamp1);
+      assertTrue(!hdfs.isDirectory(oldIndexDirPath));
+      hdfs.mkdirs(oldIndexDirPath);
+      assertTrue(hdfs.isDirectory(oldIndexDirPath));
 
-    hdfsFactory.cleanupOldIndexDirectories(dataHomePath.toString(), currentIndexDirPath.toString(), false);
+      hdfsFactory.cleanupOldIndexDirectories(dataHomePath.toString(), currentIndexDirPath.toString(), false);
 
-    assertTrue(hdfs.isDirectory(currentIndexDirPath));
-    assertTrue(!hdfs.isDirectory(oldIndexDirPath));
+      assertTrue(hdfs.isDirectory(currentIndexDirPath));
+      assertTrue(!hdfs.isDirectory(oldIndexDirPath));
+    }
   }
   
   @Test

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerTest.java
index e6dbddb..f11f9e4 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerTest.java
@@ -50,23 +50,25 @@ public class SecurityConfHandlerTest extends SolrTestCaseJ4 {
     req.setContentStreams(Collections.singletonList(o));
     handler.handleRequestBody(req,new SolrQueryResponse());
 
-    BasicAuthPlugin basicAuth = new BasicAuthPlugin();
-    SecurityConfig securityCfg = handler.m.get("/security.json");
-    basicAuth.init((Map<String, Object>) securityCfg.getData().get("authentication"));
-    assertTrue(basicAuth.authenticate("tom", "TomIsUberCool"));
+    try (BasicAuthPlugin basicAuth = new BasicAuthPlugin()) {
+      SecurityConfig securityCfg = handler.m.get("/security.json");
+      basicAuth.init((Map<String, Object>) securityCfg.getData().get("authentication"));
+      assertTrue(basicAuth.authenticate("tom", "TomIsUberCool"));
 
-    command = "{\n" +
-        "'set-user': {'harry':'HarryIsCool'},\n" +
-        "'delete-user': ['tom','harry']\n" +
-        "}";
-    o = new ContentStreamBase.ByteArrayStream(command.getBytes(StandardCharsets.UTF_8),"");
-    req.setContentStreams(Collections.singletonList(o));
-    handler.handleRequestBody(req,new SolrQueryResponse());
-    securityCfg = handler.m.get("/security.json");
-    assertEquals(3, securityCfg.getVersion());
-    Map result = (Map) securityCfg.getData().get("authentication");
-    result = (Map) result.get("credentials");
-    assertTrue(result.isEmpty());
+
+      command = "{\n" +
+          "'set-user': {'harry':'HarryIsCool'},\n" +
+          "'delete-user': ['tom','harry']\n" +
+          "}";
+      o = new ContentStreamBase.ByteArrayStream(command.getBytes(StandardCharsets.UTF_8), "");
+      req.setContentStreams(Collections.singletonList(o));
+      handler.handleRequestBody(req, new SolrQueryResponse());
+      securityCfg = handler.m.get("/security.json");
+      assertEquals(3, securityCfg.getVersion());
+      Map result = (Map) securityCfg.getData().get("authentication");
+      result = (Map) result.get("credentials");
+      assertTrue(result.isEmpty());
+    }
 
 
     

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/test/org/apache/solr/handler/component/DistributedDebugComponentTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedDebugComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedDebugComponentTest.java
index b447668..c0a40af 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/DistributedDebugComponentTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedDebugComponentTest.java
@@ -156,19 +156,20 @@ public class DistributedDebugComponentTest extends SolrJettyTestBase {
   }
   
   @Test
+  @SuppressWarnings("resource") // Cannot close client in this loop!
   public void testRandom() throws Exception {
     final int NUM_ITERS = atLeast(50);
 
-    for (int i = 0; i < NUM_ITERS; i++) { 
-      SolrClient client = random().nextBoolean() ? collection1 : collection2;
-      
+    for (int i = 0; i < NUM_ITERS; i++) {
+      final SolrClient client = random().nextBoolean() ? collection1 : collection2;
+
       SolrQuery q = new SolrQuery();
       q.set("distrib", "true");
       q.setFields("id", "text");
-      
+
       boolean shard1Results = random().nextBoolean();
       boolean shard2Results = random().nextBoolean();
-      
+
       String qs = "_query_with_no_results_";
       if (shard1Results) {
         qs += " OR batman";
@@ -181,10 +182,8 @@ public class DistributedDebugComponentTest extends SolrJettyTestBase {
       Set<String> shards = new HashSet<String>(Arrays.asList(shard1, shard2));
       if (random().nextBoolean()) {
         shards.remove(shard1);
-        shard1Results = false;
       } else if (random().nextBoolean()) {
         shards.remove(shard2);
-        shard2Results = false;
       }
       q.set("shards", StringUtils.join(shards, ","));
 

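Not every warning here is fixed by closing. In the hunk above the loop only borrows one of two long-lived clients per iteration, so closing the borrowed reference would tear down a client the suite still owns; the fix is a method-level @SuppressWarnings("resource") plus a comment saying why. A sketch of that situation, assuming illustrative FakeClient/pickAndUse names rather than SolrJ types:

    import java.io.Closeable;
    import java.io.IOException;
    import java.util.Random;

    public class BorrowedResource {

      static class FakeClient implements Closeable {
        final String name;
        FakeClient(String name) { this.name = name; }
        String query() { return "results from " + name; }
        @Override
        public void close() throws IOException { System.out.println(name + " closed"); }
      }

      @SuppressWarnings("resource") // clients are owned and closed by the caller
      static void pickAndUse(FakeClient a, FakeClient b, Random rnd) {
        for (int i = 0; i < 3; i++) {
          // Borrowed per iteration; must not be closed here.
          FakeClient client = rnd.nextBoolean() ? a : b;
          System.out.println(client.query());
        }
      }

      public static void main(String[] args) throws IOException {
        try (FakeClient a = new FakeClient("collection1");
             FakeClient b = new FakeClient("collection2")) {
          pickAndUse(a, b, new Random());
        }
      }
    }
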
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java b/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java
index f345441..2d4af64 100644
--- a/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java
+++ b/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java
@@ -201,22 +201,25 @@ public class HighlighterTest extends SolrTestCaseJ4 {
   @Test
   public void testOffsetWindowTokenFilter() throws Exception {
     String[] multivalued = { "a b c d", "e f g", "h", "i j k l m n" };
-    Analyzer a1 = new WhitespaceAnalyzer();
-    TokenStream tokenStream = a1.tokenStream("", "a b c d e f g h i j k l m n");
-
-    OffsetWindowTokenFilter tots = new OffsetWindowTokenFilter(tokenStream);
-    for( String v : multivalued ){
-      TokenStream ts1 = tots.advanceToNextWindowOfLength(v.length());
-      ts1.reset();
-      Analyzer a2 = new WhitespaceAnalyzer();
-      TokenStream ts2 = a2.tokenStream("", v);
-      ts2.reset();
-
-      while (ts1.incrementToken()) {
-        assertTrue(ts2.incrementToken());
-        assertEquals(ts1, ts2);
+    try (Analyzer a1 = new WhitespaceAnalyzer()) {
+      TokenStream tokenStream = a1.tokenStream("", "a b c d e f g h i j k l m n");
+
+      try (OffsetWindowTokenFilter tots = new OffsetWindowTokenFilter(tokenStream)) {
+        for (String v : multivalued) {
+          TokenStream ts1 = tots.advanceToNextWindowOfLength(v.length());
+          ts1.reset();
+          try (Analyzer a2 = new WhitespaceAnalyzer()) {
+            TokenStream ts2 = a2.tokenStream("", v);
+            ts2.reset();
+
+            while (ts1.incrementToken()) {
+              assertTrue(ts2.incrementToken());
+              assertEquals(ts1, ts2);
+            }
+            assertFalse(ts2.incrementToken());
+          }
+        }
       }
-      assertFalse(ts2.incrementToken());
     }
   }
 

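The highlighter fix nests one try-with-resources inside another: the outer analyzer lives for the whole test, while the inner analyzer is created and closed once per loop iteration. A small sketch of that nesting and its close order, using a hypothetical Res class:

    public class NestedTry {

      static class Res implements AutoCloseable {
        final String name;
        Res(String name) { this.name = name; System.out.println("open  " + name); }
        @Override
        public void close() { System.out.println("close " + name); }
      }

      public static void main(String[] args) {
        try (Res outer = new Res("outer")) {
          for (String v : new String[] {"a", "b"}) {
            try (Res inner = new Res("inner-" + v)) {
              System.out.println("using " + inner.name + " with " + outer.name);
            } // each inner resource closes here, at the end of its iteration
          }
        } // outer closes last
      }
    }
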
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/test/org/apache/solr/response/TestJavabinTupleStreamParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/response/TestJavabinTupleStreamParser.java b/solr/core/src/test/org/apache/solr/response/TestJavabinTupleStreamParser.java
index 2652314..81d0e26 100644
--- a/solr/core/src/test/org/apache/solr/response/TestJavabinTupleStreamParser.java
+++ b/solr/core/src/test/org/apache/solr/response/TestJavabinTupleStreamParser.java
@@ -69,14 +69,15 @@ public class TestJavabinTupleStreamParser extends SolrTestCaseJ4 {
 
     byte[] bytes = serialize(nl);
 
-    JavabinTupleStreamParser parser = new JavabinTupleStreamParser(new ByteArrayInputStream(bytes), true);
-    Map<String, Object> map = parser.next();
-    assertEquals("2", map.get("id"));
-    map = parser.next();
-    assertEquals("3", map.get("id"));
-    System.out.println();
-    map = parser.next();
-    assertNull(map);
+    try (JavabinTupleStreamParser parser = new JavabinTupleStreamParser(new ByteArrayInputStream(bytes), true)) {
+      Map<String, Object> map = parser.next();
+      assertEquals("2", map.get("id"));
+      map = parser.next();
+      assertEquals("3", map.get("id"));
+      System.out.println();
+      map = parser.next();
+      assertNull(map);
+    }
 
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/test/org/apache/solr/schema/TestSortableTextField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/TestSortableTextField.java b/solr/core/src/test/org/apache/solr/schema/TestSortableTextField.java
index 2e861f5..d89a0d1 100644
--- a/solr/core/src/test/org/apache/solr/schema/TestSortableTextField.java
+++ b/solr/core/src/test/org/apache/solr/schema/TestSortableTextField.java
@@ -488,7 +488,7 @@ public class TestSortableTextField extends SolrTestCaseJ4 {
     final List<String> test_fields = Arrays.asList("keyword_stxt", "keyword_dv_stxt",
                                                    "keyword_s_dv", "keyword_s");
     // we use embedded client instead of assertQ: we want to compare the responses from multiple requests
-    final SolrClient client = new EmbeddedSolrServer(h.getCore());
+    @SuppressWarnings("resource") final SolrClient client = new EmbeddedSolrServer(h.getCore());
     
     final int numDocs = atLeast(100);
     final int magicIdx = TestUtil.nextInt(random(), 1, numDocs);

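Here the suppression is narrowed to a single declaration instead of a whole method: the embedded client wraps a core whose lifecycle the test harness owns, so only this one variable should be exempt from the lint check. A sketch of the same scoping, assuming hypothetical Wrapper/Core classes:

    import java.io.Closeable;
    import java.io.IOException;

    public class NarrowSuppression {

      static class Core {
        String name() { return "collection1"; }
      }

      static class Wrapper implements Closeable {
        final Core core;
        Wrapper(Core core) { this.core = core; }
        String describe() { return "wrapping " + core.name(); }
        @Override
        public void close() throws IOException { /* would shut the shared core down */ }
      }

      public static void main(String[] args) {
        Core sharedCore = new Core(); // owned by the harness, not by this method
        @SuppressWarnings("resource") // deliberate: closing would also close the shared core
        final Wrapper client = new Wrapper(sharedCore);
        System.out.println(client.describe());
      }
    }
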
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/test/org/apache/solr/security/TestPKIAuthenticationPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/security/TestPKIAuthenticationPlugin.java b/solr/core/src/test/org/apache/solr/security/TestPKIAuthenticationPlugin.java
index a7dadcd..a664cc0 100644
--- a/solr/core/src/test/org/apache/solr/security/TestPKIAuthenticationPlugin.java
+++ b/solr/core/src/test/org/apache/solr/security/TestPKIAuthenticationPlugin.java
@@ -145,8 +145,8 @@ public class TestPKIAuthenticationPlugin extends SolrTestCaseJ4 {
     mock1.doAuthenticate(mockReq, null,filterChain );
     assertNotNull(wrappedRequestByFilter.get());
     assertEquals("$", ((HttpServletRequest) wrappedRequestByFilter.get()).getUserPrincipal().getName());
-
-
+    mock1.close();
+    mock.close();
   }
 
   private HttpServletRequest createMockRequest(final AtomicReference<Header> header) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/test/org/apache/solr/security/TestRuleBasedAuthorizationPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/security/TestRuleBasedAuthorizationPlugin.java b/solr/core/src/test/org/apache/solr/security/TestRuleBasedAuthorizationPlugin.java
index f4509a8..bacfc10 100644
--- a/solr/core/src/test/org/apache/solr/security/TestRuleBasedAuthorizationPlugin.java
+++ b/solr/core/src/test/org/apache/solr/security/TestRuleBasedAuthorizationPlugin.java
@@ -377,10 +377,13 @@ public class TestRuleBasedAuthorizationPlugin extends SolrTestCaseJ4 {
 
   private void checkRules(Map<String, Object> values, int expected, Map<String ,Object> permissions) {
     AuthorizationContext context = new MockAuthorizationContext(values);
-    RuleBasedAuthorizationPlugin plugin = new RuleBasedAuthorizationPlugin();
-    plugin.init(permissions);
-    AuthorizationResponse authResp = plugin.authorize(context);
-    assertEquals(expected, authResp.statusCode);
+    try (RuleBasedAuthorizationPlugin plugin = new RuleBasedAuthorizationPlugin()) {
+      plugin.init(permissions);
+      AuthorizationResponse authResp = plugin.authorize(context);
+      assertEquals(expected, authResp.statusCode);
+    } catch (IOException e) {
+      // swallow error, otherwise you have to add a _lot_ of exceptions to methods.
+    }
   }
 
   private static class MockAuthorizationContext extends AuthorizationContext {

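Because Closeable.close() declares a checked IOException, try-with-resources forces a choice: catch the exception at the use site, as this helper does, or propagate it by adding throws IOException to the signature, as the TestSha256AuthenticationProvider hunk below does. A sketch of the catch variant, assuming hypothetical Plugin/check names:

    import java.io.IOException;

    public class CloseExceptionHandling {

      static class Plugin implements AutoCloseable {
        void check() { System.out.println("checked"); }
        @Override
        public void close() throws IOException { /* may fail */ }
      }

      // Swallowing keeps every caller free of throws clauses; tolerable in a
      // test helper where a failing close() is not what is being asserted.
      static void checkRules() {
        try (Plugin plugin = new Plugin()) {
          plugin.check();
        } catch (IOException e) {
          // ignore: only close() can throw here, and the assertions already ran
        }
      }

      public static void main(String[] args) {
        checkRules();
      }
    }
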
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/test/org/apache/solr/security/TestSha256AuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/security/TestSha256AuthenticationProvider.java b/solr/core/src/test/org/apache/solr/security/TestSha256AuthenticationProvider.java
index 9139bdf..26c3597 100644
--- a/solr/core/src/test/org/apache/solr/security/TestSha256AuthenticationProvider.java
+++ b/solr/core/src/test/org/apache/solr/security/TestSha256AuthenticationProvider.java
@@ -16,6 +16,7 @@
  */
 package org.apache.solr.security;
 
+import java.io.IOException;
 import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.Map;
@@ -45,22 +46,22 @@ public class TestSha256AuthenticationProvider extends SolrTestCaseJ4 {
 
   }
 
-  public void testBasicAuthCommands(){
-    BasicAuthPlugin basicAuthPlugin = new BasicAuthPlugin();
-    basicAuthPlugin.init(Collections.emptyMap());
+  public void testBasicAuthCommands() throws IOException {
+    try (BasicAuthPlugin basicAuthPlugin = new BasicAuthPlugin()) {
+      basicAuthPlugin.init(Collections.emptyMap());
 
-    Map latestConf = new LinkedHashMap<>();
-
-    CommandOperation blockUnknown = new CommandOperation("set-property", singletonMap("blockUnknown", true));
-    basicAuthPlugin.edit(latestConf, Collections.singletonList(blockUnknown));
-    assertEquals(Boolean.TRUE,  latestConf.get("blockUnknown"));
-    basicAuthPlugin.init(latestConf);
-    assertTrue(basicAuthPlugin.getBlockUnknown());
-    blockUnknown = new CommandOperation("set-property", singletonMap("blockUnknown", false));
-    basicAuthPlugin.edit(latestConf, Collections.singletonList(blockUnknown));
-    assertEquals(Boolean.FALSE,  latestConf.get("blockUnknown"));
-    basicAuthPlugin.init(latestConf);
-    assertFalse(basicAuthPlugin.getBlockUnknown());
+      Map latestConf = new LinkedHashMap<>();
 
+      CommandOperation blockUnknown = new CommandOperation("set-property", singletonMap("blockUnknown", true));
+      basicAuthPlugin.edit(latestConf, Collections.singletonList(blockUnknown));
+      assertEquals(Boolean.TRUE, latestConf.get("blockUnknown"));
+      basicAuthPlugin.init(latestConf);
+      assertTrue(basicAuthPlugin.getBlockUnknown());
+      blockUnknown = new CommandOperation("set-property", singletonMap("blockUnknown", false));
+      basicAuthPlugin.edit(latestConf, Collections.singletonList(blockUnknown));
+      assertEquals(Boolean.FALSE, latestConf.get("blockUnknown"));
+      basicAuthPlugin.init(latestConf);
+      assertFalse(basicAuthPlugin.getBlockUnknown());
+    }
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/test/org/apache/solr/spelling/SimpleQueryConverter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/spelling/SimpleQueryConverter.java b/solr/core/src/test/org/apache/solr/spelling/SimpleQueryConverter.java
index 8d91a1b..bd647b3 100644
--- a/solr/core/src/test/org/apache/solr/spelling/SimpleQueryConverter.java
+++ b/solr/core/src/test/org/apache/solr/spelling/SimpleQueryConverter.java
@@ -39,9 +39,8 @@ class SimpleQueryConverter extends SpellingQueryConverter {
   @Override
   public Collection<Token> convert(String origQuery) {
     Collection<Token> result = new HashSet<>();
-    WhitespaceAnalyzer analyzer = new WhitespaceAnalyzer();
-    
-    try (TokenStream ts = analyzer.tokenStream("", origQuery)) {
+
+    try (WhitespaceAnalyzer analyzer = new WhitespaceAnalyzer(); TokenStream ts = analyzer.tokenStream("", origQuery)) {
       // TODO: support custom attributes
       CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
       OffsetAttribute offsetAtt = ts.addAttribute(OffsetAttribute.class);

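This hunk folds two dependent resources into a single try header: the second declaration may reference the first, and close() runs in reverse order of acquisition, so the token stream closes before the analyzer that produced it. A sketch with placeholder Source/Stream types:

    public class ChainedResources {

      static class Source implements AutoCloseable {
        Stream openStream() { return new Stream(); }
        @Override
        public void close() { System.out.println("source closed"); }
      }

      static class Stream implements AutoCloseable {
        String next() { return "token"; }
        @Override
        public void close() { System.out.println("stream closed"); }
      }

      public static void main(String[] args) {
        // The second resource is built from the first, in one header.
        try (Source src = new Source(); Stream ts = src.openStream()) {
          System.out.println(ts.next());
        }
        // Prints: token, stream closed, source closed.
      }
    }
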
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheVsDocValues.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheVsDocValues.java b/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheVsDocValues.java
index 308a870..9bacab6 100644
--- a/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheVsDocValues.java
+++ b/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheVsDocValues.java
@@ -124,96 +124,97 @@ public class TestFieldCacheVsDocValues extends LuceneTestCase {
     Analyzer analyzer = new MockAnalyzer(random());
     // FSDirectory because SimpleText will consume gobbs of
     // space when storing big binary values:
-    Directory d = newFSDirectory(createTempDir("hugeBinaryValues"));
-    boolean doFixed = random().nextBoolean();
-    int numDocs;
-    int fixedLength = 0;
-    if (doFixed) {
-      // Sometimes make all values fixed length since some
-      // codecs have different code paths for this:
-      numDocs = TestUtil.nextInt(random(), 10, 20);
-      fixedLength = TestUtil.nextInt(random(), 65537, 256 * 1024);
-    } else {
-      numDocs = TestUtil.nextInt(random(), 100, 200);
-    }
-    IndexWriter w = new IndexWriter(d, newIndexWriterConfig(analyzer));
-    List<byte[]> docBytes = new ArrayList<>();
-    long totalBytes = 0;
-    for(int docID=0;docID<numDocs;docID++) {
-      // we don't use RandomIndexWriter because it might add
-      // more docvalues than we expect !!!!
-
-      // Must be > 64KB in size to ensure more than 2 pages in
-      // PagedBytes would be needed:
-      int numBytes;
+    try (Directory d = newFSDirectory(createTempDir("hugeBinaryValues"))) {
+      boolean doFixed = random().nextBoolean();
+      int numDocs;
+      int fixedLength = 0;
       if (doFixed) {
-        numBytes = fixedLength;
-      } else if (docID == 0 || random().nextInt(5) == 3) {
-        numBytes = TestUtil.nextInt(random(), 65537, 3 * 1024 * 1024);
+        // Sometimes make all values fixed length since some
+        // codecs have different code paths for this:
+        numDocs = TestUtil.nextInt(random(), 10, 20);
+        fixedLength = TestUtil.nextInt(random(), 65537, 256 * 1024);
       } else {
-        numBytes = TestUtil.nextInt(random(), 1, 1024 * 1024);
+        numDocs = TestUtil.nextInt(random(), 100, 200);
       }
-      totalBytes += numBytes;
-      if (totalBytes > 5 * 1024*1024) {
-        break;
-      }
-      byte[] bytes = new byte[numBytes];
-      random().nextBytes(bytes);
-      docBytes.add(bytes);
-      Document doc = new Document();      
-      BytesRef b = new BytesRef(bytes);
-      b.length = bytes.length;
-      doc.add(new BinaryDocValuesField("field", b));
-      doc.add(new StringField("id", ""+docID, Field.Store.YES));
-      try {
-        w.addDocument(doc);
-      } catch (IllegalArgumentException iae) {
-        if (iae.getMessage().indexOf("is too large") == -1) {
-          throw iae;
-        } else {
-          // OK: some codecs can't handle binary DV > 32K
-          assertFalse(codecAcceptsHugeBinaryValues("field"));
-          w.rollback();
-          d.close();
-          return;
+      try (IndexWriter w = new IndexWriter(d, newIndexWriterConfig(analyzer))) {
+        List<byte[]> docBytes = new ArrayList<>();
+        long totalBytes = 0;
+        for (int docID = 0; docID < numDocs; docID++) {
+          // we don't use RandomIndexWriter because it might add
+          // more docvalues than we expect !!!!
+
+          // Must be > 64KB in size to ensure more than 2 pages in
+          // PagedBytes would be needed:
+          int numBytes;
+          if (doFixed) {
+            numBytes = fixedLength;
+          } else if (docID == 0 || random().nextInt(5) == 3) {
+            numBytes = TestUtil.nextInt(random(), 65537, 3 * 1024 * 1024);
+          } else {
+            numBytes = TestUtil.nextInt(random(), 1, 1024 * 1024);
+          }
+          totalBytes += numBytes;
+          if (totalBytes > 5 * 1024 * 1024) {
+            break;
+          }
+          byte[] bytes = new byte[numBytes];
+          random().nextBytes(bytes);
+          docBytes.add(bytes);
+          Document doc = new Document();
+          BytesRef b = new BytesRef(bytes);
+          b.length = bytes.length;
+          doc.add(new BinaryDocValuesField("field", b));
+          doc.add(new StringField("id", "" + docID, Field.Store.YES));
+          try {
+            w.addDocument(doc);
+          } catch (IllegalArgumentException iae) {
+            if (iae.getMessage().indexOf("is too large") == -1) {
+              throw iae;
+            } else {
+              // OK: some codecs can't handle binary DV > 32K
+              assertFalse(codecAcceptsHugeBinaryValues("field"));
+              w.rollback();
+              d.close();
+              return;
+            }
+          }
         }
-      }
-    }
-    
-    DirectoryReader r;
-    try {
-      r = DirectoryReader.open(w);
-    } catch (IllegalArgumentException iae) {
-      if (iae.getMessage().indexOf("is too large") == -1) {
-        throw iae;
-      } else {
-        assertFalse(codecAcceptsHugeBinaryValues("field"));
 
-        // OK: some codecs can't handle binary DV > 32K
-        w.rollback();
-        d.close();
-        return;
-      }
-    }
-    w.close();
 
-    LeafReader ar = SlowCompositeReaderWrapper.wrap(r);
-    TestUtil.checkReader(ar);
+        DirectoryReader r;
+        try {
+          r = DirectoryReader.open(w);
+        } catch (IllegalArgumentException iae) {
+          if (iae.getMessage().indexOf("is too large") == -1) {
+            throw iae;
+          } else {
+            assertFalse(codecAcceptsHugeBinaryValues("field"));
 
-    BinaryDocValues s = FieldCache.DEFAULT.getTerms(ar, "field");
-    for(int docID=0;docID<docBytes.size();docID++) {
-      Document doc = ar.document(docID);
-      assertEquals(docID, s.nextDoc());
-      BytesRef bytes = s.binaryValue();
-      byte[] expected = docBytes.get(Integer.parseInt(doc.get("id")));
-      assertEquals(expected.length, bytes.length);
-      assertEquals(new BytesRef(expected), bytes);
-    }
+            // OK: some codecs can't handle binary DV > 32K
+            w.rollback();
+            d.close();
+            return;
+          }
+        }
 
-    assertTrue(codecAcceptsHugeBinaryValues("field"));
 
-    ar.close();
-    d.close();
+        try (LeafReader ar = SlowCompositeReaderWrapper.wrap(r)) {
+          TestUtil.checkReader(ar);
+
+          BinaryDocValues s = FieldCache.DEFAULT.getTerms(ar, "field");
+          for (int docID = 0; docID < docBytes.size(); docID++) {
+            Document doc = ar.document(docID);
+            assertEquals(docID, s.nextDoc());
+            BytesRef bytes = s.binaryValue();
+            byte[] expected = docBytes.get(Integer.parseInt(doc.get("id")));
+            assertEquals(expected.length, bytes.length);
+            assertEquals(new BytesRef(expected), bytes);
+          }
+
+          assertTrue(codecAcceptsHugeBinaryValues("field"));
+        }
+      }
+    }
   }
 
   private static final int LARGE_BINARY_FIELD_LENGTH = (1 << 15) - 2;


[2/3] lucene-solr:branch_7x: SOLR-10809: Get precommit lint warnings out of Solr core

Posted by er...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java b/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java
index 20c2f1a..8aeaa73 100644
--- a/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java
+++ b/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java
@@ -130,196 +130,200 @@ public class SolrCmdDistributorTest extends BaseDistributedSearchTestCase {
   @ShardsFixed(num = 4)
   public void test() throws Exception {
     del("*:*");
-    
-    SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler);
-    
-    ModifiableSolrParams params = new ModifiableSolrParams();
 
+    ModifiableSolrParams params = new ModifiableSolrParams();
     List<Node> nodes = new ArrayList<>();
+    AddUpdateCommand cmd = new AddUpdateCommand(null);
+    List<Error> errors;
+    CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false);
+    long numFound;
+    HttpSolrClient client;
+    ZkNodeProps nodeProps;
 
-    ZkNodeProps nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP,
-        ((HttpSolrClient) controlClient).getBaseURL(),
-        ZkStateReader.CORE_NAME_PROP, "");
-    nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps)));
+    try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler)) {
 
-    // add one doc to controlClient
-    
-    AddUpdateCommand cmd = new AddUpdateCommand(null);
-    cmd.solrDoc = sdoc("id", id.incrementAndGet());
-    params = new ModifiableSolrParams();
+      nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP,
+          ((HttpSolrClient) controlClient).getBaseURL(),
+          ZkStateReader.CORE_NAME_PROP, "");
+      nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps)));
 
-    cmdDistrib.distribAdd(cmd, nodes, params);
-    
-    CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false);
-    params = new ModifiableSolrParams();
-    params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
-    cmdDistrib.distribCommit(ccmd, nodes, params);
-    cmdDistrib.finish();
+      // add one doc to controlClient
+      cmd.solrDoc = sdoc("id", id.incrementAndGet());
+      params = new ModifiableSolrParams();
 
-    
-    List<Error> errors = cmdDistrib.getErrors();
-    
-    assertEquals(errors.toString(), 0, errors.size());
-    
-    long numFound = controlClient.query(new SolrQuery("*:*")).getResults()
-        .getNumFound();
-    assertEquals(1, numFound);
-    
-    HttpSolrClient client = (HttpSolrClient) clients.get(0);
-    nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP,
-        client.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
-    nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps)));
-    
-    // add another 2 docs to control and 3 to client
-    cmdDistrib = new SolrCmdDistributor(updateShardHandler);
-    cmd.solrDoc = sdoc("id", id.incrementAndGet());
-    params = new ModifiableSolrParams();
-    params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
-    cmdDistrib.distribAdd(cmd, nodes, params);
-    
-    int id2 = id.incrementAndGet();
-    AddUpdateCommand cmd2 = new AddUpdateCommand(null);
-    cmd2.solrDoc = sdoc("id", id2);
+      cmdDistrib.distribAdd(cmd, nodes, params);
 
-    params = new ModifiableSolrParams();
-    params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
-    cmdDistrib.distribAdd(cmd2, nodes, params);
-    
-    AddUpdateCommand cmd3 = new AddUpdateCommand(null);
-    cmd3.solrDoc = sdoc("id", id.incrementAndGet());
-    
-    params = new ModifiableSolrParams();
-    params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
-    cmdDistrib.distribAdd(cmd3, Collections.singletonList(nodes.get(1)), params);
-    
-    params = new ModifiableSolrParams();
-    params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
-    cmdDistrib.distribCommit(ccmd, nodes, params);
-    cmdDistrib.finish();
-    errors = cmdDistrib.getErrors();
-    
+      params = new ModifiableSolrParams();
+      params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
+      cmdDistrib.distribCommit(ccmd, nodes, params);
+      cmdDistrib.finish();
+
+
+      errors = cmdDistrib.getErrors();
+
+      assertEquals(errors.toString(), 0, errors.size());
+
+      numFound = controlClient.query(new SolrQuery("*:*")).getResults()
+          .getNumFound();
+      assertEquals(1, numFound);
+
+      client = (HttpSolrClient) clients.get(0);
+      nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP,
+          client.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
+      nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps)));
+    }
+    int id2;
+    // add another 2 docs to control and 3 to client
+    try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler)) {
+      cmd.solrDoc = sdoc("id", id.incrementAndGet());
+      params = new ModifiableSolrParams();
+      params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
+      cmdDistrib.distribAdd(cmd, nodes, params);
+
+      id2 = id.incrementAndGet();
+      AddUpdateCommand cmd2 = new AddUpdateCommand(null);
+      cmd2.solrDoc = sdoc("id", id2);
+
+      params = new ModifiableSolrParams();
+      params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
+      cmdDistrib.distribAdd(cmd2, nodes, params);
+
+      AddUpdateCommand cmd3 = new AddUpdateCommand(null);
+      cmd3.solrDoc = sdoc("id", id.incrementAndGet());
+
+      params = new ModifiableSolrParams();
+      params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
+      cmdDistrib.distribAdd(cmd3, Collections.singletonList(nodes.get(1)), params);
+
+      params = new ModifiableSolrParams();
+      params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
+      cmdDistrib.distribCommit(ccmd, nodes, params);
+      cmdDistrib.finish();
+      errors = cmdDistrib.getErrors();
+    }
     assertEquals(errors.toString(), 0, errors.size());
-    
+
     SolrDocumentList results = controlClient.query(new SolrQuery("*:*")).getResults();
     numFound = results.getNumFound();
     assertEquals(results.toString(), 3, numFound);
-    
+
     numFound = client.query(new SolrQuery("*:*")).getResults()
         .getNumFound();
     assertEquals(3, numFound);
-    
+
     // now delete doc 2 which is on both control and client1
-    
+
     DeleteUpdateCommand dcmd = new DeleteUpdateCommand(null);
     dcmd.id = Integer.toString(id2);
-    
 
-    cmdDistrib = new SolrCmdDistributor(updateShardHandler);
-    
-    params = new ModifiableSolrParams();
-    params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
-    
-    cmdDistrib.distribDelete(dcmd, nodes, params);
-    
-    params = new ModifiableSolrParams();
-    params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
-    
-    cmdDistrib.distribCommit(ccmd, nodes, params);
-    cmdDistrib.finish();
 
-    errors = cmdDistrib.getErrors();
-    
+    try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler)) {
+
+      params = new ModifiableSolrParams();
+      params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
+
+      cmdDistrib.distribDelete(dcmd, nodes, params);
+
+      params = new ModifiableSolrParams();
+      params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
+
+      cmdDistrib.distribCommit(ccmd, nodes, params);
+      cmdDistrib.finish();
+
+      errors = cmdDistrib.getErrors();
+    }
+
     assertEquals(errors.toString(), 0, errors.size());
-    
-    
+
+
     results = controlClient.query(new SolrQuery("*:*")).getResults();
     numFound = results.getNumFound();
     assertEquals(results.toString(), 2, numFound);
-    
+
     numFound = client.query(new SolrQuery("*:*")).getResults()
         .getNumFound();
     assertEquals(results.toString(), 2, numFound);
-    
+
     for (SolrClient c : clients) {
       c.optimize();
       //System.out.println(clients.get(0).request(new LukeRequest()));
     }
-    
-    cmdDistrib = new SolrCmdDistributor(updateShardHandler);
-    
-    int cnt = atLeast(303);
-    for (int i = 0; i < cnt; i++) {
+
+    try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler)) {
+
+      int cnt = atLeast(303);
+      for (int i = 0; i < cnt; i++) {
+        nodes.clear();
+        for (SolrClient c : clients) {
+          if (random().nextBoolean()) {
+            continue;
+          }
+          HttpSolrClient httpClient = (HttpSolrClient) c;
+          nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP,
+              httpClient.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
+          nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps)));
+
+        }
+        AddUpdateCommand c = new AddUpdateCommand(null);
+        c.solrDoc = sdoc("id", id.incrementAndGet());
+        if (nodes.size() > 0) {
+          params = new ModifiableSolrParams();
+          cmdDistrib.distribAdd(c, nodes, params);
+        }
+      }
+
       nodes.clear();
+
       for (SolrClient c : clients) {
-        if (random().nextBoolean()) {
-          continue;
-        }
         HttpSolrClient httpClient = (HttpSolrClient) c;
         nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP,
             httpClient.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
-        nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps)));
 
+        nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps)));
       }
-      AddUpdateCommand c = new AddUpdateCommand(null);
-      c.solrDoc = sdoc("id", id.incrementAndGet());
-      if (nodes.size() > 0) {
-        params = new ModifiableSolrParams();
-        cmdDistrib.distribAdd(c, nodes, params);
-      }
-    }
-    
-    nodes.clear();
-    
-    for (SolrClient c : clients) {
-      HttpSolrClient httpClient = (HttpSolrClient) c;
-      nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP,
-          httpClient.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
-      
-      nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps)));
-    }
-    
-    final AtomicInteger commits = new AtomicInteger();
-    for(JettySolrRunner jetty : jettys) {
-      CoreContainer cores = jetty.getCoreContainer();
-      try (SolrCore core = cores.getCore("collection1")) {
-        core.getUpdateHandler().registerCommitCallback(new SolrEventListener() {
-          @Override
-          public void init(NamedList args) {
-          }
 
-          @Override
-          public void postSoftCommit() {
-          }
+      final AtomicInteger commits = new AtomicInteger();
+      for (JettySolrRunner jetty : jettys) {
+        CoreContainer cores = jetty.getCoreContainer();
+        try (SolrCore core = cores.getCore("collection1")) {
+          core.getUpdateHandler().registerCommitCallback(new SolrEventListener() {
+            @Override
+            public void init(NamedList args) {
+            }
+
+            @Override
+            public void postSoftCommit() {
+            }
+
+            @Override
+            public void postCommit() {
+              commits.incrementAndGet();
+            }
+
+            @Override
+            public void newSearcher(SolrIndexSearcher newSearcher,
+                                    SolrIndexSearcher currentSearcher) {
+            }
+          });
+        }
+      }
+      params = new ModifiableSolrParams();
+      params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
 
-          @Override
-          public void postCommit() {
-            commits.incrementAndGet();
-          }
+      cmdDistrib.distribCommit(ccmd, nodes, params);
 
-          @Override
-          public void newSearcher(SolrIndexSearcher newSearcher,
-                                  SolrIndexSearcher currentSearcher) {
-          }
-        });
-      }
-    }
-    params = new ModifiableSolrParams();
-    params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
+      cmdDistrib.finish();
 
-    cmdDistrib.distribCommit(ccmd, nodes, params);
-    
-    cmdDistrib.finish();
+      assertEquals(getShardCount(), commits.get());
 
-    assertEquals(getShardCount(), commits.get());
-    
-    for (SolrClient c : clients) {
-      NamedList<Object> resp = c.request(new LukeRequest());
-      assertEquals("SOLR-3428: We only did adds - there should be no deletes",
-          ((NamedList<Object>) resp.get("index")).get("numDocs"),
-          ((NamedList<Object>) resp.get("index")).get("maxDoc"));
+      for (SolrClient c : clients) {
+        NamedList<Object> resp = c.request(new LukeRequest());
+        assertEquals("SOLR-3428: We only did adds - there should be no deletes",
+            ((NamedList<Object>) resp.get("index")).get("numDocs"),
+            ((NamedList<Object>) resp.get("index")).get("maxDoc"));
+      }
     }
     
-    
     testMaxRetries();
     testOneRetry();
     testRetryNodeAgainstBadAddress();
@@ -330,33 +334,34 @@ public class SolrCmdDistributorTest extends BaseDistributedSearchTestCase {
 
   private void testMaxRetries() throws IOException {
     final MockStreamingSolrClients streamingClients = new MockStreamingSolrClients(updateShardHandler);
-    SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(streamingClients, 5, 0);
-    streamingClients.setExp(Exp.CONNECT_EXCEPTION);
-    ArrayList<Node> nodes = new ArrayList<>();
-    final HttpSolrClient solrclient1 = (HttpSolrClient) clients.get(0);
-    
-    final AtomicInteger retries = new AtomicInteger();
-    ZkNodeProps nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, solrclient1.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
-    RetryNode retryNode = new RetryNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1") {
-      @Override
-      public boolean checkRetry() {
-        retries.incrementAndGet();
-        return true;
-      }
-    };
-    
-    nodes.add(retryNode);
-    
-    AddUpdateCommand cmd = new AddUpdateCommand(null);
-    cmd.solrDoc = sdoc("id", id.incrementAndGet());
-    ModifiableSolrParams params = new ModifiableSolrParams();
+    try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(streamingClients, 5, 0)) {
+      streamingClients.setExp(Exp.CONNECT_EXCEPTION);
+      ArrayList<Node> nodes = new ArrayList<>();
+      final HttpSolrClient solrclient1 = (HttpSolrClient) clients.get(0);
+
+      final AtomicInteger retries = new AtomicInteger();
+      ZkNodeProps nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, solrclient1.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
+      RetryNode retryNode = new RetryNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1") {
+        @Override
+        public boolean checkRetry() {
+          retries.incrementAndGet();
+          return true;
+        }
+      };
 
-    cmdDistrib.distribAdd(cmd, nodes, params);
-    cmdDistrib.finish();
-    
-    assertEquals(6, retries.get());
-    
-    assertEquals(1, cmdDistrib.getErrors().size());
+      nodes.add(retryNode);
+
+      AddUpdateCommand cmd = new AddUpdateCommand(null);
+      cmd.solrDoc = sdoc("id", id.incrementAndGet());
+      ModifiableSolrParams params = new ModifiableSolrParams();
+
+      cmdDistrib.distribAdd(cmd, nodes, params);
+      cmdDistrib.finish();
+
+      assertEquals(6, retries.get());
+
+      assertEquals(1, cmdDistrib.getErrors().size());
+    }
   }
   
   private void testOneRetry() throws Exception {
@@ -364,45 +369,46 @@ public class SolrCmdDistributorTest extends BaseDistributedSearchTestCase {
     long numFoundBefore = solrclient.query(new SolrQuery("*:*")).getResults()
         .getNumFound();
     final MockStreamingSolrClients streamingClients = new MockStreamingSolrClients(updateShardHandler);
-    SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(streamingClients, 5, 0);
-    streamingClients.setExp(Exp.CONNECT_EXCEPTION);
-    ArrayList<Node> nodes = new ArrayList<>();
-
-    ZkNodeProps nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, solrclient.getBaseURL(),
-        ZkStateReader.CORE_NAME_PROP, "");
-
-    final AtomicInteger retries = new AtomicInteger();
-    nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, solrclient.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
-    RetryNode retryNode = new RetryNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1") {
-      @Override
-      public boolean checkRetry() {
-        streamingClients.setExp(null);
-        retries.incrementAndGet();
-        return true;
-      }
-    };
-    
+    try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(streamingClients, 5, 0)) {
+      streamingClients.setExp(Exp.CONNECT_EXCEPTION);
+      ArrayList<Node> nodes = new ArrayList<>();
+
+      ZkNodeProps nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, solrclient.getBaseURL(),
+          ZkStateReader.CORE_NAME_PROP, "");
+
+      final AtomicInteger retries = new AtomicInteger();
+      nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, solrclient.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
+      RetryNode retryNode = new RetryNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1") {
+        @Override
+        public boolean checkRetry() {
+          streamingClients.setExp(null);
+          retries.incrementAndGet();
+          return true;
+        }
+      };
 
-    nodes.add(retryNode);
-    
-    AddUpdateCommand cmd = new AddUpdateCommand(null);
-    cmd.solrDoc = sdoc("id", id.incrementAndGet());
-    ModifiableSolrParams params = new ModifiableSolrParams();
 
-    CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false);
-    cmdDistrib.distribAdd(cmd, nodes, params);
-    cmdDistrib.distribCommit(ccmd, nodes, params);
-    cmdDistrib.finish();
-    
-    assertEquals(1, retries.get());
-    
-    
-    long numFoundAfter = solrclient.query(new SolrQuery("*:*")).getResults()
-        .getNumFound();
-    
-    // we will get java.net.ConnectException which we retry on
-    assertEquals(numFoundBefore + 1, numFoundAfter);
-    assertEquals(0, cmdDistrib.getErrors().size());
+      nodes.add(retryNode);
+
+      AddUpdateCommand cmd = new AddUpdateCommand(null);
+      cmd.solrDoc = sdoc("id", id.incrementAndGet());
+      ModifiableSolrParams params = new ModifiableSolrParams();
+
+      CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false);
+      cmdDistrib.distribAdd(cmd, nodes, params);
+      cmdDistrib.distribCommit(ccmd, nodes, params);
+      cmdDistrib.finish();
+
+      assertEquals(1, retries.get());
+
+
+      long numFoundAfter = solrclient.query(new SolrQuery("*:*")).getResults()
+          .getNumFound();
+
+      // we will get java.net.ConnectException which we retry on
+      assertEquals(numFoundBefore + 1, numFoundAfter);
+      assertEquals(0, cmdDistrib.getErrors().size());
+    }
   }
 
   private void testRetryNodeWontRetrySocketError() throws Exception {
@@ -410,100 +416,101 @@ public class SolrCmdDistributorTest extends BaseDistributedSearchTestCase {
     long numFoundBefore = solrclient.query(new SolrQuery("*:*")).getResults()
         .getNumFound();
     final MockStreamingSolrClients streamingClients = new MockStreamingSolrClients(updateShardHandler);
-    SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(streamingClients, 5, 0);
-    streamingClients.setExp(Exp.SOCKET_EXCEPTION);
-    ArrayList<Node> nodes = new ArrayList<>();
-
-    ZkNodeProps nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, solrclient.getBaseURL(),
-        ZkStateReader.CORE_NAME_PROP, "");
-
-    final AtomicInteger retries = new AtomicInteger();
-    nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, solrclient.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
-    RetryNode retryNode = new RetryNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1") {
-      @Override
-      public boolean checkRetry() {
-        retries.incrementAndGet();
-        return true;
-      }
-    };
-    
+    try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(streamingClients, 5, 0)) {
+      streamingClients.setExp(Exp.SOCKET_EXCEPTION);
+      ArrayList<Node> nodes = new ArrayList<>();
+
+      ZkNodeProps nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, solrclient.getBaseURL(),
+          ZkStateReader.CORE_NAME_PROP, "");
+
+      final AtomicInteger retries = new AtomicInteger();
+      nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, solrclient.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
+      RetryNode retryNode = new RetryNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1") {
+        @Override
+        public boolean checkRetry() {
+          retries.incrementAndGet();
+          return true;
+        }
+      };
 
-    nodes.add(retryNode);
-    
-    AddUpdateCommand cmd = new AddUpdateCommand(null);
-    cmd.solrDoc = sdoc("id", id.incrementAndGet());
-    ModifiableSolrParams params = new ModifiableSolrParams();
 
-    CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false);
-    cmdDistrib.distribAdd(cmd, nodes, params);
-    
-    streamingClients.setExp(null);
-    cmdDistrib.distribCommit(ccmd, nodes, params);
-    cmdDistrib.finish();
-    
-    // it will checkRetry, but not actually do it...
-    assertEquals(1, retries.get());
-    
-    
-    long numFoundAfter = solrclient.query(new SolrQuery("*:*")).getResults()
-        .getNumFound();
-    
-    // we will get java.net.SocketException: Network is unreachable, which we don't retry on
-    assertEquals(numFoundBefore, numFoundAfter);
-    assertEquals(1, cmdDistrib.getErrors().size());
+      nodes.add(retryNode);
+
+      AddUpdateCommand cmd = new AddUpdateCommand(null);
+      cmd.solrDoc = sdoc("id", id.incrementAndGet());
+      ModifiableSolrParams params = new ModifiableSolrParams();
+
+      CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false);
+      cmdDistrib.distribAdd(cmd, nodes, params);
+
+      streamingClients.setExp(null);
+      cmdDistrib.distribCommit(ccmd, nodes, params);
+      cmdDistrib.finish();
+
+      // it will checkRetry, but not actually do it...
+      assertEquals(1, retries.get());
+
+
+      long numFoundAfter = solrclient.query(new SolrQuery("*:*")).getResults()
+          .getNumFound();
+
+      // we will get java.net.SocketException: Network is unreachable, which we don't retry on
+      assertEquals(numFoundBefore, numFoundAfter);
+      assertEquals(1, cmdDistrib.getErrors().size());
+    }
   }
 
   private void testRetryNodeAgainstBadAddress() throws SolrServerException, IOException {
     // Test RetryNode
-    SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler);
-    final HttpSolrClient solrclient = (HttpSolrClient) clients.get(0);
-    long numFoundBefore = solrclient.query(new SolrQuery("*:*")).getResults()
-        .getNumFound();
-    
-    ArrayList<Node> nodes = new ArrayList<>();
+    try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler)) {
+      final HttpSolrClient solrclient = (HttpSolrClient) clients.get(0);
+      long numFoundBefore = solrclient.query(new SolrQuery("*:*")).getResults()
+          .getNumFound();
+
+      ArrayList<Node> nodes = new ArrayList<>();
+
+      ZkNodeProps nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, "[ff01::114]:33332" + context, ZkStateReader.CORE_NAME_PROP, "");
+      RetryNode retryNode = new RetryNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1") {
+        @Override
+        public boolean checkRetry() {
+          ZkNodeProps leaderProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, solrclient.getBaseURL(),
+              ZkStateReader.CORE_NAME_PROP, "");
+          this.nodeProps = new ZkCoreNodeProps(leaderProps);
+
+          return true;
+        }
+      };
 
-    ZkNodeProps nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, "[ff01::114]:33332" + context, ZkStateReader.CORE_NAME_PROP, "");
-    RetryNode retryNode = new RetryNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1") {
-      @Override
-      public boolean checkRetry() {
-        ZkNodeProps leaderProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, solrclient.getBaseURL(),
-            ZkStateReader.CORE_NAME_PROP, "");
-        this.nodeProps = new ZkCoreNodeProps(leaderProps);
 
-        return true;
-      }
-    };
-    
+      nodes.add(retryNode);
 
-    nodes.add(retryNode);
-    
-    
-    AddUpdateCommand cmd = new AddUpdateCommand(null);
-    cmd.solrDoc = sdoc("id", id.incrementAndGet());
-    ModifiableSolrParams params = new ModifiableSolrParams();
 
-    cmdDistrib.distribAdd(cmd, nodes, params);
-    
-    CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false);
-    params = new ModifiableSolrParams();
-    params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
-    cmdDistrib.distribCommit(ccmd, nodes, params);
-    cmdDistrib.finish();
-    
-    long numFoundAfter = solrclient.query(new SolrQuery("*:*")).getResults()
-        .getNumFound();
-    
-    // different OS's will throw different exceptions for the bad address above
-    if (numFoundBefore != numFoundAfter) {
-      assertEquals(0, cmdDistrib.getErrors().size());
-      assertEquals(numFoundBefore + 1, numFoundAfter);
-    } else {
-      // we will get java.net.SocketException: Network is unreachable and not retry
-      assertEquals(numFoundBefore, numFoundAfter);
-      
-      assertEquals(1, cmdDistrib.getErrors().size());
+      AddUpdateCommand cmd = new AddUpdateCommand(null);
+      cmd.solrDoc = sdoc("id", id.incrementAndGet());
+      ModifiableSolrParams params = new ModifiableSolrParams();
+
+      cmdDistrib.distribAdd(cmd, nodes, params);
+
+      CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false);
+      params = new ModifiableSolrParams();
+      params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
+      cmdDistrib.distribCommit(ccmd, nodes, params);
+      cmdDistrib.finish();
+
+      long numFoundAfter = solrclient.query(new SolrQuery("*:*")).getResults()
+          .getNumFound();
+
+      // different OSes will throw different exceptions for the bad address above
+      if (numFoundBefore != numFoundAfter) {
+        assertEquals(0, cmdDistrib.getErrors().size());
+        assertEquals(numFoundBefore + 1, numFoundAfter);
+      } else {
+        // we will get java.net.SocketException: Network is unreachable and will not retry
+        assertEquals(numFoundBefore, numFoundAfter);
+
+        assertEquals(1, cmdDistrib.getErrors().size());
+      }
     }
-    
   }
   
   @Override
@@ -513,21 +520,22 @@ public class SolrCmdDistributorTest extends BaseDistributedSearchTestCase {
   }
 
   private void testDistribOpenSearcher() {
-    SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler);
-    UpdateRequest updateRequest = new UpdateRequest();
+    try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler)) {
+      UpdateRequest updateRequest = new UpdateRequest();
 
-    CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false);
+      CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false);
 
-    //test default value (should be true)
-    cmdDistrib.addCommit(updateRequest, ccmd);
-    boolean openSearcher = updateRequest.getParams().getBool(UpdateParams.OPEN_SEARCHER,false);
-    assertTrue(openSearcher);
+      //test default value (should be true)
+      cmdDistrib.addCommit(updateRequest, ccmd);
+      boolean openSearcher = updateRequest.getParams().getBool(UpdateParams.OPEN_SEARCHER, false);
+      assertTrue(openSearcher);
 
-    //test openSearcher = false
-    ccmd.openSearcher = false;
-    
-    cmdDistrib.addCommit(updateRequest, ccmd);
-    openSearcher = updateRequest.getParams().getBool(UpdateParams.OPEN_SEARCHER,true);
-    assertFalse(openSearcher);
+      //test openSearcher = false
+      ccmd.openSearcher = false;
+
+      cmdDistrib.addCommit(updateRequest, ccmd);
+      openSearcher = updateRequest.getParams().getBool(UpdateParams.OPEN_SEARCHER, true);
+      assertFalse(openSearcher);
+    }
   }
 }
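
The changes in this test are all the same mechanical conversion: each SolrCmdDistributor moves into a try-with-resources header so it is closed even when an assertion throws mid-test. A minimal, self-contained sketch of the pattern, using a hypothetical Closeable stand-in rather than the Solr classes:

    import java.io.Closeable;

    public class TryWithResourcesSketch {
      // Hypothetical stand-in for an AutoCloseable like SolrCmdDistributor.
      static class Distributor implements Closeable {
        void distribute(String doc) { System.out.println("distributed " + doc); }
        @Override public void close() { System.out.println("closed"); }
      }

      public static void main(String[] args) {
        // close() runs on every exit from the block, including assertion
        // failures, which is what clears the precommit resource warning.
        try (Distributor d = new Distributor()) {
          d.distribute("doc-1");
        }
      }
    }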

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/test/org/apache/solr/update/SolrIndexSplitterTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/SolrIndexSplitterTest.java b/solr/core/src/test/org/apache/solr/update/SolrIndexSplitterTest.java
index 0bd2e6e..ae743da 100644
--- a/solr/core/src/test/org/apache/solr/update/SolrIndexSplitterTest.java
+++ b/solr/core/src/test/org/apache/solr/update/SolrIndexSplitterTest.java
@@ -178,8 +178,10 @@ public class SolrIndexSplitterTest extends SolrTestCaseJ4 {
       } finally {
         if (request != null) request.close();
       }
-      EmbeddedSolrServer server1 = new EmbeddedSolrServer(h.getCoreContainer(), "split1");
-      EmbeddedSolrServer server2 = new EmbeddedSolrServer(h.getCoreContainer(), "split2");
+      @SuppressWarnings("resource")
+      final EmbeddedSolrServer server1 = new EmbeddedSolrServer(h.getCoreContainer(), "split1");
+      @SuppressWarnings("resource")
+      final EmbeddedSolrServer server2 = new EmbeddedSolrServer(h.getCoreContainer(), "split2");
       server1.commit(true, true);
       server2.commit(true, true);
       assertEquals("id:dorothy should be present in split index1", 1, server1.query(new SolrQuery("id:dorothy")).getResults().getNumFound());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/core/src/test/org/apache/solr/update/processor/RecordingUpdateProcessorFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/processor/RecordingUpdateProcessorFactory.java b/solr/core/src/test/org/apache/solr/update/processor/RecordingUpdateProcessorFactory.java
index a13fdaa..1a6f60d 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/RecordingUpdateProcessorFactory.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/RecordingUpdateProcessorFactory.java
@@ -61,6 +61,7 @@ public final class RecordingUpdateProcessorFactory
   }
 
   @Override
+  @SuppressWarnings("resource")
   public synchronized UpdateRequestProcessor getInstance(SolrQueryRequest req, 
                                                          SolrQueryResponse rsp, 
                                                          UpdateRequestProcessor next ) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java
index 0bd4456..a4f528b 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java
@@ -223,7 +223,9 @@ public class DaemonStream extends TupleStream implements Expressible {
     if(closed) {
       return;
     }
-    streamRunner.setShutdown(true);
+    if (streamRunner != null) {
+      streamRunner.setShutdown(true);
+    }
     this.closed = true;
   }
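
The guard is needed because close() can legitimately run before open(), leaving fields that open() initializes still null. A small self-contained sketch of the same defensive close():

    public class GuardedCloseSketch implements AutoCloseable {
      private Thread runner;    // only created in open()
      private boolean closed;

      public void open() {
        runner = new Thread(() -> { /* pump tuples until interrupted */ });
        runner.start();
      }

      @Override
      public synchronized void close() {
        if (closed) {
          return;               // idempotent: a second close is a no-op
        }
        if (runner != null) {   // close() before open() must not NPE
          runner.interrupt();
        }
        closed = true;
      }

      public static void main(String[] args) {
        new GuardedCloseSketch().close(); // safe even though open() never ran
      }
    }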
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FacetStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FacetStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FacetStream.java
index 0f98e35..29e485b 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FacetStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FacetStream.java
@@ -348,7 +348,9 @@ public class FacetStream extends TupleStream implements Expressible  {
 
   public void close() throws IOException {
     if(cache == null) {
-      cloudSolrClient.close();
+      if (cloudSolrClient != null) {
+        cloudSolrClient.close();
+      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FeaturesSelectionStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FeaturesSelectionStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FeaturesSelectionStream.java
index 75d2d34..b6ad276 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FeaturesSelectionStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FeaturesSelectionStream.java
@@ -305,11 +305,13 @@ public class FeaturesSelectionStream extends TupleStream implements Expressible{
   }
 
   public void close() throws IOException {
-    if (isCloseCache) {
+    if (isCloseCache && cache != null) {
       cache.close();
     }
 
-    executorService.shutdown();
+    if (executorService != null) {
+      executorService.shutdown();
+    }
   }
 
   /** Return the stream sort - ie, the order in which records are returned */

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java
index 879c708..f56431c 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java
@@ -391,11 +391,13 @@ public class TextLogitStream extends TupleStream implements Expressible {
   }
 
   public void close() throws IOException {
-    if (isCloseCache) {
+    if (isCloseCache && cache != null) {
       cache.close();
     }
 
-    executorService.shutdown();
+    if (executorService != null) {
+      executorService.shutdown();
+    }
     termsStream.close();
   }
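
FacetStream, FeaturesSelectionStream, and TextLogitStream all receive the same shape of fix: in close(), release only what the instance actually owns, and tolerate fields that were never initialized. A sketch under those assumptions (the ownership flag and field names here are illustrative, not the Solr ones):

    import java.io.Closeable;
    import java.io.IOException;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    public class OwnershipAwareCloseSketch implements Closeable {
      private final Closeable client;          // may be null, may be cache-owned
      private final boolean clientIsCached;    // true -> the shared cache closes it
      private final ExecutorService executor;  // may be null on some code paths

      OwnershipAwareCloseSketch(Closeable client, boolean clientIsCached,
                                ExecutorService executor) {
        this.client = client;
        this.clientIsCached = clientIsCached;
        this.executor = executor;
      }

      @Override
      public void close() throws IOException {
        // Close the client only if this instance owns it and it exists.
        if (!clientIsCached && client != null) {
          client.close();
        }
        // The executor may never have been created, so guard it too.
        if (executor != null) {
          executor.shutdown();
        }
      }

      public static void main(String[] args) throws IOException {
        ExecutorService pool = Executors.newSingleThreadExecutor();
        new OwnershipAwareCloseSketch(null, true, pool).close(); // no NPE
      }
    }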
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java
index b7a1edd..4bb6223 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java
@@ -355,7 +355,7 @@ public class TopicStream extends CloudSolrStream implements Expressible  {
         }
       }
 
-      if (streamContext.getSolrClientCache() == null) {
+      if (streamContext != null && streamContext.getSolrClientCache() == null) {
         cloudSolrClient.close();
       }
     }
@@ -455,6 +455,9 @@ public class TopicStream extends CloudSolrStream implements Expressible  {
 
   private void persistCheckpoints() throws IOException{
 
+    if (cloudSolrClient == null) {
+      return;
+    }
     UpdateRequest request = new UpdateRequest();
     request.setParam("collection", checkpointCollection);
     SolrInputDocument doc = new SolrInputDocument();
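
persistCheckpoints() now bails out at the top when there is no client to write through, instead of wrapping the whole body in a conditional. The early-return guard style, sketched:

    import java.util.ArrayList;
    import java.util.List;

    public class EarlyReturnSketch {
      private List<String> sink;  // may be null if the stream never opened

      void persist(String checkpoint) {
        if (sink == null) {
          return;                // nothing to write through; not an error
        }
        sink.add(checkpoint);    // the real work stays unindented
      }

      public static void main(String[] args) {
        EarlyReturnSketch s = new EarlyReturnSketch();
        s.persist("cp-1");       // safe no-op before initialization
        s.sink = new ArrayList<>();
        s.persist("cp-2");
        System.out.println(s.sink);
      }
    }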

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
index 5c12645..5f3d498 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
@@ -231,7 +231,6 @@ public class StreamExpressionTest extends SolrCloudTestCase {
       solrParams.add("qt", "/stream");
       solrParams.add("expr", "search(myCollection, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\")");
       solrParams.add("myCollection.shards", buf.toString());
-      SolrStream solrStream = new SolrStream(shardUrls.get(0), solrParams);
       stream.setStreamContext(context);
       tuples = getTuples(stream);
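
The deleted line is the entire fix here: the SolrStream was constructed, never read, and never closed, so the checker flagged it as a leak. The general shape of the bug and its remedy:

    public class UnusedResourceSketch {
      static class Stream implements AutoCloseable {
        @Override public void close() { }
      }

      public static void main(String[] args) {
        // BAD: constructed, never used, never closed -> resource warning.
        // Stream leaked = new Stream();

        // GOOD: if the object is really needed, scope it to its use.
        try (Stream used = new Stream()) {
          System.out.println("using " + used);
        }
      }
    }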
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionToExpessionTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionToExpessionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionToExpessionTest.java
index 36b2829..9327ee9 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionToExpessionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionToExpessionTest.java
@@ -70,161 +70,152 @@ public class StreamExpressionToExpessionTest extends LuceneTestCase {
   @Test
   public void testCloudSolrStream() throws Exception {
 
-    CloudSolrStream stream;
     String expressionString;
     
     // Basic test
-    stream = new CloudSolrStream(StreamExpressionParser.parse("search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\", fq=\"a_s:one\", fq=\"a_s:two\")"), factory);
-    expressionString = stream.toExpression(factory).toString();
-    System.out.println("ExpressionString: " + expressionString.toString());
-    assertTrue(expressionString.contains("search(collection1,"));
-    assertTrue(expressionString.contains("q=\"*:*\""));
-    assertTrue(expressionString.contains("fl=\"id,a_s,a_i,a_f\""));
-    assertTrue(expressionString.contains("sort=\"a_f asc, a_i asc\""));
-    assertTrue(expressionString.contains("fq=\"a_s:one\""));
-    assertTrue(expressionString.contains("fq=\"a_s:two\""));
-    
+    try (CloudSolrStream stream = new CloudSolrStream(StreamExpressionParser.parse("search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\", fq=\"a_s:one\", fq=\"a_s:two\")"), factory)) {
+      expressionString = stream.toExpression(factory).toString();
+      System.out.println("ExpressionString: " + expressionString);
+      assertTrue(expressionString.contains("search(collection1,"));
+      assertTrue(expressionString.contains("q=\"*:*\""));
+      assertTrue(expressionString.contains("fl=\"id,a_s,a_i,a_f\""));
+      assertTrue(expressionString.contains("sort=\"a_f asc, a_i asc\""));
+      assertTrue(expressionString.contains("fq=\"a_s:one\""));
+      assertTrue(expressionString.contains("fq=\"a_s:two\""));
+    }
     // Basic w/aliases
-    stream = new CloudSolrStream(StreamExpressionParser.parse("search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\", aliases=\"id=izzy,a_s=kayden\")"), factory);
-    expressionString = stream.toExpression(factory).toString();
-    assertTrue(expressionString.contains("id=izzy"));
-    assertTrue(expressionString.contains("a_s=kayden"));
-
+    try (CloudSolrStream stream = new CloudSolrStream(StreamExpressionParser.parse("search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\", aliases=\"id=izzy,a_s=kayden\")"), factory)) {
+      expressionString = stream.toExpression(factory).toString();
+      assertTrue(expressionString.contains("id=izzy"));
+      assertTrue(expressionString.contains("a_s=kayden"));
+    }
   }
   
   @Test
   public void testSelectStream() throws Exception {
-
-    SelectStream stream;
     String expressionString;
     
     // Basic test
-    stream = new SelectStream(StreamExpressionParser.parse("select(\"a_s as fieldA\", search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"))"), factory);
-    expressionString = stream.toExpression(factory).toString();
-    assertTrue(expressionString.contains("select(search(collection1,"));
-    assertTrue(expressionString.contains("q=\"*:*\""));
-    assertTrue(expressionString.contains("fl=\"id,a_s,a_i,a_f\""));
-    assertTrue(expressionString.contains("sort=\"a_f asc, a_i asc\""));
-    assertTrue(expressionString.contains("a_s as fieldA"));
-    
+    try (SelectStream stream = new SelectStream(StreamExpressionParser.parse("select(\"a_s as fieldA\", search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"))"), factory)) {
+      expressionString = stream.toExpression(factory).toString();
+      assertTrue(expressionString.contains("select(search(collection1,"));
+      assertTrue(expressionString.contains("q=\"*:*\""));
+      assertTrue(expressionString.contains("fl=\"id,a_s,a_i,a_f\""));
+      assertTrue(expressionString.contains("sort=\"a_f asc, a_i asc\""));
+      assertTrue(expressionString.contains("a_s as fieldA"));
+    }
   }
 
   @Test
   public void testDaemonStream() throws Exception {
-
-    DaemonStream stream;
     String expressionString;
 
     // Basic test
-    stream = new DaemonStream(StreamExpressionParser.parse("daemon(search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"), id=\"blah\", runInterval=\"1000\", queueSize=\"100\")"), factory);
-    expressionString = stream.toExpression(factory).toString();
-    assertTrue(expressionString.contains("daemon(search(collection1,"));
-    assertTrue(expressionString.contains("q=\"*:*\""));
-    assertTrue(expressionString.contains("fl=\"id,a_s,a_i,a_f\""));
-    assertTrue(expressionString.contains("sort=\"a_f asc, a_i asc\""));
-    assertTrue(expressionString.contains("id=blah"));
-    assertTrue(expressionString.contains("queueSize=100"));
-    assertTrue(expressionString.contains("runInterval=1000"));
+    try (DaemonStream stream = new DaemonStream(StreamExpressionParser.parse("daemon(search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"), id=\"blah\", runInterval=\"1000\", queueSize=\"100\")"), factory)) {
+      expressionString = stream.toExpression(factory).toString();
+      assertTrue(expressionString.contains("daemon(search(collection1,"));
+      assertTrue(expressionString.contains("q=\"*:*\""));
+      assertTrue(expressionString.contains("fl=\"id,a_s,a_i,a_f\""));
+      assertTrue(expressionString.contains("sort=\"a_f asc, a_i asc\""));
+      assertTrue(expressionString.contains("id=blah"));
+      assertTrue(expressionString.contains("queueSize=100"));
+      assertTrue(expressionString.contains("runInterval=1000"));
+    }
   }
 
   @Test
   public void testTopicStream() throws Exception {
 
-    TopicStream stream;
     String expressionString;
 
     // Basic test
-    stream = new TopicStream(StreamExpressionParser.parse("topic(collection2, collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", id=\"blah\", checkpointEvery=1000)"), factory);
-    expressionString = stream.toExpression(factory).toString();
-    assertTrue(expressionString.contains("topic(collection2,collection1"));
-    assertTrue(expressionString.contains("q=\"*:*\""));
-    assertTrue(expressionString.contains("fl=\"id,a_s,a_i,a_f\""));
-    assertTrue(expressionString.contains("id=blah"));
-    assertTrue(expressionString.contains("checkpointEvery=1000"));
+    try (TopicStream stream = new TopicStream(StreamExpressionParser.parse("topic(collection2, collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", id=\"blah\", checkpointEvery=1000)"), factory)) {
+      expressionString = stream.toExpression(factory).toString();
+      assertTrue(expressionString.contains("topic(collection2,collection1"));
+      assertTrue(expressionString.contains("q=\"*:*\""));
+      assertTrue(expressionString.contains("fl=\"id,a_s,a_i,a_f\""));
+      assertTrue(expressionString.contains("id=blah"));
+      assertTrue(expressionString.contains("checkpointEvery=1000"));
+    }
   }
 
   @Test
   public void testStatsStream() throws Exception {
-
-    StatsStream stream;
     String expressionString;
     
     // Basic test
-    stream = new StatsStream(StreamExpressionParser.parse("stats(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\", sum(a_i), avg(a_i), count(*), min(a_i), max(a_i))"), factory);
-    expressionString = stream.toExpression(factory).toString();
-    assertTrue(expressionString.contains("stats(collection1,"));
-    assertTrue(expressionString.contains("q=\"*:*\""));
-    assertTrue(expressionString.contains("fl=\"id,a_s,a_i,a_f\""));
-    assertTrue(expressionString.contains("sort=\"a_f asc, a_i asc\""));
-    assertTrue(expressionString.contains("min(a_i)"));
-    assertTrue(expressionString.contains("max(a_i)"));
-    assertTrue(expressionString.contains("avg(a_i,false)"));
-    assertTrue(expressionString.contains("count(*)"));
-    assertTrue(expressionString.contains("sum(a_i)"));
-    
+    try (StatsStream stream = new StatsStream(StreamExpressionParser.parse("stats(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\", sum(a_i), avg(a_i), count(*), min(a_i), max(a_i))"), factory)) {
+      expressionString = stream.toExpression(factory).toString();
+      assertTrue(expressionString.contains("stats(collection1,"));
+      assertTrue(expressionString.contains("q=\"*:*\""));
+      assertTrue(expressionString.contains("fl=\"id,a_s,a_i,a_f\""));
+      assertTrue(expressionString.contains("sort=\"a_f asc, a_i asc\""));
+      assertTrue(expressionString.contains("min(a_i)"));
+      assertTrue(expressionString.contains("max(a_i)"));
+      assertTrue(expressionString.contains("avg(a_i,false)"));
+      assertTrue(expressionString.contains("count(*)"));
+      assertTrue(expressionString.contains("sum(a_i)"));
+    }
   }
 
   @Test
   public void testUniqueStream() throws Exception {
-
-    UniqueStream stream;
     String expressionString;
     
     // Basic test
-    stream = new UniqueStream(StreamExpressionParser.parse("unique(search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"), over=\"a_f\")"), factory);
-    expressionString = stream.toExpression(factory).toString();
-    assertTrue(expressionString.contains("unique(search(collection1"));
-    assertTrue(expressionString.contains("over=a_f"));
+    try (UniqueStream stream = new UniqueStream(StreamExpressionParser.parse("unique(search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"), over=\"a_f\")"), factory)) {
+      expressionString = stream.toExpression(factory).toString();
+      assertTrue(expressionString.contains("unique(search(collection1"));
+      assertTrue(expressionString.contains("over=a_f"));
+    }
   }
   
   @Test
   public void testMergeStream() throws Exception {
-
-    MergeStream stream;
     String expressionString;
     
     // Basic test
-    stream = new MergeStream(StreamExpressionParser.parse("merge("
+    try (MergeStream stream = new MergeStream(StreamExpressionParser.parse("merge("
                               + "search(collection1, q=\"id:(0 3 4)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
                               + "search(collection1, q=\"id:(1 2)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
-                              + "on=\"a_f asc, a_s asc\")"), factory);
-    expressionString = stream.toExpression(factory).toString();
-    assertTrue(expressionString.contains("q=\"id:(0 3 4)\""));
-    assertTrue(expressionString.contains("q=\"id:(1 2)\""));
-    assertTrue(expressionString.contains("on=\"a_f asc,a_s asc\""));
+                              + "on=\"a_f asc, a_s asc\")"), factory)) {
+      expressionString = stream.toExpression(factory).toString();
+      assertTrue(expressionString.contains("q=\"id:(0 3 4)\""));
+      assertTrue(expressionString.contains("q=\"id:(1 2)\""));
+      assertTrue(expressionString.contains("on=\"a_f asc,a_s asc\""));
+    }
   }
   
   @Test
   public void testRankStream() throws Exception {
 
-    RankStream stream;
     String expressionString;
     
     // Basic test
-    stream = new RankStream(StreamExpressionParser.parse("top("
+    try (RankStream stream = new RankStream(StreamExpressionParser.parse("top("
                                               + "n=3,"
                                               + "search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc,a_i asc\"),"
-                                              + "sort=\"a_f asc, a_i asc\")"), factory);
-    expressionString = stream.toExpression(factory).toString();
-    assertTrue(expressionString.contains("top(n=3,search(collection1"));
-    assertTrue(expressionString.contains("sort=\"a_f asc,a_i asc\""));
-    // find 2nd instance of sort
-    assertTrue(expressionString.substring(expressionString.indexOf("sort=\"a_f asc,a_i asc\"") + 1).contains("sort=\"a_f asc,a_i asc\""));
+                                              + "sort=\"a_f asc, a_i asc\")"), factory)) {
+      expressionString = stream.toExpression(factory).toString();
+      assertTrue(expressionString.contains("top(n=3,search(collection1"));
+      assertTrue(expressionString.contains("sort=\"a_f asc,a_i asc\""));
+      // find 2nd instance of sort
+      assertTrue(expressionString.substring(expressionString.indexOf("sort=\"a_f asc,a_i asc\"") + 1).contains("sort=\"a_f asc,a_i asc\""));
+    }
   }
 
   @Test
   public void testReducerStream() throws Exception {
-
-    ReducerStream stream;
     String expressionString;
     
     // Basic test
-    stream = new ReducerStream(StreamExpressionParser.parse("reduce("
+    try (ReducerStream stream = new ReducerStream(StreamExpressionParser.parse("reduce("
                                                   + "search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_s desc, a_f asc\"),"
-                                                  + "by=\"a_s\", group(sort=\"a_i desc\", n=\"5\"))"), factory);
-    expressionString = stream.toExpression(factory).toString();
-    assertTrue(expressionString.contains("reduce(search(collection1"));
-    assertTrue(expressionString.contains("by=a_s"));
+                                                  + "by=\"a_s\", group(sort=\"a_i desc\", n=\"5\"))"), factory)) {
+      expressionString = stream.toExpression(factory).toString();
+      assertTrue(expressionString.contains("reduce(search(collection1"));
+      assertTrue(expressionString.contains("by=a_s"));
+    }
   }
   
   @Test
@@ -238,22 +229,22 @@ public class StreamExpressionToExpessionTest extends LuceneTestCase {
                                                                  + "fl=\"id,a_s,a_i,a_f\", "
                                                                  + "sort=\"a_f asc, a_i asc\"))");
     
-    UpdateStream updateStream = new UpdateStream(expression, factory);
-    String expressionString = updateStream.toExpression(factory).toString();
-    
-    assertTrue(expressionString.contains("update(collection2"));
-    assertTrue(expressionString.contains("batchSize=5"));
-    assertTrue(expressionString.contains("search(collection1"));
+    try (UpdateStream updateStream = new UpdateStream(expression, factory)) {
+      String expressionString = updateStream.toExpression(factory).toString();
+
+      assertTrue(expressionString.contains("update(collection2"));
+      assertTrue(expressionString.contains("batchSize=5"));
+      assertTrue(expressionString.contains("search(collection1"));
+    }
   }
   
   @Test
   public void testFacetStream() throws Exception {
 
-    FacetStream stream;
     String expressionString;
     
     // Basic test
-    stream = new FacetStream(StreamExpressionParser.parse("facet("
+    try (FacetStream stream = new FacetStream(StreamExpressionParser.parse("facet("
                                                         +   "collection1, "
                                                         +   "q=\"*:*\", "
                                                         +   "buckets=\"a_s\", "
@@ -264,68 +255,68 @@ public class StreamExpressionToExpessionTest extends LuceneTestCase {
                                                         +   "max(a_i), max(a_f), "
                                                         +   "avg(a_i), avg(a_f), "
                                                         +   "count(*)"
-                                                        + ")"), factory);
-    expressionString = stream.toExpression(factory).toString();
-    assertTrue(expressionString.contains("facet(collection1"));
-    assertTrue(expressionString.contains("q=\"*:*\""));
-    assertTrue(expressionString.contains("buckets=a_s"));
-    assertTrue(expressionString.contains("bucketSorts=\"sum(a_i) asc\""));
-    assertTrue(expressionString.contains("bucketSizeLimit=100"));
-    assertTrue(expressionString.contains("sum(a_i)"));
-    assertTrue(expressionString.contains("sum(a_f)"));
-    assertTrue(expressionString.contains("min(a_i)"));
-    assertTrue(expressionString.contains("min(a_f)"));
-    assertTrue(expressionString.contains("max(a_i)"));
-    assertTrue(expressionString.contains("max(a_f)"));
-    assertTrue(expressionString.contains("avg(a_i,false)"));
-    assertTrue(expressionString.contains("avg(a_f,false)"));
-    assertTrue(expressionString.contains("count(*)"));
+                                                        + ")"), factory)) {
+      expressionString = stream.toExpression(factory).toString();
+      assertTrue(expressionString.contains("facet(collection1"));
+      assertTrue(expressionString.contains("q=\"*:*\""));
+      assertTrue(expressionString.contains("buckets=a_s"));
+      assertTrue(expressionString.contains("bucketSorts=\"sum(a_i) asc\""));
+      assertTrue(expressionString.contains("bucketSizeLimit=100"));
+      assertTrue(expressionString.contains("sum(a_i)"));
+      assertTrue(expressionString.contains("sum(a_f)"));
+      assertTrue(expressionString.contains("min(a_i)"));
+      assertTrue(expressionString.contains("min(a_f)"));
+      assertTrue(expressionString.contains("max(a_i)"));
+      assertTrue(expressionString.contains("max(a_f)"));
+      assertTrue(expressionString.contains("avg(a_i,false)"));
+      assertTrue(expressionString.contains("avg(a_f,false)"));
+      assertTrue(expressionString.contains("count(*)"));
+    }
   }
   
   @Test
   public void testJDBCStream() throws Exception {
-
-    JDBCStream stream;
     String expressionString;
     
     // Basic test
-    stream = new JDBCStream(StreamExpressionParser.parse("jdbc(connection=\"jdbc:hsqldb:mem:.\", sql=\"select PEOPLE.ID, PEOPLE.NAME, COUNTRIES.COUNTRY_NAME from PEOPLE inner join COUNTRIES on PEOPLE.COUNTRY_CODE = COUNTRIES.CODE order by PEOPLE.ID\", sort=\"ID asc\")"), factory);
-    expressionString = stream.toExpression(factory).toString();
-    assertTrue(expressionString.contains("jdbc(connection=\"jdbc:hsqldb:mem:.\","));
-    assertTrue(expressionString.contains("sql=\"select PEOPLE.ID, PEOPLE.NAME, COUNTRIES.COUNTRY_NAME from PEOPLE inner join COUNTRIES on PEOPLE.COUNTRY_CODE = COUNTRIES.CODE order by PEOPLE.ID\""));
-    assertTrue(expressionString.contains("sort=\"ID asc\""));
+    try (JDBCStream stream = new JDBCStream(StreamExpressionParser.parse("jdbc(connection=\"jdbc:hsqldb:mem:.\", sql=\"select PEOPLE.ID, PEOPLE.NAME, COUNTRIES.COUNTRY_NAME from PEOPLE inner join COUNTRIES on PEOPLE.COUNTRY_CODE = COUNTRIES.CODE order by PEOPLE.ID\", sort=\"ID asc\")"), factory)) {
+      expressionString = stream.toExpression(factory).toString();
+      assertTrue(expressionString.contains("jdbc(connection=\"jdbc:hsqldb:mem:.\","));
+      assertTrue(expressionString.contains("sql=\"select PEOPLE.ID, PEOPLE.NAME, COUNTRIES.COUNTRY_NAME from PEOPLE inner join COUNTRIES on PEOPLE.COUNTRY_CODE = COUNTRIES.CODE order by PEOPLE.ID\""));
+      assertTrue(expressionString.contains("sort=\"ID asc\""));
+    }
   }
 
   @Test 
   public void testIntersectStream() throws Exception {
-    IntersectStream stream;
     String expressionString;
     
     // Basic test
-    stream = new IntersectStream(StreamExpressionParser.parse("intersect("
+    try (IntersectStream stream = new IntersectStream(StreamExpressionParser.parse("intersect("
                               + "search(collection1, q=\"id:(0 3 4)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
                               + "search(collection1, q=\"id:(1 2)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
-                              + "on=\"a_f, a_s\")"), factory);
-    expressionString = stream.toExpression(factory).toString();
-    assertTrue(expressionString.contains("q=\"id:(0 3 4)\""));
-    assertTrue(expressionString.contains("q=\"id:(1 2)\""));
-    assertTrue(expressionString.contains("on=\"a_f,a_s\""));
+                              + "on=\"a_f, a_s\")"), factory)) {
+      expressionString = stream.toExpression(factory).toString();
+      assertTrue(expressionString.contains("q=\"id:(0 3 4)\""));
+      assertTrue(expressionString.contains("q=\"id:(1 2)\""));
+      assertTrue(expressionString.contains("on=\"a_f,a_s\""));
+    }
   }
 
   @Test 
   public void testComplementStream() throws Exception {
-    ComplementStream stream;
     String expressionString;
     
     // Basic test
-    stream = new ComplementStream(StreamExpressionParser.parse("complement("
+    try (ComplementStream stream = new ComplementStream(StreamExpressionParser.parse("complement("
                               + "search(collection1, q=\"id:(0 3 4)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
                               + "search(collection1, q=\"id:(1 2)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
-                              + "on=\"a_f, a_s\")"), factory);
-    expressionString = stream.toExpression(factory).toString();
-    assertTrue(expressionString.contains("q=\"id:(0 3 4)\""));
-    assertTrue(expressionString.contains("q=\"id:(1 2)\""));
-    assertTrue(expressionString.contains("on=\"a_f,a_s\""));
+                              + "on=\"a_f, a_s\")"), factory)) {
+      expressionString = stream.toExpression(factory).toString();
+      assertTrue(expressionString.contains("q=\"id:(0 3 4)\""));
+      assertTrue(expressionString.contains("q=\"id:(1 2)\""));
+      assertTrue(expressionString.contains("on=\"a_f,a_s\""));
+    }
   }
   
   @Test
@@ -337,27 +328,30 @@ public class StreamExpressionToExpessionTest extends LuceneTestCase {
     
     // Basic test
     String originalExpressionString = "search(collection1,fl=\"id,first\",sort=\"first asc\",q=\"presentTitles:\\\"chief, executive officer\\\" AND age:[36 TO *]\")";
-    CloudSolrStream firstStream = new CloudSolrStream(StreamExpressionParser.parse(originalExpressionString), factory);
-    String firstExpressionString = firstStream.toExpression(factory).toString();
-    
-    CloudSolrStream secondStream = new CloudSolrStream(StreamExpressionParser.parse(firstExpressionString), factory);
-    String secondExpressionString = secondStream.toExpression(factory).toString();
-    
-    assertTrue(firstExpressionString.contains("q=\"presentTitles:\\\"chief, executive officer\\\" AND age:[36 TO *]\""));
-    assertTrue(secondExpressionString.contains("q=\"presentTitles:\\\"chief, executive officer\\\" AND age:[36 TO *]\""));
+    try (CloudSolrStream firstStream = new CloudSolrStream(StreamExpressionParser.parse(originalExpressionString), factory)) {
+      String firstExpressionString = firstStream.toExpression(factory).toString();
+
+      try (CloudSolrStream secondStream = new CloudSolrStream(StreamExpressionParser.parse(firstExpressionString), factory)) {
+        String secondExpressionString = secondStream.toExpression(factory).toString();
+
+        assertTrue(firstExpressionString.contains("q=\"presentTitles:\\\"chief, executive officer\\\" AND age:[36 TO *]\""));
+        assertTrue(secondExpressionString.contains("q=\"presentTitles:\\\"chief, executive officer\\\" AND age:[36 TO *]\""));
+      }
+    }
   }
 
   @Test
   public void testFeaturesSelectionStream() throws Exception {
     String expr = "featuresSelection(collection1, q=\"*:*\", featureSet=\"first\", field=\"tv_text\", outcome=\"out_i\", numTerms=4, positiveLabel=2)";
-    FeaturesSelectionStream stream = new FeaturesSelectionStream(StreamExpressionParser.parse(expr), factory);
-    String expressionString = stream.toExpression(factory).toString();
-    assertTrue(expressionString.contains("q=\"*:*\""));
-    assertTrue(expressionString.contains("featureSet=first"));
-    assertTrue(expressionString.contains("field=tv_text"));
-    assertTrue(expressionString.contains("outcome=out_i"));
-    assertTrue(expressionString.contains("numTerms=4"));
-    assertTrue(expressionString.contains("positiveLabel=2"));
+    try (FeaturesSelectionStream stream = new FeaturesSelectionStream(StreamExpressionParser.parse(expr), factory)) {
+      String expressionString = stream.toExpression(factory).toString();
+      assertTrue(expressionString.contains("q=\"*:*\""));
+      assertTrue(expressionString.contains("featureSet=first"));
+      assertTrue(expressionString.contains("field=tv_text"));
+      assertTrue(expressionString.contains("outcome=out_i"));
+      assertTrue(expressionString.contains("numTerms=4"));
+      assertTrue(expressionString.contains("positiveLabel=2"));
+    }
   }
 
   @Test
@@ -370,15 +364,16 @@ public class StreamExpressionToExpessionTest extends LuceneTestCase {
         "field=\"tv_text\", " +
         "outcome=\"out_i\", " +
         "maxIterations=100)";
-    TextLogitStream logitStream = new TextLogitStream(StreamExpressionParser.parse(expr), factory);
-    String expressionString = logitStream.toExpression(factory).toString();
-    assertTrue(expressionString.contains("q=\"*:*\""));
-    assertTrue(expressionString.contains("name=model"));
-    assertFalse(expressionString.contains("terms="));
-    assertTrue(expressionString.contains("featuresSelection("));
-    assertTrue(expressionString.contains("field=tv_text"));
-    assertTrue(expressionString.contains("outcome=out_i"));
-    assertTrue(expressionString.contains("maxIterations=100"));
+    try (TextLogitStream logitStream = new TextLogitStream(StreamExpressionParser.parse(expr), factory)) {
+      String expressionString = logitStream.toExpression(factory).toString();
+      assertTrue(expressionString.contains("q=\"*:*\""));
+      assertTrue(expressionString.contains("name=model"));
+      assertFalse(expressionString.contains("terms="));
+      assertTrue(expressionString.contains("featuresSelection("));
+      assertTrue(expressionString.contains("field=tv_text"));
+      assertTrue(expressionString.contains("outcome=out_i"));
+      assertTrue(expressionString.contains("maxIterations=100"));
+    }
   }
   
   @Test
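
Every test in this file gets the same conversion: the stream moves into the try header, while the result that must outlive the block (expressionString) stays declared outside it. Schematically, with a hypothetical stand-in stream:

    public class ScopedResultSketch {
      static class ParsedStream implements AutoCloseable {
        String toExpression() { return "search(collection1, q=\"*:*\")"; }
        @Override public void close() { }
      }

      public static void main(String[] args) {
        String expressionString;              // result outlives the resource
        try (ParsedStream stream = new ParsedStream()) {
          expressionString = stream.toExpression();
        }                                     // stream is closed here
        // assertion-style checks can safely use the result afterwards
        System.out.println(expressionString.contains("collection1"));
      }
    }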

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionToExplanationTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionToExplanationTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionToExplanationTest.java
index 91cab3d..c1c5369 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionToExplanationTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionToExplanationTest.java
@@ -69,126 +69,108 @@ public class StreamExpressionToExplanationTest extends LuceneTestCase {
     
   @Test
   public void testCloudSolrStream() throws Exception {
-
-    CloudSolrStream stream;
-    
     // Basic test
-    stream = new CloudSolrStream(StreamExpressionParser.parse("search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\")"), factory);
-    Explanation explanation = stream.toExplanation(factory);
-    Assert.assertEquals("search", explanation.getFunctionName());
-    Assert.assertEquals(CloudSolrStream.class.getName(), explanation.getImplementingClass());
-
+    try (CloudSolrStream stream = new CloudSolrStream(StreamExpressionParser.parse("search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\")"), factory)) {
+      Explanation explanation = stream.toExplanation(factory);
+      Assert.assertEquals("search", explanation.getFunctionName());
+      Assert.assertEquals(CloudSolrStream.class.getName(), explanation.getImplementingClass());
+    }
   }
   
   @Test
   public void testSelectStream() throws Exception {
-
-    SelectStream stream;
-    
     // Basic test
-    stream = new SelectStream(StreamExpressionParser.parse("select(\"a_s as fieldA\", search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"))"), factory);
-    Explanation explanation = stream.toExplanation(factory);
-    Assert.assertEquals("select", explanation.getFunctionName());
-    Assert.assertEquals(SelectStream.class.getName(), explanation.getImplementingClass());    
+    try (SelectStream stream = new SelectStream(StreamExpressionParser.parse("select(\"a_s as fieldA\", search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"))"), factory)) {
+      Explanation explanation = stream.toExplanation(factory);
+      Assert.assertEquals("select", explanation.getFunctionName());
+      Assert.assertEquals(SelectStream.class.getName(), explanation.getImplementingClass());
+    }
   }
 
   @Test
   public void testDaemonStream() throws Exception {
-
-    DaemonStream stream;
-
     // Basic test
-    stream = new DaemonStream(StreamExpressionParser.parse("daemon(search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"), id=\"blah\", runInterval=\"1000\", queueSize=\"100\")"), factory);
-    Explanation explanation = stream.toExplanation(factory);
-    Assert.assertEquals("daemon", explanation.getFunctionName());
-    Assert.assertEquals(DaemonStream.class.getName(), explanation.getImplementingClass());
+    try (DaemonStream stream = new DaemonStream(StreamExpressionParser.parse("daemon(search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"), id=\"blah\", runInterval=\"1000\", queueSize=\"100\")"), factory)) {
+      Explanation explanation = stream.toExplanation(factory);
+      Assert.assertEquals("daemon", explanation.getFunctionName());
+      Assert.assertEquals(DaemonStream.class.getName(), explanation.getImplementingClass());
+    }
   }
 
   @Test
   public void testTopicStream() throws Exception {
-
-    TopicStream stream;
-
     // Basic test
-    stream = new TopicStream(StreamExpressionParser.parse("topic(collection2, collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", id=\"blah\", checkpointEvery=1000)"), factory);
-    Explanation explanation = stream.toExplanation(factory);
-    Assert.assertEquals("topic", explanation.getFunctionName());
-    Assert.assertEquals(TopicStream.class.getName(), explanation.getImplementingClass());
+    try (TopicStream stream = new TopicStream(StreamExpressionParser.parse("topic(collection2, collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", id=\"blah\", checkpointEvery=1000)"), factory)) {
+      Explanation explanation = stream.toExplanation(factory);
+      Assert.assertEquals("topic", explanation.getFunctionName());
+      Assert.assertEquals(TopicStream.class.getName(), explanation.getImplementingClass());
+    }
   }
 
 
   @Test
   public void testStatsStream() throws Exception {
-
-    StatsStream stream;
-    
     // Basic test
-    stream = new StatsStream(StreamExpressionParser.parse("stats(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\", sum(a_i), avg(a_i), count(*), min(a_i), max(a_i))"), factory);
-    Explanation explanation = stream.toExplanation(factory);
-    Assert.assertEquals("stats", explanation.getFunctionName());
-    Assert.assertEquals(StatsStream.class.getName(), explanation.getImplementingClass());
-    
+    try (StatsStream stream = new StatsStream(StreamExpressionParser.parse("stats(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\", sum(a_i), avg(a_i), count(*), min(a_i), max(a_i))"), factory)) {
+      Explanation explanation = stream.toExplanation(factory);
+      Assert.assertEquals("stats", explanation.getFunctionName());
+      Assert.assertEquals(StatsStream.class.getName(), explanation.getImplementingClass());
+    }
   }
 
   @Test
   public void testUniqueStream() throws Exception {
-
-    UniqueStream stream;
-    
     // Basic test
-    stream = new UniqueStream(StreamExpressionParser.parse("unique(search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"), over=\"a_f\")"), factory);
-    Explanation explanation = stream.toExplanation(factory);
-    Assert.assertEquals("unique", explanation.getFunctionName());
-    Assert.assertEquals(UniqueStream.class.getName(), explanation.getImplementingClass());
+    try (UniqueStream stream = new UniqueStream(StreamExpressionParser.parse("unique(search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"), over=\"a_f\")"), factory)) {
+      Explanation explanation = stream.toExplanation(factory);
+      Assert.assertEquals("unique", explanation.getFunctionName());
+      Assert.assertEquals(UniqueStream.class.getName(), explanation.getImplementingClass());
+    }
   }
   
   @Test
   public void testMergeStream() throws Exception {
-
-    MergeStream stream;
-    
     // Basic test
-    stream = new MergeStream(StreamExpressionParser.parse("merge("
+    try (MergeStream stream = new MergeStream(StreamExpressionParser.parse("merge("
                               + "search(collection1, q=\"id:(0 3 4)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
                               + "search(collection1, q=\"id:(1 2)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
-                              + "on=\"a_f asc, a_s asc\")"), factory);
-    Explanation explanation = stream.toExplanation(factory);
-    Assert.assertEquals("merge", explanation.getFunctionName());
-    Assert.assertEquals(MergeStream.class.getName(), explanation.getImplementingClass());
-    Assert.assertEquals(2, ((StreamExplanation)explanation).getChildren().size());
+                              + "on=\"a_f asc, a_s asc\")"), factory)) {
+      Explanation explanation = stream.toExplanation(factory);
+      Assert.assertEquals("merge", explanation.getFunctionName());
+      Assert.assertEquals(MergeStream.class.getName(), explanation.getImplementingClass());
+      Assert.assertEquals(2, ((StreamExplanation) explanation).getChildren().size());
+    }
   }
   
   @Test
   public void testRankStream() throws Exception {
-
-    RankStream stream;
     String expressionString;
     
     // Basic test
-    stream = new RankStream(StreamExpressionParser.parse("top("
+    try (RankStream stream = new RankStream(StreamExpressionParser.parse("top("
                                               + "n=3,"
                                               + "search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc,a_i asc\"),"
-                                              + "sort=\"a_f asc, a_i asc\")"), factory);
-    Explanation explanation = stream.toExplanation(factory);
-    Assert.assertEquals("top", explanation.getFunctionName());
-    Assert.assertEquals(RankStream.class.getName(), explanation.getImplementingClass());
-    Assert.assertEquals(1, ((StreamExplanation)explanation).getChildren().size());
+                                              + "sort=\"a_f asc, a_i asc\")"), factory)) {
+      Explanation explanation = stream.toExplanation(factory);
+      Assert.assertEquals("top", explanation.getFunctionName());
+      Assert.assertEquals(RankStream.class.getName(), explanation.getImplementingClass());
+      Assert.assertEquals(1, ((StreamExplanation) explanation).getChildren().size());
+    }
   }
 
   @Test
   public void testReducerStream() throws Exception {
-
-    ReducerStream stream;
     String expressionString;
     
     // Basic test
-    stream = new ReducerStream(StreamExpressionParser.parse("reduce("
+    try (ReducerStream stream = new ReducerStream(StreamExpressionParser.parse("reduce("
                                                   + "search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_s desc, a_f asc\"),"
-                                                  + "by=\"a_s\", group(sort=\"a_i desc\", n=\"5\"))"), factory);
-    Explanation explanation = stream.toExplanation(factory);
-    Assert.assertEquals("reduce", explanation.getFunctionName());
-    Assert.assertEquals(ReducerStream.class.getName(), explanation.getImplementingClass());
-    Assert.assertEquals(1, ((StreamExplanation)explanation).getChildren().size());
+                                                  + "by=\"a_s\", group(sort=\"a_i desc\", n=\"5\"))"), factory)) {
+      Explanation explanation = stream.toExplanation(factory);
+      Assert.assertEquals("reduce", explanation.getFunctionName());
+      Assert.assertEquals(ReducerStream.class.getName(), explanation.getImplementingClass());
+      Assert.assertEquals(1, ((StreamExplanation) explanation).getChildren().size());
+    }
   }
   
   @Test
@@ -202,26 +184,25 @@ public class StreamExpressionToExplanationTest extends LuceneTestCase {
                                                                  + "fl=\"id,a_s,a_i,a_f\", "
                                                                  + "sort=\"a_f asc, a_i asc\"))");
     
-    UpdateStream updateStream = new UpdateStream(expression, factory);
-    Explanation explanation = updateStream.toExplanation(factory);
-    Assert.assertEquals("solr (collection2)", explanation.getFunctionName());
-    Assert.assertEquals("Solr/Lucene", explanation.getImplementingClass());
-    
-    StreamExplanation updateExplanation = (StreamExplanation)explanation;
-    Assert.assertEquals(1, updateExplanation.getChildren().size());
-    Assert.assertEquals("update", updateExplanation.getChildren().get(0).getFunctionName());
-    Assert.assertEquals(UpdateStream.class.getName(), updateExplanation.getChildren().get(0).getImplementingClass());
+    try (UpdateStream updateStream = new UpdateStream(expression, factory)) {
+      Explanation explanation = updateStream.toExplanation(factory);
+      Assert.assertEquals("solr (collection2)", explanation.getFunctionName());
+      Assert.assertEquals("Solr/Lucene", explanation.getImplementingClass());
+
+      StreamExplanation updateExplanation = (StreamExplanation) explanation;
+      Assert.assertEquals(1, updateExplanation.getChildren().size());
+      Assert.assertEquals("update", updateExplanation.getChildren().get(0).getFunctionName());
+      Assert.assertEquals(UpdateStream.class.getName(), updateExplanation.getChildren().get(0).getImplementingClass());
+    }
     
   }
   
   @Test
   public void testFacetStream() throws Exception {
-
-    FacetStream stream;
     String expressionString;
     
     // Basic test
-    stream = new FacetStream(StreamExpressionParser.parse("facet("
+    try (FacetStream stream = new FacetStream(StreamExpressionParser.parse("facet("
                                                         +   "collection1, "
                                                         +   "q=\"*:*\", "
                                                         +   "buckets=\"a_s\", "
@@ -232,57 +213,57 @@ public class StreamExpressionToExplanationTest extends LuceneTestCase {
                                                         +   "max(a_i), max(a_f), "
                                                         +   "avg(a_i), avg(a_f), "
                                                         +   "count(*)"
-                                                        + ")"), factory);
-    expressionString = stream.toExpression(factory).toString();
-    Explanation explanation = stream.toExplanation(factory);
-    Assert.assertEquals("facet", explanation.getFunctionName());
-    Assert.assertEquals(FacetStream.class.getName(), explanation.getImplementingClass());
-    Assert.assertEquals(1, ((StreamExplanation)explanation).getChildren().size());
+                                                        + ")"), factory)) {
+      expressionString = stream.toExpression(factory).toString();
+      Explanation explanation = stream.toExplanation(factory);
+      Assert.assertEquals("facet", explanation.getFunctionName());
+      Assert.assertEquals(FacetStream.class.getName(), explanation.getImplementingClass());
+      Assert.assertEquals(1, ((StreamExplanation) explanation).getChildren().size());
+    }
   }
   
   @Test
   public void testJDBCStream() throws Exception {
-
-    JDBCStream stream;
     String expressionString;
     
     // Basic test
-    stream = new JDBCStream(StreamExpressionParser.parse("jdbc(connection=\"jdbc:hsqldb:mem:.\", sql=\"select PEOPLE.ID, PEOPLE.NAME, COUNTRIES.COUNTRY_NAME from PEOPLE inner join COUNTRIES on PEOPLE.COUNTRY_CODE = COUNTRIES.CODE order by PEOPLE.ID\", sort=\"ID asc\")"), factory);
-    Explanation explanation = stream.toExplanation(factory);
-    Assert.assertEquals("jdbc", explanation.getFunctionName());
-    Assert.assertEquals(JDBCStream.class.getName(), explanation.getImplementingClass());
-    Assert.assertEquals(1, ((StreamExplanation)explanation).getChildren().size());
+    try (JDBCStream stream = new JDBCStream(StreamExpressionParser.parse("jdbc(connection=\"jdbc:hsqldb:mem:.\", sql=\"select PEOPLE.ID, PEOPLE.NAME, COUNTRIES.COUNTRY_NAME from PEOPLE inner join COUNTRIES on PEOPLE.COUNTRY_CODE = COUNTRIES.CODE order by PEOPLE.ID\", sort=\"ID asc\")"), factory)) {
+      Explanation explanation = stream.toExplanation(factory);
+      Assert.assertEquals("jdbc", explanation.getFunctionName());
+      Assert.assertEquals(JDBCStream.class.getName(), explanation.getImplementingClass());
+      Assert.assertEquals(1, ((StreamExplanation) explanation).getChildren().size());
+    }
   }
 
   @Test 
   public void testIntersectStream() throws Exception {
-    IntersectStream stream;
     String expressionString;
     
     // Basic test
-    stream = new IntersectStream(StreamExpressionParser.parse("intersect("
+    try (IntersectStream stream = new IntersectStream(StreamExpressionParser.parse("intersect("
                               + "search(collection1, q=\"id:(0 3 4)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
                               + "search(collection1, q=\"id:(1 2)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
-                              + "on=\"a_f, a_s\")"), factory);
-    Explanation explanation = stream.toExplanation(factory);
-    Assert.assertEquals("intersect", explanation.getFunctionName());
-    Assert.assertEquals(IntersectStream.class.getName(), explanation.getImplementingClass());
-    Assert.assertEquals(2, ((StreamExplanation)explanation).getChildren().size());
+                              + "on=\"a_f, a_s\")"), factory)) {
+      Explanation explanation = stream.toExplanation(factory);
+      Assert.assertEquals("intersect", explanation.getFunctionName());
+      Assert.assertEquals(IntersectStream.class.getName(), explanation.getImplementingClass());
+      Assert.assertEquals(2, ((StreamExplanation) explanation).getChildren().size());
+    }
   }
 
   @Test 
   public void testComplementStream() throws Exception {
-    ComplementStream stream;
     String expressionString;
     
     // Basic test
-    stream = new ComplementStream(StreamExpressionParser.parse("complement("
+    try (ComplementStream stream = new ComplementStream(StreamExpressionParser.parse("complement("
                               + "search(collection1, q=\"id:(0 3 4)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
                               + "search(collection1, q=\"id:(1 2)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
-                              + "on=\"a_f, a_s\")"), factory);
-    Explanation explanation = stream.toExplanation(factory);
-    Assert.assertEquals("complement", explanation.getFunctionName());
-    Assert.assertEquals(ComplementStream.class.getName(), explanation.getImplementingClass());
-    Assert.assertEquals(2, ((StreamExplanation)explanation).getChildren().size());
+                              + "on=\"a_f, a_s\")"), factory)) {
+      Explanation explanation = stream.toExplanation(factory);
+      Assert.assertEquals("complement", explanation.getFunctionName());
+      Assert.assertEquals(ComplementStream.class.getName(), explanation.getImplementingClass());
+      Assert.assertEquals(2, ((StreamExplanation) explanation).getChildren().size());
+    }
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
index 0de3aa0..9ee6688 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
@@ -157,6 +157,7 @@ public void testSpacesInParams() throws Exception {
   //a value in the field list.
 
   CloudSolrStream stream = new CloudSolrStream("", "collection1", sParams);
+  stream.close();
 }
 
 @Test

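The one-line change above is sufficient because this test only verifies that constructing a CloudSolrStream with spaces in the parameter values succeeds; the stream is never opened or read, but closing it satisfies the resource-leak warning. An equivalent formulation would be a try-with-resources block — a sketch only, reusing the sParams variable from the surrounding test:

    // Hypothetical alternative with the same effect as construct-then-close:
    try (CloudSolrStream stream = new CloudSolrStream("", "collection1", sParams)) {
      // successful construction is the whole assertion; nothing is read
    }
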
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/solrj/src/test/org/apache/solr/client/solrj/response/NoOpResponseParserTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/response/NoOpResponseParserTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/response/NoOpResponseParserTest.java
index c813ea2a..cda751d 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/response/NoOpResponseParserTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/response/NoOpResponseParserTest.java
@@ -20,10 +20,12 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.Reader;
+import java.io.StringReader;
 import java.nio.charset.StandardCharsets;
 import java.util.List;
 
 import org.apache.commons.io.IOUtils;
+import org.apache.commons.io.input.ReaderInputStream;
 import org.apache.solr.SolrJettyTestBase;
 import org.apache.solr.client.solrj.ResponseParser;
 import org.apache.solr.client.solrj.SolrClient;
@@ -36,7 +38,6 @@ import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.util.NamedList;
-import org.apache.solr.core.SolrResourceLoader;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -49,7 +50,7 @@ import org.junit.Test;
 public class NoOpResponseParserTest extends SolrJettyTestBase {
 
   private static InputStream getResponse() throws IOException {
-    return new SolrResourceLoader().openResource("solrj/sampleRangeFacetResponse.xml");
+    return new ReaderInputStream(new StringReader("NO-OP test response"), StandardCharsets.UTF_8);
   }
 
   @BeforeClass

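The new getResponse() body relies on commons-io's ReaderInputStream, which encodes characters from a Reader into bytes on the fly. That removes the dependency on SolrResourceLoader — and the unclosed stream it produced — for a test that only needs some bytes to hand to the parser. A self-contained sketch of the same technique, with an illustrative payload:

    import java.io.InputStream;
    import java.io.StringReader;
    import java.nio.charset.StandardCharsets;

    import org.apache.commons.io.IOUtils;
    import org.apache.commons.io.input.ReaderInputStream;

    class InMemoryStreamSketch {
      public static void main(String[] args) throws Exception {
        // Build an InputStream from an in-memory string; no resource file,
        // no SolrResourceLoader, nothing left unclosed.
        try (InputStream in = new ReaderInputStream(
                 new StringReader("NO-OP test response"), StandardCharsets.UTF_8)) {
          System.out.println(IOUtils.toString(in, StandardCharsets.UTF_8));
        }
      }
    }
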
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/solrj/src/test/org/apache/solr/client/solrj/response/QueryResponseTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/response/QueryResponseTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/response/QueryResponseTest.java
index 7a69815..a567116 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/response/QueryResponseTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/response/QueryResponseTest.java
@@ -43,11 +43,15 @@ public class QueryResponseTest extends LuceneTestCase {
   @Test
   public void testRangeFacets() throws Exception {
     XMLResponseParser parser = new XMLResponseParser();
-    InputStream is = new SolrResourceLoader().openResource("solrj/sampleRangeFacetResponse.xml");
-    assertNotNull(is);
-    Reader in = new InputStreamReader(is, StandardCharsets.UTF_8);
-    NamedList<Object> response = parser.processResponse(in);
-    in.close();
+    NamedList<Object> response = null;
+    try (SolrResourceLoader loader = new SolrResourceLoader();
+         InputStream is = loader.openResource("solrj/sampleRangeFacetResponse.xml")) {
+      assertNotNull(is);
+
+      try (Reader in = new InputStreamReader(is, StandardCharsets.UTF_8)) {
+        response = parser.processResponse(in);
+      }
+    }
 
     QueryResponse qr = new QueryResponse(response, null);
     Assert.assertNotNull(qr);
@@ -100,11 +104,14 @@ public class QueryResponseTest extends LuceneTestCase {
   @Test
   public void testGroupResponse() throws Exception {
     XMLResponseParser parser = new XMLResponseParser();
-    InputStream is = new SolrResourceLoader().openResource("solrj/sampleGroupResponse.xml");
-    assertNotNull(is);
-    Reader in = new InputStreamReader(is, StandardCharsets.UTF_8);
-    NamedList<Object> response = parser.processResponse(in);
-    in.close();
+    NamedList<Object> response = null;
+    try (SolrResourceLoader loader = new SolrResourceLoader();
+         InputStream is = loader.openResource("solrj/sampleGroupResponse.xml")) {
+      assertNotNull(is);
+      try (Reader in = new InputStreamReader(is, StandardCharsets.UTF_8)) {
+        response = parser.processResponse(in);
+      }
+    }
 
     QueryResponse qr = new QueryResponse(response, null);
     assertNotNull(qr);
@@ -202,11 +209,15 @@ public class QueryResponseTest extends LuceneTestCase {
   @Test
   public void testSimpleGroupResponse() throws Exception {
     XMLResponseParser parser = new XMLResponseParser();
-    InputStream is = new SolrResourceLoader().openResource("solrj/sampleSimpleGroupResponse.xml");
-    assertNotNull(is);
-    Reader in = new InputStreamReader(is, StandardCharsets.UTF_8);
-    NamedList<Object> response = parser.processResponse(in);
-    in.close();
+    NamedList<Object> response = null;
+
+    try (SolrResourceLoader loader = new SolrResourceLoader();
+         InputStream is = loader.openResource("solrj/sampleSimpleGroupResponse.xml")) {
+      assertNotNull(is);
+      try (Reader in = new InputStreamReader(is, StandardCharsets.UTF_8)) {
+        response = parser.processResponse(in);
+      }
+    }
 
     QueryResponse qr = new QueryResponse(response, null);
     assertNotNull(qr);

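The three hunks above share one shape: SolrResourceLoader is itself Closeable, so it joins the InputStream in the resource list; the Reader gets an inner try block; and the parsed NamedList is assigned to a variable declared outside so the result survives both scopes. Condensed into a sketch (the resource path is illustrative, other names come from the tests above):

    NamedList<Object> response = null;
    try (SolrResourceLoader loader = new SolrResourceLoader();
         InputStream is = loader.openResource("solrj/sample.xml")) {
      assertNotNull(is);
      try (Reader in = new InputStreamReader(is, StandardCharsets.UTF_8)) {
        response = parser.processResponse(in); // result outlives both try blocks
      }
    }
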
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/94b7f7dc/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestClusteringResponse.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestClusteringResponse.java b/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestClusteringResponse.java
index 7e789d1..75bb0d3 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestClusteringResponse.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestClusteringResponse.java
@@ -37,13 +37,16 @@ public class TestClusteringResponse extends SolrJettyTestBase {
   @Test
   public void testClusteringResponse() throws Exception {
     XMLResponseParser parser = new XMLResponseParser();
-    /*Load a simple XML with the clustering response encoded in an XML format*/
-    InputStream is = new SolrResourceLoader().openResource("solrj/sampleClusteringResponse.xml");
-    assertNotNull(is);
-    Reader in = new InputStreamReader(is, StandardCharsets.UTF_8);
-    NamedList<Object> response = parser.processResponse(in);
-    in.close();
+    NamedList<Object> response = null;
 
+    /*Load a simple XML with the clustering response encoded in an XML format*/
+    try (SolrResourceLoader loader = new SolrResourceLoader();
+         InputStream is = loader.openResource("solrj/sampleClusteringResponse.xml")) {
+      assertNotNull(is);
+      try (Reader in = new InputStreamReader(is, StandardCharsets.UTF_8)) {
+        response = parser.processResponse(in);
+      }
+    }
     QueryResponse qr = new QueryResponse(response, null);
     ClusteringResponse clusteringResponse = qr.getClusteringResponse();
     List<Cluster> clusters = clusteringResponse.getClusters();