You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by er...@apache.org on 2018/08/23 02:50:39 UTC

[5/5] lucene-solr:master: SOLR-12690: Regularize LoggerFactory declarations

SOLR-12690: Regularize LoggerFactory declarations


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/8cde1277
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/8cde1277
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/8cde1277

Branch: refs/heads/master
Commit: 8cde1277ec7151bd6ab62950ac93cbdd6ff04d9f
Parents: 5eab1c3
Author: Erick Erickson <Er...@gmail.com>
Authored: Wed Aug 22 15:26:37 2018 -0700
Committer: Erick Erickson <Er...@gmail.com>
Committed: Wed Aug 22 15:26:37 2018 -0700

----------------------------------------------------------------------
 .../directory/DirectoryTaxonomyReader.java      |   6 +-
 .../src/groovy/check-source-patterns.groovy     |   9 +-
 solr/CHANGES.txt                                |   2 +
 .../handler/dataimport/MailEntityProcessor.java |  74 ++++-----
 .../handler/dataimport/BinURLDataSource.java    |  10 +-
 .../handler/dataimport/DataImportHandler.java   |   6 +-
 .../solr/handler/dataimport/DataImporter.java   |  24 +--
 .../dataimport/DateFormatTransformer.java       |   4 +-
 .../solr/handler/dataimport/DocBuilder.java     |  34 ++--
 .../dataimport/FieldReaderDataSource.java       |   6 +-
 .../dataimport/FieldStreamDataSource.java       |   4 +-
 .../solr/handler/dataimport/FileDataSource.java |   8 +-
 .../solr/handler/dataimport/JdbcDataSource.java |  20 +--
 .../solr/handler/dataimport/LogTransformer.java |  22 +--
 .../handler/dataimport/RegexTransformer.java    |   4 +-
 .../handler/dataimport/SolrEntityProcessor.java |   6 +-
 .../handler/dataimport/SqlEntityProcessor.java  |   8 +-
 .../handler/dataimport/TemplateTransformer.java |   4 +-
 .../solr/handler/dataimport/URLDataSource.java  |  10 +-
 .../dataimport/XPathEntityProcessor.java        |  24 +--
 .../handler/dataimport/XPathRecordReader.java   |   6 +-
 .../dataimport/config/DIHConfiguration.java     |   6 +-
 .../TestSolrEntityProcessorEndToEnd.java        |  18 +-
 .../client/solrj/embedded/JettySolrRunner.java  |   6 +-
 .../java/org/apache/solr/cloud/LockTree.java    |   6 +-
 .../apache/solr/cloud/OverseerTaskQueue.java    |  14 +-
 .../org/apache/solr/cloud/RecoveryStrategy.java | 164 +++++++++----------
 .../apache/solr/cloud/ReplicateFromLeader.java  |   6 +-
 .../org/apache/solr/cloud/SolrZkServer.java     |   4 +-
 .../apache/solr/cloud/ZkDistributedQueue.java   |   6 +-
 .../cloud/autoscaling/HttpTriggerListener.java  |   4 +-
 .../solr/cloud/autoscaling/LoggingListener.java |   4 +-
 .../solr/cloud/autoscaling/TriggerBase.java     |   8 +-
 .../cloud/autoscaling/TriggerEventQueue.java    |  16 +-
 .../apache/solr/core/HdfsDirectoryFactory.java  |  50 +++---
 .../solr/core/IndexDeletionPolicyWrapper.java   |   4 +-
 .../repository/BackupRepositoryFactory.java     |   6 +-
 .../org/apache/solr/handler/IndexFetcher.java   | 142 ++++++++--------
 .../apache/solr/handler/ReplicationHandler.java |  68 ++++----
 .../org/apache/solr/handler/SnapShooter.java    |  16 +-
 .../admin/AutoscalingHistoryHandler.java        |   4 +-
 .../solr/handler/component/QueryComponent.java  |   4 +-
 .../handler/component/SpellCheckComponent.java  |  24 +--
 .../handler/component/SuggestComponent.java     |  26 +--
 .../metrics/reporters/ReporterClientCache.java  |   6 +-
 .../reporters/jmx/JmxMetricsReporter.java       |  58 +++----
 .../reporters/solr/SolrShardReporter.java       |   2 +-
 .../org/apache/solr/request/SimpleFacets.java   |   4 +-
 .../solr/response/BinaryResponseWriter.java     |   4 +-
 .../solr/schema/JsonPreAnalyzedParser.java      |  12 +-
 .../apache/solr/schema/PreAnalyzedField.java    |  10 +-
 .../solr/search/SurroundQParserPlugin.java      |   2 +-
 .../search/stats/ExactSharedStatsCache.java     |   6 +-
 .../solr/search/stats/ExactStatsCache.java      |  28 ++--
 .../apache/solr/search/stats/LRUStatsCache.java |  10 +-
 .../solr/search/stats/LocalStatsCache.java      |  18 +-
 .../org/apache/solr/search/stats/StatsUtil.java |  22 +--
 .../solr/servlet/CheckLoggingConfiguration.java |   4 +-
 .../solr/spelling/DirectSolrSpellChecker.java   |   6 +-
 .../solr/spelling/SpellCheckCollator.java       |  10 +-
 .../solr/spelling/suggest/SolrSuggester.java    |  34 ++--
 .../apache/solr/spelling/suggest/Suggester.java |  26 +--
 .../suggest/jaspell/JaspellLookupFactory.java   |   4 +-
 .../solr/store/blockcache/BlockDirectory.java   |   8 +-
 .../apache/solr/store/hdfs/HdfsDirectory.java   |  16 +-
 .../org/apache/solr/util/stats/MetricUtils.java |   6 +-
 .../apache/solr/cloud/DeleteReplicaTest.java    |   8 +-
 .../solr/cloud/LIROnShardRestartTest.java       |   4 +-
 .../solr/cloud/LIRRollingUpdatesTest.java       |   4 +-
 .../solr/cloud/LeaderVoteWaitTimeoutTest.java   |   6 +-
 .../apache/solr/cloud/TestCloudConsistency.java |   4 +-
 .../org/apache/solr/cloud/TestPullReplica.java  |  18 +-
 .../cloud/TestPullReplicaErrorHandling.java     |  12 +-
 .../org/apache/solr/cloud/TestTlogReplica.java  |  12 +-
 .../sim/GenericDistributedQueue.java            |   6 +-
 .../cloud/autoscaling/sim/SimCloudManager.java  |  18 +-
 .../sim/SimClusterStateProvider.java            |  50 +++---
 .../autoscaling/sim/SimDistribStateManager.java |   2 +-
 .../sim/SimDistributedQueueFactory.java         |   4 +-
 .../autoscaling/sim/SimNodeStateProvider.java   |  12 +-
 .../sim/TestClusterStateProvider.java           |   4 +-
 .../sim/TestDistribStateManager.java            |   4 +-
 .../search/CurrencyRangeFacetCloudTest.java     |   2 +-
 .../solr/search/facet/RangeFacetCloudTest.java  |   2 +-
 .../cloud/autoscaling/AutoScalingConfig.java    |   4 +-
 .../client/solrj/cloud/autoscaling/Policy.java  |   8 +-
 .../solr/client/solrj/impl/HttpClientUtil.java  |  12 +-
 .../solrj/impl/Krb5HttpClientBuilder.java       |  12 +-
 .../client/solrj/io/stream/CommitStream.java    |   4 +-
 .../client/solrj/io/stream/DaemonStream.java    |  14 +-
 .../client/solrj/io/stream/ExecutorStream.java  |   8 +-
 .../client/solrj/io/stream/UpdateStream.java    |   6 +-
 .../solr/common/cloud/ZkConfigManager.java      |   4 +-
 .../apache/solr/common/cloud/ZkStateReader.java | 140 ++++++++--------
 .../org/apache/solr/common/util/IOUtils.java    |   4 +-
 95 files changed, 791 insertions(+), 790 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java
----------------------------------------------------------------------
diff --git a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java
index cde56e1..2e4270b 100644
--- a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java
+++ b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java
@@ -60,7 +60,7 @@ import org.apache.lucene.util.RamUsageEstimator;
  */
 public class DirectoryTaxonomyReader extends TaxonomyReader implements Accountable {
 
-  private static final Logger logger = Logger.getLogger(DirectoryTaxonomyReader.class.getName());
+  private static final Logger log = Logger.getLogger(DirectoryTaxonomyReader.class.getName());
 
   private static final int DEFAULT_CACHE_VALUE = 4000;
 
@@ -421,8 +421,8 @@ public class DirectoryTaxonomyReader extends TaxonomyReader implements Accountab
         }
         sb.append(i +": "+category.toString()+"\n");
       } catch (IOException e) {
-        if (logger.isLoggable(Level.FINEST)) {
-          logger.log(Level.FINEST, e.getMessage(), e);
+        if (log.isLoggable(Level.FINEST)) {
+          log.log(Level.FINEST, e.getMessage(), e);
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/lucene/tools/src/groovy/check-source-patterns.groovy
----------------------------------------------------------------------
diff --git a/lucene/tools/src/groovy/check-source-patterns.groovy b/lucene/tools/src/groovy/check-source-patterns.groovy
index d7af361..7fa4aa4 100644
--- a/lucene/tools/src/groovy/check-source-patterns.groovy
+++ b/lucene/tools/src/groovy/check-source-patterns.groovy
@@ -66,7 +66,7 @@ def lineSplitter = ~$/[\r\n]+/$;
 def singleLineSplitter = ~$/\n\r?/$;
 def licenseMatcher = Defaults.createDefaultMatcher();
 def validLoggerPattern = ~$/(?s)\b(private\s|static\s|final\s){3}+\s*Logger\s+\p{javaJavaIdentifierStart}+\s+=\s+\QLoggerFactory.getLogger(MethodHandles.lookup().lookupClass());\E/$;
-def validLoggerNamePattern = ~$/(?s)\b(private\s|static\s|final\s){3}+\s*Logger\s+(log|LOG)+\s+=\s+\QLoggerFactory.getLogger(MethodHandles.lookup().lookupClass());\E/$;
+def validLoggerNamePattern = ~$/(?s)\b(private\s|static\s|final\s){3}+\s*Logger\s+log+\s+=\s+\QLoggerFactory.getLogger(MethodHandles.lookup().lookupClass());\E/$;
 def packagePattern = ~$/(?m)^\s*package\s+org\.apache.*;/$;
 def xmlTagPattern = ~$/(?m)\s*<[a-zA-Z].*/$;
 def sourceHeaderPattern = ~$/\[source\b.*/$;
@@ -170,11 +170,8 @@ ant.fileScanner{
       if (!validLoggerPattern.matcher(text).find()) {
         reportViolation(f, 'invalid logging pattern [not private static final, uses static class name]');
       }
-      if (f.toString().contains('solr/contrib') && !validLoggerNamePattern.matcher(text).find()) {
-        reportViolation(f, 'invalid logger name [not log or LOG]');
-      }
-      if (f.toString().contains('solr/core') && !validLoggerNamePattern.matcher(text).find()) {
-        reportViolation(f, 'invalid logger name [not log or LOG]');
+      if (!validLoggerNamePattern.matcher(text).find()) {
+        reportViolation(f, 'invalid logger name [log, uses static class name, not specialized logger]')
       }
     }
     checkLicenseHeaderPrecedes(f, 'package', packagePattern, javaCommentPattern, text, ratDocument);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 1303062..a902422 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -333,6 +333,8 @@ Other Changes
 
 * SOLR-12625: Combine SolrDocumentFetcher and RetrieveFieldsOptimizer (Erick Erickson)
 
+* SOLR-12690: Regularize LoggerFactory declarations (Erick Erickson)
+
 ==================  7.4.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java b/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java
index f7ad22f..54f5984 100644
--- a/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java
+++ b/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java
@@ -58,7 +58,7 @@ public class MailEntityProcessor extends EntityProcessorBase {
       new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.ROOT);
   private static final SimpleDateFormat afterFmt = 
       new SimpleDateFormat("yyyy/MM/dd", Locale.ROOT);
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   public static interface CustomFilter {
     public SearchTerm getCustomSearch(Folder folder);
@@ -112,7 +112,7 @@ public class MailEntityProcessor extends EntityProcessorBase {
     String varName = ConfigNameConstants.IMPORTER_NS_SHORT + "." + cname + "."
         + DocBuilder.LAST_INDEX_TIME;
     Object varValue = context.getVariableResolver().resolve(varName);
-    LOG.info(varName+"="+varValue);    
+    log.info(varName+"="+varValue);
     
     if (varValue != null && !"".equals(varValue) && 
         !"".equals(getStringFromContext("fetchMailsSince", ""))) {
@@ -123,21 +123,21 @@ public class MailEntityProcessor extends EntityProcessorBase {
       try {
         tmp = sinceDateParser.parse((String)varValue);
         if (tmp.getTime() == 0) {
-          LOG.info("Ignoring initial value "+varValue+" for "+varName+
+          log.info("Ignoring initial value "+varValue+" for "+varName+
               " in favor of fetchMailsSince config parameter");
           tmp = null; // don't use this value
         }
       } catch (ParseException e) {
         // probably ok to ignore this since we have other options below
         // as we're just trying to figure out if the date is 0
-        LOG.warn("Failed to parse "+varValue+" from "+varName+" due to: "+e);
+        log.warn("Failed to parse "+varValue+" from "+varName+" due to: "+e);
       }    
       
       if (tmp == null) {
         // favor fetchMailsSince in this case because the value from
         // dataimport.properties is the default/init value
         varValue = getStringFromContext("fetchMailsSince", "");
-        LOG.info("fetchMailsSince="+varValue);            
+        log.info("fetchMailsSince="+varValue);
       }
     }
     
@@ -145,7 +145,7 @@ public class MailEntityProcessor extends EntityProcessorBase {
       varName = ConfigNameConstants.IMPORTER_NS_SHORT + "."
           + DocBuilder.LAST_INDEX_TIME;
       varValue = context.getVariableResolver().resolve(varName);
-      LOG.info(varName+"="+varValue);
+      log.info(varName+"="+varValue);
     }
       
     if (varValue != null && varValue instanceof String) {
@@ -157,13 +157,13 @@ public class MailEntityProcessor extends EntityProcessorBase {
     if (lastIndexTime == null) 
       lastIndexTime = getStringFromContext("fetchMailsSince", "");
 
-    LOG.info("Using lastIndexTime "+lastIndexTime+" for mail import");
+    log.info("Using lastIndexTime "+lastIndexTime+" for mail import");
     
     this.fetchMailsSince = null;
     if (lastIndexTime != null && lastIndexTime.length() > 0) {
       try {
         fetchMailsSince = sinceDateParser.parse(lastIndexTime);
-        LOG.info("Parsed fetchMailsSince=" + lastIndexTime);
+        log.info("Parsed fetchMailsSince=" + lastIndexTime);
       } catch (ParseException e) {
         throw new DataImportHandlerException(DataImportHandlerException.SEVERE,
             "Invalid value for fetchMailSince: " + lastIndexTime, e);
@@ -247,7 +247,7 @@ public class MailEntityProcessor extends EntityProcessorBase {
       addPartToDocument(mail, row, true);
       return row;
     } catch (Exception e) {
-      LOG.error("Failed to convert message [" + mail.toString()
+      log.error("Failed to convert message [" + mail.toString()
           + "] to document due to: " + e, e);
       return null;
     }
@@ -269,7 +269,7 @@ public class MailEntityProcessor extends EntityProcessorBase {
         for (int i = 0; i < count; i++)
           addPartToDocument(mp.getBodyPart(i), row, false);
       } else {
-        LOG.warn("Multipart content is a not an instance of Multipart! Content is: "
+        log.warn("Multipart content is a not an instance of Multipart! Content is: "
             + (content != null ? content.getClass().getName() : "null")
             + ". Typically, this is due to the Java Activation JAR being loaded by the wrong classloader.");
       }
@@ -374,7 +374,7 @@ public class MailEntityProcessor extends EntityProcessorBase {
       
       if (("imap".equals(protocol) || "imaps".equals(protocol))
           && "imap.gmail.com".equals(host)) {
-        LOG.info("Consider using 'gimaps' protocol instead of '" + protocol
+        log.info("Consider using 'gimaps' protocol instead of '" + protocol
             + "' for enabling GMail specific extensions for " + host);
       }
       
@@ -399,14 +399,14 @@ public class MailEntityProcessor extends EntityProcessorBase {
       } else {
         mailbox.connect(host, user, password);
       }
-      LOG.info("Connected to " + user + "'s mailbox on " + host);
+      log.info("Connected to " + user + "'s mailbox on " + host);
       
       return true;
     } catch (MessagingException e) {      
       String errMsg = String.format(Locale.ENGLISH,
           "Failed to connect to %s server %s as user %s due to: %s", protocol,
           host, user, e.toString());
-      LOG.error(errMsg, e);
+      log.error(errMsg, e);
       throw new DataImportHandlerException(DataImportHandlerException.SEVERE,
           errMsg, e);
     }
@@ -431,7 +431,7 @@ public class MailEntityProcessor extends EntityProcessorBase {
   }
   
   private void logConfig() {
-    if (!LOG.isInfoEnabled()) return;
+    if (!log.isInfoEnabled()) return;
     
     String lineSep = System.getProperty("line.separator"); 
     
@@ -474,7 +474,7 @@ public class MailEntityProcessor extends EntityProcessorBase {
         .append(lineSep);
     config.append("includeSharedFolders : ").append(includeSharedFolders)
         .append(lineSep);
-    LOG.info(config.toString());
+    log.info(config.toString());
   }
   
   class FolderIterator implements Iterator<Folder> {
@@ -515,22 +515,22 @@ public class MailEntityProcessor extends EntityProcessorBase {
               hasMessages = (next.getType() & Folder.HOLDS_MESSAGES) != 0;
               next.open(Folder.READ_ONLY);
               lastFolder = next;
-              LOG.info("Opened folder : " + fullName);
+              log.info("Opened folder : " + fullName);
             }
             if (recurse && ((next.getType() & Folder.HOLDS_FOLDERS) != 0)) {
               Folder[] children = next.list();
-              LOG.info("Added its children to list  : ");
+              log.info("Added its children to list  : ");
               for (int i = children.length - 1; i >= 0; i--) {
                 folders.add(0, children[i]);
-                LOG.info("child name : " + children[i].getFullName());
+                log.info("child name : " + children[i].getFullName());
               }
-              if (children.length == 0) LOG.info("NO children : ");
+              if (children.length == 0) log.info("NO children : ");
             }
           }
         } while (!hasMessages);
         return next;
       } catch (Exception e) {
-        LOG.warn("Failed to read folders due to: "+e);
+        log.warn("Failed to read folders due to: "+e);
         // throw new
         // DataImportHandlerException(DataImportHandlerException.SEVERE,
         // "Folder open failed", e);
@@ -568,12 +568,12 @@ public class MailEntityProcessor extends EntityProcessorBase {
       try {
         Folder[] ufldrs = mailbox.getUserNamespaces(null);
         if (ufldrs != null) {
-          LOG.info("Found " + ufldrs.length + " user namespace folders");
+          log.info("Found " + ufldrs.length + " user namespace folders");
           for (Folder ufldr : ufldrs)
             folders.add(ufldr);
         }
       } catch (MessagingException me) {
-        LOG.warn("Messaging exception retrieving user namespaces: "
+        log.warn("Messaging exception retrieving user namespaces: "
             + me.getMessage());
       }
     }
@@ -582,12 +582,12 @@ public class MailEntityProcessor extends EntityProcessorBase {
       try {
         Folder[] sfldrs = mailbox.getSharedNamespaces();
         if (sfldrs != null) {
-          LOG.info("Found " + sfldrs.length + " shared namespace folders");
+          log.info("Found " + sfldrs.length + " shared namespace folders");
           for (Folder sfldr : sfldrs)
             folders.add(sfldr);
         }
       } catch (MessagingException me) {
-        LOG.warn("Messaging exception retrieving shared namespaces: "
+        log.warn("Messaging exception retrieving shared namespaces: "
             + me.getMessage());
       }
     }
@@ -620,14 +620,14 @@ public class MailEntityProcessor extends EntityProcessorBase {
         this.batchSize = batchSize;
         SearchTerm st = getSearchTerm();
         
-        LOG.info("SearchTerm=" + st);
+        log.info("SearchTerm=" + st);
         
         if (st != null || folder instanceof GmailFolder) {
           doBatching = false;
           // Searching can still take a while even though we're only pulling
           // envelopes; unless you're using gmail server-side filter, which is
           // fast
-          LOG.info("Searching folder " + folder.getName() + " for messages");
+          log.info("Searching folder " + folder.getName() + " for messages");
           final RTimer searchTimer = new RTimer();
 
           // If using GMail, speed up the envelope processing by doing a
@@ -642,11 +642,11 @@ public class MailEntityProcessor extends EntityProcessorBase {
                     
           if (folder instanceof GmailFolder && fetchMailsSince != null) {
             String afterCrit = "after:" + afterFmt.format(fetchMailsSince);
-            LOG.info("Added server-side gmail filter: " + afterCrit);
+            log.info("Added server-side gmail filter: " + afterCrit);
             Message[] afterMessages = folder.search(new GmailRawSearchTerm(
                 afterCrit));
             
-            LOG.info("GMail server-side filter found " + afterMessages.length
+            log.info("GMail server-side filter found " + afterMessages.length
                 + " messages received " + afterCrit + " in folder " + folder.getName());
             
             // now pass in the server-side filtered messages to the local filter
@@ -657,11 +657,11 @@ public class MailEntityProcessor extends EntityProcessorBase {
           totalInFolder = messagesInCurBatch.length;
           folder.fetch(messagesInCurBatch, fp);
           current = 0;
-          LOG.info("Total messages : " + totalInFolder);
-          LOG.info("Search criteria applied. Batching disabled. Took {} (ms)", searchTimer.getTime());
+          log.info("Total messages : " + totalInFolder);
+          log.info("Search criteria applied. Batching disabled. Took {} (ms)", searchTimer.getTime());
         } else {
           totalInFolder = folder.getMessageCount();
-          LOG.info("Total messages : " + totalInFolder);
+          log.info("Total messages : " + totalInFolder);
           getNextBatch(batchSize, folder);
         }
       } catch (MessagingException e) {
@@ -685,8 +685,8 @@ public class MailEntityProcessor extends EntityProcessorBase {
       folder.fetch(messagesInCurBatch, fp);
       current = 0;
       currentBatch++;
-      LOG.info("Current Batch  : " + currentBatch);
-      LOG.info("Messages in this batch  : " + messagesInCurBatch.length);
+      log.info("Current Batch  : " + currentBatch);
+      log.info("Messages in this batch  : " + messagesInCurBatch.length);
     }
     
     public boolean hasNext() {
@@ -741,7 +741,7 @@ public class MailEntityProcessor extends EntityProcessorBase {
     
     @SuppressWarnings("serial")
     public SearchTerm getCustomSearch(final Folder folder) {
-      LOG.info("Building mail filter for messages in " + folder.getName()
+      log.info("Building mail filter for messages in " + folder.getName()
           + " that occur after " + sinceDateParser.format(since));
       return new DateTerm(ComparisonTerm.GE, since) {
         private int matched = 0;
@@ -761,15 +761,15 @@ public class MailEntityProcessor extends EntityProcessorBase {
             } else {
               String msgDateStr = (msgDate != null) ? sinceDateParser.format(msgDate) : "null";
               String sinceDateStr = (since != null) ? sinceDateParser.format(since) : "null";
-              LOG.debug("Message " + msg.getSubject() + " was received at [" + msgDateStr
+              log.debug("Message " + msg.getSubject() + " was received at [" + msgDateStr
                   + "], since filter is [" + sinceDateStr + "]");
             }
           } catch (MessagingException e) {
-            LOG.warn("Failed to process message due to: "+e, e);
+            log.warn("Failed to process message due to: "+e, e);
           }
           
           if (seen % 100 == 0) {
-            LOG.info("Matched " + matched + " of " + seen + " messages since: "
+            log.info("Matched " + matched + " of " + seen + " messages since: "
                 + sinceDateParser.format(since));
           }
           

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/BinURLDataSource.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/BinURLDataSource.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/BinURLDataSource.java
index 2b3a2c1..c1b4808 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/BinURLDataSource.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/BinURLDataSource.java
@@ -36,7 +36,7 @@ import java.util.Properties;
  * @since solr 3.1
  */
 public class BinURLDataSource extends DataSource<InputStream>{
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private String baseUrl;
   private int connectionTimeout = CONNECTION_TIMEOUT;
@@ -61,14 +61,14 @@ public class BinURLDataSource extends DataSource<InputStream>{
       try {
         connectionTimeout = Integer.parseInt(cTimeout);
       } catch (NumberFormatException e) {
-        LOG.warn("Invalid connection timeout: " + cTimeout);
+        log.warn("Invalid connection timeout: " + cTimeout);
       }
     }
     if (rTimeout != null) {
       try {
         readTimeout = Integer.parseInt(rTimeout);
       } catch (NumberFormatException e) {
-        LOG.warn("Invalid read timeout: " + rTimeout);
+        log.warn("Invalid read timeout: " + rTimeout);
       }
     }
   }
@@ -79,13 +79,13 @@ public class BinURLDataSource extends DataSource<InputStream>{
     try {
       if (URIMETHOD.matcher(query).find()) url = new URL(query);
       else url = new URL(baseUrl + query);
-      LOG.debug("Accessing URL: " + url.toString());
+      log.debug("Accessing URL: " + url.toString());
       URLConnection conn = url.openConnection();
       conn.setConnectTimeout(connectionTimeout);
       conn.setReadTimeout(readTimeout);
       return conn.getInputStream();
     } catch (Exception e) {
-      LOG.error("Exception thrown while getting data", e);
+      log.error("Exception thrown while getting data", e);
       wrapAndThrow (SEVERE, e, "Exception in invoking url " + url);
       return null;//unreachable
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java
index c9e997c..71ee442 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java
@@ -68,7 +68,7 @@ import static org.apache.solr.handler.dataimport.DataImporter.IMPORT_CMD;
 public class DataImportHandler extends RequestHandlerBase implements
         SolrCoreAware {
 
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private DataImporter importer;
 
@@ -107,7 +107,7 @@ public class DataImportHandler extends RequestHandlerBase implements
       debugEnabled = StrUtils.parseBool((String)initArgs.get(ENABLE_DEBUG), true);
       importer = new DataImporter(core, myName);         
     } catch (Exception e) {
-      LOG.error( DataImporter.MSG.LOAD_EXP, e);
+      log.error( DataImporter.MSG.LOAD_EXP, e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, DataImporter.MSG.LOAD_EXP, e);
     }
   }
@@ -257,7 +257,7 @@ public class DataImportHandler extends RequestHandlerBase implements
           try {
             return super.upload(document);
           } catch (RuntimeException e) {
-            LOG.error("Exception while adding: " + document, e);
+            log.error("Exception while adding: " + document, e);
             return false;
           }
         }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java
index 4825fd1..d610d66 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java
@@ -74,8 +74,8 @@ public class DataImporter {
     IDLE, RUNNING_FULL_DUMP, RUNNING_DELTA_DUMP, JOB_FAILED
   }
 
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  private static final XMLErrorLogger XMLLOG = new XMLErrorLogger(LOG);
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final XMLErrorLogger XMLLOG = new XMLErrorLogger(log);
 
   private Status status = Status.IDLE;
   private DIHConfiguration config;
@@ -125,7 +125,7 @@ public class DataImporter {
         } else if(dataconfigFile!=null) {
           is = new InputSource(core.getResourceLoader().openResource(dataconfigFile));
           is.setSystemId(SystemIdResolver.createSystemIdFromResourceName(dataconfigFile));
-          LOG.info("Loading DIH Configuration: " + dataconfigFile);
+          log.info("Loading DIH Configuration: " + dataconfigFile);
         }
         if(is!=null) {          
           config = loadDataConfig(is);
@@ -143,12 +143,12 @@ public class DataImporter {
             if (name.equals("datasource")) {
               success = true;
               NamedList dsConfig = (NamedList) defaultParams.getVal(position);
-              LOG.info("Getting configuration for Global Datasource...");              
+              log.info("Getting configuration for Global Datasource...");
               Map<String,String> props = new HashMap<>();
               for (int i = 0; i < dsConfig.size(); i++) {
                 props.put(dsConfig.getName(i), dsConfig.getVal(i).toString());
               }
-              LOG.info("Adding properties to datasource: " + props);
+              log.info("Adding properties to datasource: " + props);
               dsProps.put((String) dsConfig.get("name"), props);
             }
             position++;
@@ -201,7 +201,7 @@ public class DataImporter {
           dbf.setXIncludeAware(true);
           dbf.setNamespaceAware(true);
         } catch( UnsupportedOperationException e ) {
-          LOG.warn( "XML parser doesn't support XInclude option" );
+          log.warn( "XML parser doesn't support XInclude option" );
         }
       }
       
@@ -224,7 +224,7 @@ public class DataImporter {
       }
 
       dihcfg = readFromXml(document);
-      LOG.info("Data Configuration loaded successfully");
+      log.info("Data Configuration loaded successfully");
     } catch (Exception e) {
       throw new DataImportHandlerException(SEVERE,
               "Data Config problem: " + e.getMessage(), e);
@@ -414,7 +414,7 @@ public class DataImporter {
   }
 
   public void doFullImport(DIHWriter writer, RequestInfo requestParams) {
-    LOG.info("Starting Full Import");
+    log.info("Starting Full Import");
     setStatus(Status.RUNNING_FULL_DUMP);
     try {
       DIHProperties dihPropWriter = createPropertyWriter();
@@ -425,7 +425,7 @@ public class DataImporter {
       if (!requestParams.isDebug())
         cumulativeStatistics.add(docBuilder.importStatistics);
     } catch (Exception e) {
-      SolrException.log(LOG, "Full Import failed", e);
+      SolrException.log(log, "Full Import failed", e);
       docBuilder.handleError("Full Import failed", e);
     } finally {
       setStatus(Status.IDLE);
@@ -442,7 +442,7 @@ public class DataImporter {
   }
 
   public void doDeltaImport(DIHWriter writer, RequestInfo requestParams) {
-    LOG.info("Starting Delta Import");
+    log.info("Starting Delta Import");
     setStatus(Status.RUNNING_DELTA_DUMP);
     try {
       DIHProperties dihPropWriter = createPropertyWriter();
@@ -453,7 +453,7 @@ public class DataImporter {
       if (!requestParams.isDebug())
         cumulativeStatistics.add(docBuilder.importStatistics);
     } catch (Exception e) {
-      LOG.error("Delta Import Failed", e);
+      log.error("Delta Import Failed", e);
       docBuilder.handleError("Delta Import Failed", e);
     } finally {
       setStatus(Status.IDLE);
@@ -475,7 +475,7 @@ public class DataImporter {
       return;
     }
     if (!importLock.tryLock()){
-      LOG.warn("Import command failed . another import is running");      
+      log.warn("Import command failed . another import is running");
       return;
     }
     try {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DateFormatTransformer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DateFormatTransformer.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DateFormatTransformer.java
index f48cbea..6da9cc1 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DateFormatTransformer.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DateFormatTransformer.java
@@ -39,7 +39,7 @@ import org.slf4j.LoggerFactory;
  */
 public class DateFormatTransformer extends Transformer {
   private Map<String, SimpleDateFormat> fmtCache = new HashMap<>();
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   @Override
   @SuppressWarnings("unchecked")
@@ -80,7 +80,7 @@ public class DateFormatTransformer extends Transformer {
           }
         }
       } catch (ParseException e) {
-        LOG.warn("Could not parse a Date field ", e);
+        log.warn("Could not parse a Date field ", e);
       }
     }
     return aRow;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
index f6a62aa..164cf70 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
@@ -50,7 +50,7 @@ import java.util.concurrent.atomic.AtomicLong;
  */
 public class DocBuilder {
 
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private static final AtomicBoolean WARNED_ABOUT_INDEX_TIME_BOOSTS = new AtomicBoolean();
 
   private static final Date EPOCH = new Date(0);
@@ -265,7 +265,7 @@ public class DocBuilder {
         statusMessages.put(DataImporter.MSG.TOTAL_FAILED_DOCS, ""+ importStatistics.failedDocCount.get());
 
       statusMessages.put("Time taken", getTimeElapsedSince(startTime.get()));
-      LOG.info("Time taken = " + getTimeElapsedSince(startTime.get()));
+      log.info("Time taken = " + getTimeElapsedSince(startTime.get()));
     } catch(Exception e)
     {
       throw new RuntimeException(e);
@@ -294,7 +294,7 @@ public class DocBuilder {
 
   @SuppressWarnings("unchecked")
   private void finish(Map<String,Object> lastIndexTimeProps) {
-    LOG.info("Import completed successfully");
+    log.info("Import completed successfully");
     statusMessages.put("", "Indexing completed. Added/Updated: "
             + importStatistics.docCount + " documents. Deleted "
             + importStatistics.deletedDocCount + " documents.");
@@ -307,7 +307,7 @@ public class DocBuilder {
     try {
       propWriter.persist(lastIndexTimeProps);
     } catch (Exception e) {
-      LOG.error("Could not write property file", e);
+      log.error("Could not write property file", e);
       statusMessages.put("error", "Could not write property file. Delta imports will not work. " +
           "Make sure your conf directory is writable");
     }
@@ -340,7 +340,7 @@ public class DocBuilder {
     }
 
     addStatusMessage("Identifying Delta");
-    LOG.info("Starting delta collection.");
+    log.info("Starting delta collection.");
     Set<Map<String, Object>> deletedKeys = new HashSet<>();
     Set<Map<String, Object>> allPks = collectDelta(currentEntityProcessorWrapper, resolver, deletedKeys);
     if (stop.get())
@@ -369,12 +369,12 @@ public class DocBuilder {
     }
 
     if (!stop.get()) {
-      LOG.info("Delta Import completed successfully");
+      log.info("Delta Import completed successfully");
     }
   }
 
   private void deleteAll(Set<Map<String, Object>> deletedKeys) {
-    LOG.info("Deleting stale documents ");
+    log.info("Deleting stale documents ");
     Iterator<Map<String, Object>> iter = deletedKeys.iterator();
     while (iter.hasNext()) {
       Map<String, Object> map = iter.next();
@@ -385,7 +385,7 @@ public class DocBuilder {
         key = map.get(keyName);
       }
       if(key == null) {
-        LOG.warn("no key was available for deleted pk query. keyName = " + keyName);
+        log.warn("no key was available for deleted pk query. keyName = " + keyName);
         continue;
       }
       writer.deleteDoc(key);
@@ -483,7 +483,7 @@ public class DocBuilder {
             if (seenDocCount <= reqParams.getStart())
               continue;
             if (seenDocCount > reqParams.getStart() + reqParams.getRows()) {
-              LOG.info("Indexing stopped at docCount = " + importStatistics.docCount);
+              log.info("Indexing stopped at docCount = " + importStatistics.docCount);
               break;
             }
           }
@@ -548,7 +548,7 @@ public class DocBuilder {
               importStatistics.skipDocCount.getAndIncrement();
               doc = null;
             } else {
-              SolrException.log(LOG, "Exception while processing: "
+              SolrException.log(log, "Exception while processing: "
                       + epw.getEntity().getName() + " document : " + doc, e);
             }
             if (e.getErrCode() == DataImportHandlerException.SEVERE)
@@ -620,9 +620,9 @@ public class DocBuilder {
     if (value != null) {
       String message = "Ignoring document boost: " + value + " as index-time boosts are not supported anymore";
       if (WARNED_ABOUT_INDEX_TIME_BOOSTS.compareAndSet(false, true)) {
-        LOG.warn(message);
+        log.warn(message);
       } else {
-        LOG.debug(message);
+        log.debug(message);
       }
     }
 
@@ -759,7 +759,7 @@ public class DocBuilder {
                   "deltaQuery has no column to resolve to declared primary key pk='%s'",
                   pk));
     }
-    LOG.info(String.format(Locale.ROOT,
+    log.info(String.format(Locale.ROOT,
         "Resolving deltaQuery column '%s' to match entity's declared pk '%s'",
         resolvedPk, pk));
     return resolvedPk;
@@ -796,7 +796,7 @@ public class DocBuilder {
     
     // identifying the modified rows for this entity
     Map<String, Map<String, Object>> deltaSet = new HashMap<>();
-    LOG.info("Running ModifiedRowKey() for Entity: " + epw.getEntity().getName());
+    log.info("Running ModifiedRowKey() for Entity: " + epw.getEntity().getName());
     //get the modified rows in this entity
     String pk = epw.getEntity().getPk();
     while (true) {
@@ -844,8 +844,8 @@ public class DocBuilder {
         return new HashSet();
     }
 
-    LOG.info("Completed ModifiedRowKey for Entity: " + epw.getEntity().getName() + " rows obtained : " + deltaSet.size());
-    LOG.info("Completed DeletedRowKey for Entity: " + epw.getEntity().getName() + " rows obtained : " + deletedSet.size());
+    log.info("Completed ModifiedRowKey for Entity: " + epw.getEntity().getName() + " rows obtained : " + deltaSet.size());
+    log.info("Completed DeletedRowKey for Entity: " + epw.getEntity().getName() + " rows obtained : " + deletedSet.size());
 
     myModifiedPks.addAll(deltaSet.values());
     Set<Map<String, Object>> parentKeyList = new HashSet<>();
@@ -870,7 +870,7 @@ public class DocBuilder {
           return new HashSet();
       }
     }
-    LOG.info("Completed parentDeltaQuery for Entity: " + epw.getEntity().getName());
+    log.info("Completed parentDeltaQuery for Entity: " + epw.getEntity().getName());
     if (epw.getEntity().isDocRoot())
       deletedRows.addAll(deletedSet);
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FieldReaderDataSource.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FieldReaderDataSource.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FieldReaderDataSource.java
index edc8589..571c280 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FieldReaderDataSource.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FieldReaderDataSource.java
@@ -48,7 +48,7 @@ import java.util.Properties;
  * @since 1.4
  */
 public class FieldReaderDataSource extends DataSource<Reader> {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   protected VariableResolver vr;
   protected String dataField;
   private String encoding;
@@ -77,7 +77,7 @@ public class FieldReaderDataSource extends DataSource<Reader> {
         // so let us just check it
         return readCharStream(clob);
       } catch (Exception e) {
-        LOG.info("Unable to get data from CLOB");
+        log.info("Unable to get data from CLOB");
         return null;
 
       }
@@ -87,7 +87,7 @@ public class FieldReaderDataSource extends DataSource<Reader> {
       try {
         return getReader(blob);
       } catch (Exception e) {
-        LOG.info("Unable to get data from BLOB");
+        log.info("Unable to get data from BLOB");
         return null;
 
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FieldStreamDataSource.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FieldStreamDataSource.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FieldStreamDataSource.java
index 42ba5a6..ba7ca5d 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FieldStreamDataSource.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FieldStreamDataSource.java
@@ -45,7 +45,7 @@ import org.slf4j.LoggerFactory;
  * @since 3.1
  */
 public class FieldStreamDataSource extends DataSource<InputStream> {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   protected VariableResolver vr;
   protected String dataField;
   private EntityProcessorWrapper wrapper;
@@ -67,7 +67,7 @@ public class FieldStreamDataSource extends DataSource<InputStream> {
       try {
         return blob.getBinaryStream();
       } catch (SQLException sqle) {
-        LOG.info("Unable to get data from BLOB");
+        log.info("Unable to get data from BLOB");
         return null;
       }
     } else if (o instanceof byte[]) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FileDataSource.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FileDataSource.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FileDataSource.java
index 2db5804..920472e 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FileDataSource.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FileDataSource.java
@@ -58,7 +58,7 @@ public class FileDataSource extends DataSource<Reader> {
    */
   protected String encoding = null;
 
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   @Override
   public void init(Context context, Properties initProps) {
@@ -102,13 +102,13 @@ public class FileDataSource extends DataSource<Reader> {
         File basePathFile;
         if (basePath == null) {
           basePathFile = new File(".").getAbsoluteFile(); 
-          LOG.warn("FileDataSource.basePath is empty. " +
+          log.warn("FileDataSource.basePath is empty. " +
               "Resolving to: " + basePathFile.getAbsolutePath());
         } else {
           basePathFile = new File(basePath);
           if (!basePathFile.isAbsolute()) {
             basePathFile = basePathFile.getAbsoluteFile();
-            LOG.warn("FileDataSource.basePath is not absolute. Resolving to: "
+            log.warn("FileDataSource.basePath is not absolute. Resolving to: "
                 + basePathFile.getAbsolutePath());
           }
         }
@@ -117,7 +117,7 @@ public class FileDataSource extends DataSource<Reader> {
       }
 
       if (file.isFile() && file.canRead()) {
-        LOG.debug("Accessing File: " + file.getAbsolutePath());
+        log.debug("Accessing File: " + file.getAbsolutePath());
         return file;
       } else {
         throw new FileNotFoundException("Could not find file: " + query + 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java
index b17650a..a8eed55 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java
@@ -51,7 +51,7 @@ import java.util.concurrent.TimeUnit;
  */
 public class JdbcDataSource extends
         DataSource<Iterator<Map<String, Object>>> {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   protected Callable<Connection> factory;
 
@@ -87,7 +87,7 @@ public class JdbcDataSource extends
         if (batchSize == -1)
           batchSize = Integer.MIN_VALUE;
       } catch (NumberFormatException e) {
-        LOG.warn("Invalid batch size: " + bsz);
+        log.warn("Invalid batch size: " + bsz);
       }
     }
 
@@ -172,7 +172,7 @@ public class JdbcDataSource extends
     return factory = new Callable<Connection>() {
       @Override
       public Connection call() throws Exception {
-        LOG.info("Creating a connection for entity "
+        log.info("Creating a connection for entity "
                 + context.getEntityAttribute(DataImporter.NAME) + " with URL: "
                 + url);
         long start = System.nanoTime();
@@ -199,13 +199,13 @@ public class JdbcDataSource extends
             try {
               c.close();
             } catch (SQLException e2) {
-              LOG.warn("Exception closing connection during cleanup", e2);
+              log.warn("Exception closing connection during cleanup", e2);
             }
 
             throw new DataImportHandlerException(SEVERE, "Exception initializing SQL connection", e);
           }
         }
-        LOG.info("Time taken for getConnection(): "
+        log.info("Time taken for getConnection(): "
             + TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS));
         return c;
       }
@@ -289,7 +289,7 @@ public class JdbcDataSource extends
   }
 
   private void logError(String msg, Exception e) {
-    LOG.warn(msg, e);
+    log.warn(msg, e);
   }
 
   protected List<String> readFieldNames(ResultSetMetaData metaData)
@@ -316,10 +316,10 @@ public class JdbcDataSource extends
       try {
         Connection c = getConnection();
         stmt = createStatement(c, batchSize, maxRows);
-        LOG.debug("Executing SQL: " + query);
+        log.debug("Executing SQL: " + query);
         long start = System.nanoTime();
         resultSet = executeStatement(stmt, query);
-        LOG.trace("Time taken for sql :"
+        log.trace("Time taken for sql :"
                 + TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS));
         setColNames(resultSet);
       } catch (Exception e) {
@@ -541,7 +541,7 @@ public class JdbcDataSource extends
   protected void finalize() throws Throwable {
     try {
       if(!isClosed){
-        LOG.error("JdbcDataSource was not closed prior to finalize(), indicates a bug -- POSSIBLE RESOURCE LEAK!!!");
+        log.error("JdbcDataSource was not closed prior to finalize(), indicates a bug -- POSSIBLE RESOURCE LEAK!!!");
         close();
       }
     } finally {
@@ -575,7 +575,7 @@ public class JdbcDataSource extends
         conn.close();
       }
     } catch (Exception e) {
-      LOG.error("Ignoring Error when closing connection", e);
+      log.error("Ignoring Error when closing connection", e);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/LogTransformer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/LogTransformer.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/LogTransformer.java
index 17687e1..66c525e 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/LogTransformer.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/LogTransformer.java
@@ -33,7 +33,7 @@ import java.util.Map;
  * @since solr 1.4
  */
 public class LogTransformer extends Transformer {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   @Override
   public Object transformRow(Map<String, Object> row, Context ctx) {
@@ -43,20 +43,20 @@ public class LogTransformer extends Transformer {
     if (expr == null || level == null) return row;
 
     if ("info".equals(level)) {
-      if (LOG.isInfoEnabled())
-        LOG.info(ctx.replaceTokens(expr));
+      if (log.isInfoEnabled())
+        log.info(ctx.replaceTokens(expr));
     } else if ("trace".equals(level)) {
-      if (LOG.isTraceEnabled())
-        LOG.trace(ctx.replaceTokens(expr));
+      if (log.isTraceEnabled())
+        log.trace(ctx.replaceTokens(expr));
     } else if ("warn".equals(level)) {
-      if (LOG.isWarnEnabled())
-        LOG.warn(ctx.replaceTokens(expr));
+      if (log.isWarnEnabled())
+        log.warn(ctx.replaceTokens(expr));
     } else if ("error".equals(level)) {
-      if (LOG.isErrorEnabled())
-        LOG.error(ctx.replaceTokens(expr));
+      if (log.isErrorEnabled())
+        log.error(ctx.replaceTokens(expr));
     } else if ("debug".equals(level)) {
-      if (LOG.isDebugEnabled())
-        LOG.debug(ctx.replaceTokens(expr));
+      if (log.isDebugEnabled())
+        log.debug(ctx.replaceTokens(expr));
     }
 
     return row;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RegexTransformer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RegexTransformer.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RegexTransformer.java
index 8e0d249..7a919de 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RegexTransformer.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RegexTransformer.java
@@ -40,7 +40,7 @@ import java.util.regex.Pattern;
  * @see Pattern
  */
 public class RegexTransformer extends Transformer {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   @Override
   @SuppressWarnings("unchecked")
@@ -166,7 +166,7 @@ public class RegexTransformer extends Transformer {
               }
             }
           } catch (Exception e) {
-            LOG.warn("Parsing failed for field : " + columnName, e);
+            log.warn("Parsing failed for field : " + columnName, e);
           }
         }
         return l == null ? map: l;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrEntityProcessor.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrEntityProcessor.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrEntityProcessor.java
index 5bbb57b..7732673 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrEntityProcessor.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrEntityProcessor.java
@@ -59,7 +59,7 @@ import org.slf4j.LoggerFactory;
  */
 public class SolrEntityProcessor extends EntityProcessorBase {
   
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   public static final String SOLR_SERVER = "url";
   public static final String QUERY = "query";
@@ -118,13 +118,13 @@ public class SolrEntityProcessor extends EntityProcessorBase {
             .withHttpClient(client)
             .withResponseParser(new XMLResponseParser())
             .build();
-        LOG.info("using XMLResponseParser");
+        log.info("using XMLResponseParser");
       } else {
         // TODO: it doesn't matter for this impl when passing a client currently, but we should close this!
         solrClient = new Builder(url.toExternalForm())
             .withHttpClient(client)
             .build();
-        LOG.info("using BinaryResponseParser");
+        log.info("using BinaryResponseParser");
       }
     } catch (MalformedURLException e) {
       throw new DataImportHandlerException(DataImportHandlerException.SEVERE, e);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SqlEntityProcessor.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SqlEntityProcessor.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SqlEntityProcessor.java
index d30c670..19c6d0f 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SqlEntityProcessor.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SqlEntityProcessor.java
@@ -42,7 +42,7 @@ import java.util.regex.Pattern;
  * @since solr 1.3
  */
 public class SqlEntityProcessor extends EntityProcessorBase {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   protected DataSource<Iterator<Map<String, Object>>> dataSource;
 
@@ -61,7 +61,7 @@ public class SqlEntityProcessor extends EntityProcessorBase {
     } catch (DataImportHandlerException e) {
       throw e;
     } catch (Exception e) {
-      LOG.error( "The query failed '" + q + "'", e);
+      log.error( "The query failed '" + q + "'", e);
       throw new DataImportHandlerException(DataImportHandlerException.SEVERE, e);
     }
   }
@@ -103,7 +103,7 @@ public class SqlEntityProcessor extends EntityProcessorBase {
       String parentDeltaQuery = context.getEntityAttribute(PARENT_DELTA_QUERY);
       if (parentDeltaQuery == null)
         return null;
-      LOG.info("Running parentDeltaQuery for Entity: "
+      log.info("Running parentDeltaQuery for Entity: "
               + context.getEntityAttribute("name"));
       initQuery(context.replaceTokens(parentDeltaQuery));
     }
@@ -119,7 +119,7 @@ public class SqlEntityProcessor extends EntityProcessorBase {
       String deltaImportQuery = context.getEntityAttribute(DELTA_IMPORT_QUERY);
       if(deltaImportQuery != null) return deltaImportQuery;
     }
-    LOG.warn("'deltaImportQuery' attribute is not specified for entity : "+ entityName);
+    log.warn("'deltaImportQuery' attribute is not specified for entity : "+ entityName);
     return getDeltaImportQuery(queryString);
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/TemplateTransformer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/TemplateTransformer.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/TemplateTransformer.java
index a5faa7e..f655edd 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/TemplateTransformer.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/TemplateTransformer.java
@@ -48,7 +48,7 @@ import org.slf4j.LoggerFactory;
  */
 public class TemplateTransformer extends Transformer {
 
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private Map<String ,List<String>> templateVsVars = new HashMap<>();
 
   @Override
@@ -76,7 +76,7 @@ public class TemplateTransformer extends Transformer {
       }
       for (String v : variables) {
         if (resolver.resolve(v) == null) {
-          LOG.warn("Unable to resolve variable: " + v
+          log.warn("Unable to resolve variable: " + v
                   + " while parsing expression: " + expr);
           resolvable = false;
         }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/URLDataSource.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/URLDataSource.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/URLDataSource.java
index 9271679..145ffc4 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/URLDataSource.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/URLDataSource.java
@@ -41,7 +41,7 @@ import java.util.regex.Pattern;
  * @since solr 1.4
  */
 public class URLDataSource extends DataSource<Reader> {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private String baseUrl;
 
@@ -72,14 +72,14 @@ public class URLDataSource extends DataSource<Reader> {
       try {
         connectionTimeout = Integer.parseInt(cTimeout);
       } catch (NumberFormatException e) {
-        LOG.warn("Invalid connection timeout: " + cTimeout);
+        log.warn("Invalid connection timeout: " + cTimeout);
       }
     }
     if (rTimeout != null) {
       try {
         readTimeout = Integer.parseInt(rTimeout);
       } catch (NumberFormatException e) {
-        LOG.warn("Invalid read timeout: " + rTimeout);
+        log.warn("Invalid read timeout: " + rTimeout);
       }
     }
   }
@@ -91,7 +91,7 @@ public class URLDataSource extends DataSource<Reader> {
       if (URIMETHOD.matcher(query).find()) url = new URL(query);
       else url = new URL(baseUrl + query);
 
-      LOG.debug("Accessing URL: " + url.toString());
+      log.debug("Accessing URL: " + url.toString());
 
       URLConnection conn = url.openConnection();
       conn.setConnectTimeout(connectionTimeout);
@@ -112,7 +112,7 @@ public class URLDataSource extends DataSource<Reader> {
       DataImporter.QUERY_COUNT.get().incrementAndGet();
       return new InputStreamReader(in, enc);
     } catch (Exception e) {
-      LOG.error("Exception thrown while getting data", e);
+      log.error("Exception thrown while getting data", e);
       throw new DataImportHandlerException(DataImportHandlerException.SEVERE,
               "Exception in invoking url " + url, e);
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java
index 70b9aba..c93b581 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java
@@ -54,8 +54,8 @@ import java.util.concurrent.atomic.AtomicReference;
  * @since solr 1.3
  */
 public class XPathEntityProcessor extends EntityProcessorBase {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  private static final XMLErrorLogger xmllog = new XMLErrorLogger(LOG);
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final XMLErrorLogger xmllog = new XMLErrorLogger(log);
 
   private static final Map<String, Object> END_MARKER = new HashMap<>();
   
@@ -136,7 +136,7 @@ public class XPathEntityProcessor extends EntityProcessorBase {
           // some XML parsers are broken and don't close the byte stream (but they should according to spec)
           IOUtils.closeQuietly(xsltSource.getInputStream());
         }
-        LOG.info("Using xslTransformer: "
+        log.info("Using xslTransformer: "
                         + xslTransformer.getClass().getName());
       } catch (Exception e) {
         throw new DataImportHandlerException(SEVERE,
@@ -293,10 +293,10 @@ public class XPathEntityProcessor extends EntityProcessorBase {
         if (ABORT.equals(onError)) {
           wrapAndThrow(SEVERE, e);
         } else if (SKIP.equals(onError)) {
-          if (LOG.isDebugEnabled()) LOG.debug("Skipping url : " + s, e);
+          if (log.isDebugEnabled()) log.debug("Skipping url : " + s, e);
           wrapAndThrow(DataImportHandlerException.SKIP, e);
         } else {
-          LOG.warn("Failed for url : " + s, e);
+          log.warn("Failed for url : " + s, e);
           rowIterator = Collections.EMPTY_LIST.iterator();
           return;
         }
@@ -313,7 +313,7 @@ public class XPathEntityProcessor extends EntityProcessorBase {
           } else if (SKIP.equals(onError)) {
             wrapAndThrow(DataImportHandlerException.SKIP, e);
           } else {
-            LOG.warn("Failed for url : " + s, e);
+            log.warn("Failed for url : " + s, e);
             rowIterator = Collections.EMPTY_LIST.iterator();
             return;
           }
@@ -330,12 +330,12 @@ public class XPathEntityProcessor extends EntityProcessorBase {
           if (ABORT.equals(onError)) {
             wrapAndThrow(SEVERE, e, msg);
           } else if (SKIP.equals(onError)) {
-            LOG.warn(msg, e);
+            log.warn(msg, e);
             Map<String, Object> map = new HashMap<>();
             map.put(DocBuilder.SKIP_DOC, Boolean.TRUE);
             rows.add(map);
           } else if (CONTINUE.equals(onError)) {
-            LOG.warn(msg, e);
+            log.warn(msg, e);
           }
         }
         rowIterator = rows.iterator();
@@ -457,7 +457,7 @@ public class XPathEntityProcessor extends EntityProcessorBase {
         try {
           while (!blockingQueue.offer(row, blockingQueueTimeOut, blockingQueueTimeOutUnits)) {
             if (isEnd.get()) return;
-            LOG.debug("Timeout elapsed writing records.  Perhaps buffer size should be increased.");
+            log.debug("Timeout elapsed writing records.  Perhaps buffer size should be increased.");
           }
         } catch (InterruptedException e) {
           return;
@@ -488,10 +488,10 @@ public class XPathEntityProcessor extends EntityProcessorBase {
           try {
             row = blockingQueue.poll(blockingQueueTimeOut, blockingQueueTimeOutUnits);
             if (row == null) {
-              LOG.debug("Timeout elapsed reading records.");
+              log.debug("Timeout elapsed reading records.");
             }
           } catch (InterruptedException e) {
-            LOG.debug("Caught InterruptedException while waiting for row.  Aborting.");
+            log.debug("Caught InterruptedException while waiting for row.  Aborting.");
             isEnd.set(true);
             return null;
           }
@@ -507,7 +507,7 @@ public class XPathEntityProcessor extends EntityProcessorBase {
             } else if (SKIP.equals(onError)) {
               wrapAndThrow(DataImportHandlerException.SKIP, exp.get());
             } else {
-              LOG.warn(msg, exp.get());
+              log.warn(msg, exp.get());
             }
           }
           return null;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java
index 311215c..13daf49 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java
@@ -57,8 +57,8 @@ import org.slf4j.LoggerFactory;
  * @since solr 1.3
  */
 public class XPathRecordReader {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  private static final XMLErrorLogger XMLLOG = new XMLErrorLogger(LOG);
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final XMLErrorLogger XMLLOG = new XMLErrorLogger(log);
 
   private Node rootNode = new Node("/", null);
 
@@ -645,7 +645,7 @@ public class XPathRecordReader {
     } catch (IllegalArgumentException ex) {
      // Other implementations will likely throw this exception since "reuse-instance"
      // is implementation specific.
-      LOG.debug("Unable to set the 'reuse-instance' property for the input chain: " + factory);
+      log.debug("Unable to set the 'reuse-instance' property for the input chain: " + factory);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/DIHConfiguration.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/DIHConfiguration.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/DIHConfiguration.java
index 2b91042..0ba13ea 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/DIHConfiguration.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/DIHConfiguration.java
@@ -47,7 +47,7 @@ import org.w3c.dom.Element;
  * @since solr 1.3
  */
 public class DIHConfiguration {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   // TODO - remove from here and add it to entity
   private final String deleteQuery;
@@ -106,7 +106,7 @@ public class DIHConfiguration {
       SchemaField sf = entry.getValue();
       if (!fields.containsKey(sf.getName())) {
         if (sf.isRequired()) {
-          LOG.info(sf.getName() + " is a required field in SolrSchema . But not found in DataConfig");
+          log.info(sf.getName() + " is a required field in SolrSchema . But not found in DataConfig");
         }
       }
     }
@@ -114,7 +114,7 @@ public class DIHConfiguration {
       EntityField fld = entry.getValue();
       SchemaField field = getSchemaField(fld.getName());
       if (field == null && !isSpecialCommand(fld.getName())) {
-        LOG.info("The field :" + fld.getName() + " present in DataConfig does not have a counterpart in Solr Schema");
+        log.info("The field :" + fld.getName() + " present in DataConfig does not have a counterpart in Solr Schema");
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java
index 858b9cc..7e08f0e 100644
--- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java
+++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java
@@ -47,7 +47,7 @@ import java.util.Properties;
  */
 public class TestSolrEntityProcessorEndToEnd extends AbstractDataImportHandlerTestCase {
   
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   private static final String SOLR_CONFIG = "dataimport-solrconfig.xml";
   private static final String SOLR_SCHEMA = "dataimport-schema.xml";
@@ -136,7 +136,7 @@ public class TestSolrEntityProcessorEndToEnd extends AbstractDataImportHandlerTe
     try {
       deleteCore();
     } catch (Exception e) {
-      LOG.error("Error deleting core", e);
+      log.error("Error deleting core", e);
     }
     jetty.stop();
     instance.tearDown();
@@ -151,7 +151,7 @@ public class TestSolrEntityProcessorEndToEnd extends AbstractDataImportHandlerTe
       addDocumentsToSolr(SOLR_DOCS);
       runFullImport(generateDIHConfig("query='*:*' rows='2' fl='id,desc' onError='skip'", false));
     } catch (Exception e) {
-      LOG.error(e.getMessage(), e);
+      log.error(e.getMessage(), e);
       fail(e.getMessage());
     }
     
@@ -169,7 +169,7 @@ public class TestSolrEntityProcessorEndToEnd extends AbstractDataImportHandlerTe
       map.put("rows", "50");
       runFullImport(generateDIHConfig("query='*:*' fq='desc:Description1*,desc:Description*2' rows='2'", false), map);
     } catch (Exception e) {
-      LOG.error(e.getMessage(), e);
+      log.error(e.getMessage(), e);
       fail(e.getMessage());
     }
     
@@ -184,7 +184,7 @@ public class TestSolrEntityProcessorEndToEnd extends AbstractDataImportHandlerTe
       addDocumentsToSolr(generateSolrDocuments(7));
       runFullImport(generateDIHConfig("query='*:*' fl='id' rows='2'"+(random().nextBoolean() ?" cursorMark='true' sort='id asc'":""), false));
     } catch (Exception e) {
-      LOG.error(e.getMessage(), e);
+      log.error(e.getMessage(), e);
       fail(e.getMessage());
     }
     
@@ -221,7 +221,7 @@ public class TestSolrEntityProcessorEndToEnd extends AbstractDataImportHandlerTe
       addDocumentsToSolr(DOCS);
       runFullImport(getDihConfigTagsInnerEntity());
     } catch (Exception e) {
-      LOG.error(e.getMessage(), e);
+      log.error(e.getMessage(), e);
       fail(e.getMessage());
     } finally {
       MockDataSource.clearCache();
@@ -244,7 +244,7 @@ public class TestSolrEntityProcessorEndToEnd extends AbstractDataImportHandlerTe
     try {
       runFullImport(generateDIHConfig("query='*:*' rows='2' fl='id,desc' onError='skip'", true /* use dead server */));
     } catch (Exception e) {
-      LOG.error(e.getMessage(), e);
+      log.error(e.getMessage(), e);
       fail(e.getMessage());
     }
     
@@ -258,7 +258,7 @@ public class TestSolrEntityProcessorEndToEnd extends AbstractDataImportHandlerTe
       runFullImport(generateDIHConfig("query='bogus:3' rows='2' fl='id,desc' onError='"+
             (random().nextBoolean() ? "abort" : "justtogetcoverage")+"'", false));
     } catch (Exception e) {
-      LOG.error(e.getMessage(), e);
+      log.error(e.getMessage(), e);
       fail(e.getMessage());
     }
     
@@ -278,7 +278,7 @@ public class TestSolrEntityProcessorEndToEnd extends AbstractDataImportHandlerTe
       runFullImport(generateDIHConfig(attrs,
             false));
     } catch (Exception e) {
-      LOG.error(e.getMessage(), e);
+      log.error(e.getMessage(), e);
       fail(e.getMessage());
     }
     

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java b/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
index 14f4dc9..5fdec0f 100644
--- a/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
+++ b/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
@@ -89,17 +89,17 @@ public class JettySolrRunner {
   private final JettyConfig config;
   private final String solrHome;
   private final Properties nodeProperties;
-  
+
   private volatile boolean startedBefore = false;
 
   private LinkedList<FilterHolder> extraFilters;
 
   private static final String excludePatterns = "/css/.+,/js/.+,/img/.+,/tpl/.+";
-  
+
   private int proxyPort = -1;
 
   public static class DebugFilter implements Filter {
-    public final static Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+    private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
     private AtomicLong nRequests = new AtomicLong();
     

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/cloud/LockTree.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/LockTree.java b/solr/core/src/java/org/apache/solr/cloud/LockTree.java
index 8ae7f75..af0d30e 100644
--- a/solr/core/src/java/org/apache/solr/cloud/LockTree.java
+++ b/solr/core/src/java/org/apache/solr/cloud/LockTree.java
@@ -36,7 +36,7 @@ import org.slf4j.LoggerFactory;
  * but internally it is synchronized so that only one thread can perform any operation.
  */
 public class LockTree {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private final Node root = new Node(null, LockLevel.CLUSTER, null);
 
   public void clear() {
@@ -141,7 +141,7 @@ public class LockTree {
     void unlock(LockImpl lockObject) {
       if (myLock == lockObject) myLock = null;
       else {
-        LOG.info("Unlocked multiple times : {}", lockObject.toString());
+        log.info("Unlocked multiple times : {}", lockObject.toString());
       }
     }
 
@@ -171,7 +171,7 @@ public class LockTree {
 
     void clear() {
       if (myLock != null) {
-        LOG.warn("lock_is_leaked at" + constructPath(new LinkedList<>()));
+        log.warn("lock_is_leaked at" + constructPath(new LinkedList<>()));
         myLock = null;
       }
       for (Node node : children.values()) node.clear();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/cloud/OverseerTaskQueue.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskQueue.java b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskQueue.java
index 3df6501..66a31c5 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskQueue.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskQueue.java
@@ -43,7 +43,7 @@ import org.slf4j.LoggerFactory;
  * This is inefficient!  But the API on this class is kind of muddy..
  */
 public class OverseerTaskQueue extends ZkDistributedQueue {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   private static final String RESPONSE_PREFIX = "qnr-" ;
 
@@ -70,7 +70,7 @@ public class OverseerTaskQueue extends ZkDistributedQueue {
           if (data != null) {
             ZkNodeProps message = ZkNodeProps.load(data);
             if (message.containsKey(requestIdKey)) {
-              LOG.debug("Looking for {}, found {}", message.get(requestIdKey), requestId);
+              log.debug("Looking for {}, found {}", message.get(requestIdKey), requestId);
               if(message.get(requestIdKey).equals(requestId)) return true;
             }
           }
@@ -96,7 +96,7 @@ public class OverseerTaskQueue extends ZkDistributedQueue {
       if (zookeeper.exists(responsePath, true)) {
         zookeeper.setData(responsePath, event.getBytes(), true);
       } else {
-        LOG.info("Response ZK path: " + responsePath + " doesn't exist."
+        log.info("Response ZK path: " + responsePath + " doesn't exist."
             + "  Requestor may have disconnected from ZooKeeper");
       }
       try {
@@ -136,7 +136,7 @@ public class OverseerTaskQueue extends ZkDistributedQueue {
         return;
       }
       // If latchEventType is not null, only fire if the type matches
-      LOG.debug("{} fired on path {} state {} latchEventType {}", event.getType(), event.getPath(), event.getState(), latchEventType);
+      log.debug("{} fired on path {} state {} latchEventType {}", event.getType(), event.getPath(), event.getState(), latchEventType);
       if (latchEventType == null || event.getType() == latchEventType) {
         lock.lock();
         try {
@@ -234,7 +234,7 @@ public class OverseerTaskQueue extends ZkDistributedQueue {
       throws KeeperException, InterruptedException {
     ArrayList<QueueEvent> topN = new ArrayList<>();
 
-    LOG.debug("Peeking for top {} elements. ExcludeSet: {}", n, excludeSet);
+    log.debug("Peeking for top {} elements. ExcludeSet: {}", n, excludeSet);
     Timer.Context time;
     if (waitMillis == Long.MAX_VALUE) time = stats.time(dir + "_peekTopN_wait_forever");
     else time = stats.time(dir + "_peekTopN_wait" + waitMillis);
@@ -252,13 +252,13 @@ public class OverseerTaskQueue extends ZkDistributedQueue {
   }
 
   private static void printQueueEventsListElementIds(ArrayList<QueueEvent> topN) {
-    if (LOG.isDebugEnabled() && !topN.isEmpty()) {
+    if (log.isDebugEnabled() && !topN.isEmpty()) {
       StringBuilder sb = new StringBuilder("[");
       for (QueueEvent queueEvent : topN) {
         sb.append(queueEvent.getId()).append(", ");
       }
       sb.append("]");
-      LOG.debug("Returning topN elements: {}", sb.toString());
+      log.debug("Returning topN elements: {}", sb.toString());
     }
   }